From d1997beb3e020761a04cf97f07e9f1896f147b64 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Mon, 8 Jan 2024 11:09:41 +0100 Subject: [PATCH 01/28] Add ARM64 pipeline (#213) --- .github/workflows/go-application.yml | 30 +++++++++++++++++++++++++++- Makefile | 3 +++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/.github/workflows/go-application.yml b/.github/workflows/go-application.yml index 169e88cb..a65e66c0 100644 --- a/.github/workflows/go-application.yml +++ b/.github/workflows/go-application.yml @@ -4,11 +4,13 @@ on: push: branches: - master + - v1 tags: - "*" pull_request: branches: - master + - v1 env: TAG_NAME: "${GITHUB_REF##*/}" @@ -162,6 +164,31 @@ jobs: name: logstash-exporter-linux path: out/main-linux + build-linux-arm: + name: Build Linux ARM binary + runs-on: ubuntu-20.04 + needs: [lint, test] + if: startsWith(github.ref, 'refs/tags/') + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Golang with cache + uses: magnetikonline/action-golang-cache@v4 + with: + go-version: "^1.21" + + - name: Build Linux ARM binary + run: make build-linux-arm + env: + VERSION: ${{ github.ref }} + + - name: Upload Linux ARM binary + uses: actions/upload-artifact@v3 + with: + name: logstash-exporter-linux-arm + path: out/main-linux-arm + build-darwin: name: Build Mac binary runs-on: ubuntu-20.04 @@ -369,13 +396,14 @@ jobs: upload-binaries: strategy: matrix: - binary: [linux, darwin, windows] + binary: [linux, darwin, windows, linux-arm] runs-on: ubuntu-20.04 needs: - create-release - build-linux - build-darwin - build-windows + - build-linux-arm if: startsWith(github.ref, 'refs/tags/') steps: - name: Download binary diff --git a/Makefile b/Makefile index 14b3ad5d..2019bb94 100644 --- a/Makefile +++ b/Makefile @@ -38,6 +38,9 @@ build-linux: out/main-linux build-darwin: out/main-darwin #: Builds a binary executable for Windows build-windows: out/main-windows +#: Builds a binary executable for Linux ARM +build-linux-arm: + CGO_ENABLED=0 GOOS=linux GOARCH=arm GOARM=7 go build -a -installsuffix cgo -ldflags="$(ldflags)" -o out/main-linux-arm cmd/exporter/main.go #: Builds a Docker image for the Go Exporter application build-docker: From ba0236525328a4377e5447b13c921e5772570294 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Mon, 8 Jan 2024 11:15:11 +0100 Subject: [PATCH 02/28] Add missing dependency to application pipeline --- .github/workflows/go-application.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/go-application.yml b/.github/workflows/go-application.yml index a65e66c0..5b116cb2 100644 --- a/.github/workflows/go-application.yml +++ b/.github/workflows/go-application.yml @@ -366,6 +366,7 @@ jobs: - build-linux - build-darwin - build-windows + - build-linux-arm - build-linux-docker-image - build-docker-linux-arm-image - test-integration From 09a02b9e587b88095a49e70e46d5dc255e7f3756 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Mon, 8 Jan 2024 11:24:11 +0100 Subject: [PATCH 03/28] Update multiple v1 dependencies (#214) --- go.mod | 8 ++++---- go.sum | 20 ++++++++------------ 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/go.mod b/go.mod index 6f60f230..b66ebd45 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ toolchain go1.21.4 require ( github.com/joho/godotenv v1.5.1 - github.com/prometheus/client_golang v1.17.0 + github.com/prometheus/client_golang v1.18.0 ) require ( @@ -29,7 +29,7 @@ require ( github.com/gkampitakis/go-snaps v0.4.12 github.com/prometheus/client_model v0.5.0 
github.com/prometheus/common v0.45.0 - github.com/prometheus/procfs v0.11.1 // indirect - golang.org/x/sys v0.13.0 // indirect - google.golang.org/protobuf v1.31.0 // indirect + github.com/prometheus/procfs v0.12.0 // indirect + golang.org/x/sys v0.16.0 // indirect + google.golang.org/protobuf v1.32.0 // indirect ) diff --git a/go.sum b/go.sum index f51346af..e2589ec5 100644 --- a/go.sum +++ b/go.sum @@ -11,8 +11,6 @@ github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZ github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= github.com/gkampitakis/go-snaps v0.4.12 h1:YeMgKOm0XW3f/Pt2rYpUlpyF8nG6lYGe9oXFJw5LdME= github.com/gkampitakis/go-snaps v0.4.12/go.mod h1:PpnF1KPXQAHBdb/DHoi/1VmlwE+ZkVHzl+QHmgzMSz8= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= @@ -26,14 +24,14 @@ github.com/maruel/natural v1.1.0/go.mod h1:eFVhYCcUOfZFxXoDZam8Ktya72wa79fNC3lc/ github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= -github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= +github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM= github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= -github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI= -github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= @@ -47,9 +45,7 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/xerrors 
v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= -google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= +google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= From d731a2d41e5d46c311591701b4d89c9d4e636f7b Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Thu, 11 Jan 2024 15:49:56 +0100 Subject: [PATCH 04/28] Change type of some variables that may hold big numbers to int64 --- .../__snapshots__/nodestats_response_test.snap | 8 ++++---- fetcher/responses/nodestats_response.go | 14 +++++++------- fixtures/node_stats.json | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/fetcher/responses/__snapshots__/nodestats_response_test.snap b/fetcher/responses/__snapshots__/nodestats_response_test.snap index 1e0bafc8..b747b6f2 100755 --- a/fetcher/responses/__snapshots__/nodestats_response_test.snap +++ b/fetcher/responses/__snapshots__/nodestats_response_test.snap @@ -82,8 +82,8 @@ responses.NodeStatsResponse{ }, }, Reloads: responses.PipelineReloadResponse{}, - Queue: struct { Type string "json:\"type\""; EventsCount int "json:\"events_count\""; QueueSizeInBytes int "json:\"queue_size_in_bytes\""; MaxQueueSizeInBytes int "json:\"max_queue_size_in_bytes\"" }{}, - DeadLetterQueue: struct { MaxQueueSizeInBytes int "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int "json:\"queue_size_in_bytes\""; DroppedEvents int "json:\"dropped_events\""; ExpiredEvents int "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{}, + Queue: struct { Type string "json:\"type\""; EventsCount int64 "json:\"events_count\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\"" }{}, + DeadLetterQueue: struct { MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; DroppedEvents int64 "json:\"dropped_events\""; ExpiredEvents int64 "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{}, Hash: "", EphemeralID: "", }, @@ -159,8 +159,8 @@ responses.NodeStatsResponse{ Backtrace: {"org/logstash/execution/AbstractPipelineExt.java:151:in `reload_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:181:in `block in reload_pipeline'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"}, }, }, - Queue: struct { Type string "json:\"type\""; EventsCount int "json:\"events_count\""; QueueSizeInBytes int "json:\"queue_size_in_bytes\""; MaxQueueSizeInBytes int "json:\"max_queue_size_in_bytes\"" }{Type:"memory", EventsCount:0, QueueSizeInBytes:0, MaxQueueSizeInBytes:0}, - DeadLetterQueue: struct { MaxQueueSizeInBytes int "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int "json:\"queue_size_in_bytes\""; DroppedEvents int "json:\"dropped_events\""; ExpiredEvents int "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{MaxQueueSizeInBytes:1073741824, QueueSizeInBytes:1, DroppedEvents:0, ExpiredEvents:0, 
StoragePolicy:"drop_newer"}, + Queue: struct { Type string "json:\"type\""; EventsCount int64 "json:\"events_count\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\"" }{Type:"memory", EventsCount:0, QueueSizeInBytes:0, MaxQueueSizeInBytes:0}, + DeadLetterQueue: struct { MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; DroppedEvents int64 "json:\"dropped_events\""; ExpiredEvents int64 "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{MaxQueueSizeInBytes:47244640256, QueueSizeInBytes:1, DroppedEvents:0, ExpiredEvents:0, StoragePolicy:"drop_newer"}, Hash: "a73729cc9c29203931db21553c5edba063820a7e40d16cb5053be75cc3811a17", EphemeralID: "a5c63d09-1ba6-4d67-90a5-075f468a7ab0", }, diff --git a/fetcher/responses/nodestats_response.go b/fetcher/responses/nodestats_response.go index d861e975..76b551f9 100644 --- a/fetcher/responses/nodestats_response.go +++ b/fetcher/responses/nodestats_response.go @@ -170,16 +170,16 @@ type SinglePipelineResponse struct { Reloads PipelineReloadResponse `json:"reloads"` Queue struct { Type string `json:"type"` - EventsCount int `json:"events_count"` - QueueSizeInBytes int `json:"queue_size_in_bytes"` - MaxQueueSizeInBytes int `json:"max_queue_size_in_bytes"` + EventsCount int64 `json:"events_count"` + QueueSizeInBytes int64 `json:"queue_size_in_bytes"` + MaxQueueSizeInBytes int64 `json:"max_queue_size_in_bytes"` } `json:"queue"` DeadLetterQueue struct { - MaxQueueSizeInBytes int `json:"max_queue_size_in_bytes"` + MaxQueueSizeInBytes int64 `json:"max_queue_size_in_bytes"` // todo: research how LastError is returned - QueueSizeInBytes int `json:"queue_size_in_bytes"` - DroppedEvents int `json:"dropped_events"` - ExpiredEvents int `json:"expired_events"` + QueueSizeInBytes int64 `json:"queue_size_in_bytes"` + DroppedEvents int64 `json:"dropped_events"` + ExpiredEvents int64 `json:"expired_events"` StoragePolicy string `json:"storage_policy"` } `json:"dead_letter_queue"` Hash string `json:"hash"` diff --git a/fixtures/node_stats.json b/fixtures/node_stats.json index 7a1c3a27..44059ded 100644 --- a/fixtures/node_stats.json +++ b/fixtures/node_stats.json @@ -250,7 +250,7 @@ "max_queue_size_in_bytes": 0 }, "dead_letter_queue": { - "max_queue_size_in_bytes": 1073741824, + "max_queue_size_in_bytes": 47244640256, "last_error": "no errors", "queue_size_in_bytes": 1, "dropped_events": 0, From 03f1f4ddcc1a74ab5ec1394e5e1f577c666a9db3 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Thu, 11 Jan 2024 16:10:12 +0100 Subject: [PATCH 05/28] Add environment variable to configure http timeout --- cmd/exporter/main.go | 11 +++++-- collectors/collector_manager.go | 7 +++-- collectors/collector_manager_test.go | 5 ++- config/http_config.go | 25 +++++++++++++-- config/http_config_test.go | 46 ++++++++++++++++++++++++++++ server/healthcheck.go | 7 ++--- server/healthcheck_test.go | 6 ++-- server/server.go | 5 +-- server/server_test.go | 7 +++-- 9 files changed, 99 insertions(+), 20 deletions(-) create mode 100644 config/http_config_test.go diff --git a/cmd/exporter/main.go b/cmd/exporter/main.go index 1a701b5c..d9ab6762 100644 --- a/cmd/exporter/main.go +++ b/cmd/exporter/main.go @@ -41,8 +41,15 @@ func main() { versionInfo := config.GetVersionInfo() slog.Info(versionInfo.String()) - collectorManager := collectors.NewCollectorManager(logstashUrl) - appServer := server.NewAppServer(host, port) + httpTimeout, err := 
config.GetHttpTimeout() + if err != nil { + slog.Error("failed to get http timeout", "err", err) + os.Exit(1) + } + slog.Debug("http timeout", "timeout", httpTimeout) + + collectorManager := collectors.NewCollectorManager(logstashUrl, httpTimeout) + appServer := server.NewAppServer(host, port, httpTimeout) prometheus.MustRegister(collectorManager) slog.Info("starting server on", "host", host, "port", port) diff --git a/collectors/collector_manager.go b/collectors/collector_manager.go index d1cf446f..c0881c2f 100644 --- a/collectors/collector_manager.go +++ b/collectors/collector_manager.go @@ -22,9 +22,10 @@ type Collector interface { type CollectorManager struct { collectors map[string]Collector scrapeDurations *prometheus.SummaryVec + timeout time.Duration } -func NewCollectorManager(endpoint string) *CollectorManager { +func NewCollectorManager(endpoint string, timeout time.Duration) *CollectorManager { client := logstashclient.NewClient(endpoint) collectors := getCollectors(client) @@ -32,7 +33,7 @@ func NewCollectorManager(endpoint string) *CollectorManager { scrapeDurations := getScrapeDurationsCollector() prometheus.MustRegister(version.NewCollector("logstash_exporter")) - return &CollectorManager{collectors: collectors, scrapeDurations: scrapeDurations} + return &CollectorManager{collectors: collectors, scrapeDurations: scrapeDurations, timeout: timeout} } func getCollectors(client logstashclient.Client) map[string]Collector { @@ -45,7 +46,7 @@ func getCollectors(client logstashclient.Client) map[string]Collector { // Collect executes all collectors and sends the collected metrics to the provided channel. // It also sends the duration of the collection to the scrapeDurations collector. func (manager *CollectorManager) Collect(ch chan<- prometheus.Metric) { - ctx, cancel := context.WithTimeout(context.Background(), config.HttpTimeout) + ctx, cancel := context.WithTimeout(context.Background(), manager.timeout) defer cancel() diff --git a/collectors/collector_manager_test.go b/collectors/collector_manager_test.go index 58131155..81180d95 100644 --- a/collectors/collector_manager_test.go +++ b/collectors/collector_manager_test.go @@ -5,13 +5,16 @@ import ( "errors" "sync" "testing" + "time" "github.com/prometheus/client_golang/prometheus" ) +const httpTimeout = 2 * time.Second + func TestNewCollectorManager(t *testing.T) { mockEndpoint := "http://localhost:9600" - cm := NewCollectorManager(mockEndpoint) + cm := NewCollectorManager(mockEndpoint, httpTimeout) if cm == nil { t.Error("Expected collector manager to be initialized") diff --git a/config/http_config.go b/config/http_config.go index 91ca8122..fcccea4a 100644 --- a/config/http_config.go +++ b/config/http_config.go @@ -1,6 +1,25 @@ package config -import "time" +import ( + "os" + "time" +) -// HttpTimeout is the timeout for http requests utilized by multiple contexts -const HttpTimeout = time.Second * 2 +const ( + defaultHttpTimeout = time.Second * 2 + httpTimeoutEnvVar = "HTTP_TIMEOUT" +) + +func GetHttpTimeout() (time.Duration, error) { + userDefinedTimeout := os.Getenv(httpTimeoutEnvVar) + if userDefinedTimeout == "" { + return defaultHttpTimeout, nil + } + + timeout, err := time.ParseDuration(userDefinedTimeout) + if err != nil { + return 0, err + } + + return timeout, nil +} diff --git a/config/http_config_test.go b/config/http_config_test.go new file mode 100644 index 00000000..e8bc6b6f --- /dev/null +++ b/config/http_config_test.go @@ -0,0 +1,46 @@ +package config + +import ( + "os" + "testing" + "time" +) + +func 
TestGetHttpTimeout(t *testing.T) { + t.Run("DefaultTimeout", func(t *testing.T) { + t.Parallel() + os.Unsetenv(httpTimeoutEnvVar) + timeout, err := GetHttpTimeout() + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + if timeout != defaultHttpTimeout { + t.Errorf("Expected default timeout of %v, got %v", defaultHttpTimeout, timeout) + } + }) + + t.Run("CustomTimeout", func(t *testing.T) { + t.Parallel() + expectedTimeout := "5s" + os.Setenv(httpTimeoutEnvVar, expectedTimeout) + defer os.Unsetenv(httpTimeoutEnvVar) + timeout, err := GetHttpTimeout() + if err != nil { + t.Errorf("Unexpected error: %v", err) + } + parsedTimeout, _ := time.ParseDuration(expectedTimeout) + if timeout != parsedTimeout { + t.Errorf("Expected timeout of %v, got %v", parsedTimeout, timeout) + } + }) + + t.Run("InvalidTimeout", func(t *testing.T) { + t.Parallel() + os.Setenv(httpTimeoutEnvVar, "invalid") + defer os.Unsetenv(httpTimeoutEnvVar) + _, err := GetHttpTimeout() + if err == nil { + t.Error("Expected an error for invalid timeout, but got nil") + } + }) +} diff --git a/server/healthcheck.go b/server/healthcheck.go index 3dc636e7..708741a6 100644 --- a/server/healthcheck.go +++ b/server/healthcheck.go @@ -3,14 +3,13 @@ package server import ( "context" "net/http" - - "github.com/kuskoman/logstash-exporter/config" + "time" ) -func getHealthCheck(logstashURL string) func(http.ResponseWriter, *http.Request) { +func getHealthCheck(logstashURL string, timeout time.Duration) func(http.ResponseWriter, *http.Request) { client := &http.Client{} return func(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), config.HttpTimeout) + ctx, cancel := context.WithTimeout(r.Context(), timeout) defer cancel() req, err := http.NewRequestWithContext(ctx, http.MethodGet, logstashURL, nil) diff --git a/server/healthcheck_test.go b/server/healthcheck_test.go index 28fec3ef..54534047 100644 --- a/server/healthcheck_test.go +++ b/server/healthcheck_test.go @@ -13,7 +13,7 @@ func TestHealthCheck(t *testing.T) { })) defer mockServer.Close() - handler := getHealthCheck(mockServer.URL) + handler := getHealthCheck(mockServer.URL, defaultHttpTimeout) req, err := http.NewRequest(http.MethodGet, "/", nil) if err != nil { t.Fatalf("Error creating request: %v", err) @@ -40,7 +40,7 @@ func TestHealthCheck(t *testing.T) { }) t.Run("no response", func(t *testing.T) { - handler := getHealthCheck("http://localhost:12345") + handler := getHealthCheck("http://localhost:12345", defaultHttpTimeout) req, err := http.NewRequest(http.MethodGet, "/", nil) if err != nil { t.Fatalf("Error creating request: %v", err) @@ -55,7 +55,7 @@ func TestHealthCheck(t *testing.T) { }) t.Run("invalid url", func(t *testing.T) { - handler := getHealthCheck("http://localhost:96010:invalidurl") + handler := getHealthCheck("http://localhost:96010:invalidurl", defaultHttpTimeout) req, err := http.NewRequest(http.MethodGet, "/", nil) if err != nil { t.Fatalf("Error creating request: %v", err) diff --git a/server/server.go b/server/server.go index ab67af2a..afee1d99 100644 --- a/server/server.go +++ b/server/server.go @@ -3,6 +3,7 @@ package server import ( "fmt" "net/http" + "time" "github.com/kuskoman/logstash-exporter/config" "github.com/prometheus/client_golang/prometheus/promhttp" @@ -12,13 +13,13 @@ import ( // and registers the prometheus handler and the healthcheck handler // to the server's mux. The prometheus handler is managed under the // hood by the prometheus client library. 
-func NewAppServer(host, port string) *http.Server { +func NewAppServer(host, port string, httpTimeout time.Duration) *http.Server { mux := http.NewServeMux() mux.Handle("/metrics", promhttp.Handler()) mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { http.Redirect(w, r, "/metrics", http.StatusMovedPermanently) }) - mux.HandleFunc("/healthcheck", getHealthCheck(config.LogstashUrl)) + mux.HandleFunc("/healthcheck", getHealthCheck(config.LogstashUrl, httpTimeout)) mux.HandleFunc("/version", getVersionInfoHandler(config.GetVersionInfo())) listenUrl := fmt.Sprintf("%s:%s", host, port) diff --git a/server/server_test.go b/server/server_test.go index d262e0b6..cf6f179c 100644 --- a/server/server_test.go +++ b/server/server_test.go @@ -4,11 +4,14 @@ import ( "net/http" "net/http/httptest" "testing" + "time" ) +const defaultHttpTimeout = 2 * time.Second + func TestNewAppServer(t *testing.T) { t.Run("Test handling of /metrics endpoint", func(t *testing.T) { - server := NewAppServer("", "8080") + server := NewAppServer("", "8080", defaultHttpTimeout) req, err := http.NewRequest("GET", "/metrics", nil) if err != nil { t.Fatal(err) @@ -21,7 +24,7 @@ func TestNewAppServer(t *testing.T) { }) t.Run("Test handling of / endpoint", func(t *testing.T) { - server := NewAppServer("", "8080") + server := NewAppServer("", "8080", defaultHttpTimeout) req, err := http.NewRequest("GET", "/", nil) if err != nil { t.Fatal(err) From d0ccae085741d4e9b1787f91f05101ebf6cc93df Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Thu, 11 Jan 2024 16:12:10 +0100 Subject: [PATCH 06/28] Add documentation to http timeout configuration --- README.md | 104 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 53 insertions(+), 51 deletions(-) diff --git a/README.md b/README.md index ba96c0fb..5cd106b7 100644 --- a/README.md +++ b/README.md @@ -89,12 +89,14 @@ The Helm chart has its own [README](./chart/README.md). The application can be configured using the following environment variables, which are also loaded from `.env` file: -| Variable Name | Description | Default Value | -| -------------- | ----------------------------------------------------------- | ----------------------- | -| `LOGSTASH_URL` | URL to Logstash API | `http://localhost:9600` | -| `PORT` | Port on which the application will be exposed | `9198` | -| `HOST` | Host on which the application will be exposed | empty string | -| `LOG_LEVEL` | [Log level](https://pkg.go.dev/golang.org/x/exp/slog#Level) | empty (defaults to info) | +| Variable Name | Description | Default Value | +|---------------|-----------------------------------------------------------------------------------------------|-------------------------| +| `LOGSTASH_URL`| URL to Logstash API | `http://localhost:9600` | +| `PORT` | Port on which the application will be exposed | `9198` | +| `HOST` | Host on which the application will be exposed | `""` (empty string) | +| `LOG_LEVEL` | [Log level](https://pkg.go.dev/golang.org/x/exp/slog#Level) (defaults to "info" if not set) | `""` (empty string) | +| `HTTP_TIMEOUT`| Timeout for HTTP requests to Logstash API in [Go duration format](https://golang.org/pkg/time/#ParseDuration) | `2s` | + All configuration variables can be checked in the [config directory](./config/). @@ -105,7 +107,7 @@ All configuration variables can be checked in the [config directory](./config/). #### Available Commands - + - `make all`: Builds binary executables for all OS (Win, Darwin, Linux). - `make run`: Runs the Go Exporter application. 
- `make build-linux`: Builds a binary executable for Linux. @@ -127,7 +129,7 @@ All configuration variables can be checked in the [config directory](./config/). - `make helm-readme`: Generates Helm chart README.md file. - `make clean-elasticsearch`: Cleans Elasticsearch data, works only with default ES port. The command may take a very long time to complete. - `make help`: Shows info about available commands. - + #### File Structure @@ -138,91 +140,91 @@ The binary executables are saved in the out directory. #### Example Usage - + Builds binary executables for all OS (Win, Darwin, Linux): - + make all - + Runs the Go Exporter application: - + make run - + Builds a binary executable for Linux: - + make build-linux - + Builds a binary executable for Darwin: - + make build-darwin - + Builds a binary executable for Windows: - + make build-windows - + Builds a Docker image for the Go Exporter application: - + make build-docker - + Builds a multi-arch Docker image (`amd64` and `arm64`): - + make build-docker-multi - + Deletes all binary executables in the out directory: - + make clean - + Runs all tests: - + make test - + Displays test coverage report: - + make test-coverage - + Starts a Docker-compose configuration: - + make compose - + Starts a Docker-compose configuration until it's ready: - + make wait-for-compose - + Stops a Docker-compose configuration: - + make compose-down - + Verifies the metrics from the Go Exporter application: - + make verify-metrics - + Pulls the Docker image from the registry: - + make pull - + Shows logs from the Docker-compose configuration: - + make logs - + Minifies the binary executables: - + make minify - + Installs readme-generator-for-helm tool: - + make install-helm-readme - + Generates Helm chart README.md file: - + make helm-readme - + Cleans Elasticsearch data, works only with default ES port. 
The command may take a very long time to complete: - + make clean-elasticsearch - + Shows info about available commands: - + make help - + ## Helper Scripts From ee53f7d0cc38b0b548f45419e4b5983884d1a7c9 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Thu, 11 Jan 2024 16:16:31 +0100 Subject: [PATCH 07/28] Update actions for managing artifacts from @v3 to @v4 --- .github/workflows/go-application.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/go-application.yml b/.github/workflows/go-application.yml index 5b116cb2..7b84e252 100644 --- a/.github/workflows/go-application.yml +++ b/.github/workflows/go-application.yml @@ -159,7 +159,7 @@ jobs: VERSION: ${{ github.ref }} - name: Upload Linux binary - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: logstash-exporter-linux path: out/main-linux @@ -184,7 +184,7 @@ jobs: VERSION: ${{ github.ref }} - name: Upload Linux ARM binary - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: logstash-exporter-linux-arm path: out/main-linux-arm @@ -209,7 +209,7 @@ jobs: VERSION: ${{ github.ref }} - name: Upload Mac binary - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: logstash-exporter-darwin path: out/main-darwin @@ -234,7 +234,7 @@ jobs: VERSION: ${{ github.ref }} - name: Upload Windows binary - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: logstash-exporter-windows path: out/main-windows @@ -254,7 +254,7 @@ jobs: run: docker save logstash-exporter:latest | gzip > logstash-exporter.tar.gz - name: Upload Docker image as an artifact - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: logstash-exporter-docker-image path: logstash-exporter.tar.gz @@ -324,7 +324,7 @@ jobs: uses: actions/checkout@v4 - name: Download Docker image - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: logstash-exporter-docker-image path: .helm/files @@ -408,7 +408,7 @@ jobs: if: startsWith(github.ref, 'refs/tags/') steps: - name: Download binary - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: logstash-exporter-${{ matrix.binary }} - name: Generate sha256 checksum From b78466c48bd7084b2f0cea60f276db1a4feb5d79 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Fri, 19 Jan 2024 09:16:42 +0100 Subject: [PATCH 08/28] Fix intendation in service.yaml (#228) --- chart/templates/service.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/chart/templates/service.yaml b/chart/templates/service.yaml index 58197f9b..f4bcaeb2 100644 --- a/chart/templates/service.yaml +++ b/chart/templates/service.yaml @@ -6,8 +6,8 @@ metadata: app: {{ template "logstash-exporter.name" . 
}} release: {{ .Release.Name }} {{- if .Values.service.annotations }} - annotations: - {{ toYaml .Values.service.annotations | indent 6 }} + annotations: + {{- toYaml .Values.service.annotations | nindent 4 }} {{- end }} spec: type: {{ .Values.service.type }} From ca3539821c916c0f97f5068ef7c9399ab5455e89 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Fri, 19 Jan 2024 09:22:25 +0100 Subject: [PATCH 09/28] Update Prometheus library version (#229) --- go.mod | 3 +-- go.sum | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index b66ebd45..0d8e4360 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,6 @@ require ( github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/maruel/natural v1.1.0 // indirect - github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect github.com/rogpeppe/go-internal v1.11.0 // indirect github.com/tidwall/gjson v1.17.0 // indirect github.com/tidwall/match v1.1.1 // indirect @@ -28,7 +27,7 @@ require ( github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/gkampitakis/go-snaps v0.4.12 github.com/prometheus/client_model v0.5.0 - github.com/prometheus/common v0.45.0 + github.com/prometheus/common v0.46.0 github.com/prometheus/procfs v0.12.0 // indirect golang.org/x/sys v0.16.0 // indirect google.golang.org/protobuf v1.32.0 // indirect diff --git a/go.sum b/go.sum index e2589ec5..6d8ae513 100644 --- a/go.sum +++ b/go.sum @@ -21,15 +21,13 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/maruel/natural v1.1.0 h1:2z1NgP/Vae+gYrtC0VuvrTJ6U35OuyUqDdfluLqMWuQ= github.com/maruel/natural v1.1.0/go.mod h1:eFVhYCcUOfZFxXoDZam8Ktya72wa79fNC3lc/leA0DQ= -github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= -github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= -github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM= -github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= +github.com/prometheus/common v0.46.0 h1:doXzt5ybi1HBKpsZOL0sSkaNHJJqkyfEWZGGqqScV0Y= +github.com/prometheus/common v0.46.0/go.mod h1:Tp0qkxpb9Jsg54QMe+EAmqXkSV7Evdy1BTn+g2pa/hQ= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= From 387fb36f814e74e81dd14d82a948e61cefa76f5c Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Fri, 19 Jan 2024 09:24:08 +0100 Subject: [PATCH 10/28] Bump Docker image from golang 1.21.5 to 1.21.6 (#230) --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index f57ebdd6..292245fe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.21.5-alpine3.17 
as build +FROM golang:1.21.6-alpine3.19 as build ARG VERSION \ GIT_COMMIT \ From 1d107aa27a183fde40ae84dbc355e9fe0763387a Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Mon, 5 Feb 2024 21:26:59 +0100 Subject: [PATCH 11/28] Bring back CodeQL workflow to V1 --- .github/workflows/codeql.yml | 42 ++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .github/workflows/codeql.yml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000..59d5aa49 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,42 @@ +name: "CodeQL" + +on: + push: + branches: + - "master" + pull_request: + branches: + - "master" + schedule: + - cron: "38 10 * * 6" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Setup Golang with cache + uses: magnetikonline/action-golang-cache@v4 + with: + go-version: "^1.21" + + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: go + + - name: Build application binary + run: make build-linux + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:go" From 198ad363f8e80e8e8ae7cc4ce64a959c818c7fc5 Mon Sep 17 00:00:00 2001 From: satk0 <54475808+satk0@users.noreply.github.com> Date: Fri, 16 Feb 2024 10:25:12 +0000 Subject: [PATCH 12/28] Update dependencies for v1 branch (#277) --- .github/workflows/codeql.yml | 4 ++-- .github/workflows/go-application.yml | 14 +++++++------- Dockerfile | 2 +- Dockerfile.dev | 12 +++++++++++- Makefile | 2 ++ go.mod | 8 +++----- go.sum | 12 ++++++------ 7 files changed, 32 insertions(+), 22 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 59d5aa49..e7448b0c 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -21,12 +21,12 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Initialize CodeQL uses: github/codeql-action/init@v3 diff --git a/.github/workflows/go-application.yml b/.github/workflows/go-application.yml index 7b84e252..9bae45b6 100644 --- a/.github/workflows/go-application.yml +++ b/.github/workflows/go-application.yml @@ -26,7 +26,7 @@ jobs: - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Lint code using golangci-lint uses: golangci/golangci-lint-action@v3 @@ -48,7 +48,7 @@ jobs: - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Test code run: go test ./... 
-covermode=atomic -coverprofile=coverage.out @@ -119,7 +119,7 @@ jobs: - name: Install Compose uses: ndeloof/install-compose-action@v0.0.1 with: - version: v2.15.1 + version: v2.24.5 legacy: true - name: Print compose version @@ -151,7 +151,7 @@ jobs: - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Build Linux binary run: make build-linux @@ -176,7 +176,7 @@ jobs: - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Build Linux ARM binary run: make build-linux-arm @@ -201,7 +201,7 @@ jobs: - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Build Mac binary run: make build-darwin @@ -226,7 +226,7 @@ jobs: - name: Setup Golang with cache uses: magnetikonline/action-golang-cache@v4 with: - go-version: "^1.21" + go-version: "^1.22" - name: Build Windows binary run: make build-windows diff --git a/Dockerfile b/Dockerfile index 292245fe..7020ebd5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.21.6-alpine3.19 as build +FROM golang:1.22.0-alpine3.19 as build ARG VERSION \ GIT_COMMIT \ diff --git a/Dockerfile.dev b/Dockerfile.dev index f43c5e36..130daf9a 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,5 +1,15 @@ -FROM cosmtrek/air:v1.49.0 +FROM cosmtrek/air:v1.49.0 as air + +FROM golang:1.22.0-alpine3.19 WORKDIR /app +RUN apk add --no-cache curl + +COPY --from=air /go/bin/air /go/bin/air + +EXPOSE 9198 + +HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 CMD curl -f http://localhost:9198/healthcheck + ENTRYPOINT ["/go/bin/air", "-c", ".air.toml"] diff --git a/Makefile b/Makefile index 2019bb94..6dcb9a3b 100644 --- a/Makefile +++ b/Makefile @@ -112,6 +112,8 @@ clean-elasticsearch: #: Upgrades all dependencies upgrade-dependencies: go get -u ./... 
+ go mod tidy + #: Shows info about available commands help: diff --git a/go.mod b/go.mod index 0d8e4360..1c4b59ae 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,6 @@ module github.com/kuskoman/logstash-exporter -go 1.21.0 - -toolchain go1.21.4 +go 1.22 require ( github.com/joho/godotenv v1.5.1 @@ -27,8 +25,8 @@ require ( github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/gkampitakis/go-snaps v0.4.12 github.com/prometheus/client_model v0.5.0 - github.com/prometheus/common v0.46.0 + github.com/prometheus/common v0.47.0 github.com/prometheus/procfs v0.12.0 // indirect - golang.org/x/sys v0.16.0 // indirect + golang.org/x/sys v0.17.0 // indirect google.golang.org/protobuf v1.32.0 // indirect ) diff --git a/go.sum b/go.sum index 6d8ae513..68a3686e 100644 --- a/go.sum +++ b/go.sum @@ -11,8 +11,8 @@ github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZ github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= github.com/gkampitakis/go-snaps v0.4.12 h1:YeMgKOm0XW3f/Pt2rYpUlpyF8nG6lYGe9oXFJw5LdME= github.com/gkampitakis/go-snaps v0.4.12/go.mod h1:PpnF1KPXQAHBdb/DHoi/1VmlwE+ZkVHzl+QHmgzMSz8= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -26,8 +26,8 @@ github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+ github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= -github.com/prometheus/common v0.46.0 h1:doXzt5ybi1HBKpsZOL0sSkaNHJJqkyfEWZGGqqScV0Y= -github.com/prometheus/common v0.46.0/go.mod h1:Tp0qkxpb9Jsg54QMe+EAmqXkSV7Evdy1BTn+g2pa/hQ= +github.com/prometheus/common v0.47.0 h1:p5Cz0FNHo7SnWOmWmoRozVcjEp0bIVU8cV7OShpjL1k= +github.com/prometheus/common v0.47.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= @@ -43,7 +43,7 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= google.golang.org/protobuf v1.32.0/go.mod 
h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= From 76b41045d8e5688eb3bacdcaece5b9dafe5a039e Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Fri, 16 Feb 2024 17:21:29 +0100 Subject: [PATCH 13/28] Move some of v2 features to v1 (#286) * Add config.yml to gitignore * Move verify readme functions from v2 to v1 * Copy descriptions script from v2 to v1 * Add missing README commands to README * Add missing command to Makefile * Update README descriptions * Update chart version --- .github/workflows/go-application.yml | 22 +++- .gitignore | 3 + Makefile | 5 +- README.md | 3 + chart/Chart.yaml | 4 +- scripts/add_descriptions_to_readme.sh | 167 +++++++++++--------------- 6 files changed, 105 insertions(+), 99 deletions(-) diff --git a/.github/workflows/go-application.yml b/.github/workflows/go-application.yml index 9bae45b6..63f29691 100644 --- a/.github/workflows/go-application.yml +++ b/.github/workflows/go-application.yml @@ -75,8 +75,28 @@ jobs: run: make helm-readme - name: Verify that there are no changes - run: git diff --exit-code || echo "README.md is not up to date. Please use helm-readme and commit the changes." + run: git diff --exit-code || (echo "README.md is not up to date. Please use helm-readme and commit the changes." && exit 1) + - name: Print changes + run: git diff + if: failure() + + verify-makefile-readme: + name: Verify that Makefile section of README is up to date + runs-on: ubuntu-20.04 + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Generate README + run: make update-readme-descriptions + + - name: Verify that there are no changes + run: git diff --exit-code || (echo "README.md is not up to date. Please use makefile-readme and commit the changes." && exit 1) + + - name: Print changes + run: git diff + if: failure() lint-helm-chart: name: Lint Helm chart runs-on: ubuntu-20.04 diff --git a/.gitignore b/.gitignore index 9d66978a..474f8636 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,6 @@ _debug*.yaml # Readme generator files helm-generator/ + +# V2 config +config.yml diff --git a/Makefile b/Makefile index 6dcb9a3b..b12c1f44 100644 --- a/Makefile +++ b/Makefile @@ -109,12 +109,15 @@ clean-elasticsearch: echo "" ;\ done +#: Update Makefile descriptions in main README.md +update-readme-descriptions: + ./scripts/add_descriptions_to_readme.sh + #: Upgrades all dependencies upgrade-dependencies: go get -u ./... go mod tidy - #: Shows info about available commands help: @grep -B1 -E "^[a-zA-Z0-9_-]+\:([^\=]|$$)" Makefile \ diff --git a/README.md b/README.md index 5cd106b7..a92ed6a9 100644 --- a/README.md +++ b/README.md @@ -113,6 +113,7 @@ All configuration variables can be checked in the [config directory](./config/). - `make build-linux`: Builds a binary executable for Linux. - `make build-darwin`: Builds a binary executable for Darwin. - `make build-windows`: Builds a binary executable for Windows. +- `make build-linux-arm`: Builds a binary executable for Linux ARM. - `make build-docker`: Builds a Docker image for the Go Exporter application. - `make build-docker-multi`: Builds a multi-arch Docker image (`amd64` and `arm64`). - `make clean`: Deletes all binary executables in the out directory. @@ -128,6 +129,8 @@ All configuration variables can be checked in the [config directory](./config/). - `make install-helm-readme`: Installs readme-generator-for-helm tool. - `make helm-readme`: Generates Helm chart README.md file. - `make clean-elasticsearch`: Cleans Elasticsearch data, works only with default ES port. 
The command may take a very long time to complete. +- `make update-readme-descriptions`: Update Makefile descriptions in main README.md. +- `make upgrade-dependencies`: Upgrades all dependencies. - `make help`: Shows info about available commands. diff --git a/chart/Chart.yaml b/chart/Chart.yaml index 3b43dd26..cbae3aa9 100644 --- a/chart/Chart.yaml +++ b/chart/Chart.yaml @@ -2,5 +2,5 @@ apiVersion: v2 name: logstash-exporter description: Prometheus exporter for Logstash written in Go type: application -version: "1.0.2" -appVersion: "1.0.2" +version: "1.6.1" +appVersion: "1.6.1" diff --git a/scripts/add_descriptions_to_readme.sh b/scripts/add_descriptions_to_readme.sh index 7313730a..37349883 100755 --- a/scripts/add_descriptions_to_readme.sh +++ b/scripts/add_descriptions_to_readme.sh @@ -2,128 +2,105 @@ # execute: ./scripts/add_descriptions_to_readme.sh +# define new line character to be u"$SED_COMMAND" in `"$SED_COMMAND"` +nl=' +' + +SED_COMMAND="sed" + +if [[ "$OSTYPE" == "darwin"* ]]; then + SED_COMMAND="gsed" +fi + +# enable regex (see: https://stackoverflow.com/a/42740385) +shopt -s extglob + scriptName="$(dirname "$0")/$(basename "$0")" function getHelp() { # get descriptions and commands from Makefile - i=0 - commands=() - descriptions=() - - while read -r line; do - if (( i % 2 == 0 )); - then - descriptions+=( "$(echo $line | sed 's/#:[ ]*//')" ) - else - commands+=( $(echo "$line" | cut -d : -f 1) ) - fi - - ((i++)) - done < <( - # https://stackoverflow.com/a/59087509 - grep -B1 -E "^[a-zA-Z0-9_-]+\:([^\=]|$)" ./Makefile \ - | grep -v -- -- - ) + i=0 + commands=() + descriptions=() + + while read -r line; do + if (( i % 2 == 0 )); + then + descriptions+=( "${line//#:*( )}" ) + else + commands+=( "$(echo "$line" | cut -d : -f 1)" ) + fi + ((i++)) + done < <( + # https://stackoverflow.com/a/59087509 + grep -B1 -E "^[a-zA-Z0-9_-]+:([^\=]|$)" ./Makefile \ + | grep -v -- -- + ) } FILE=README.md +# returns `commands` and `descriptions` arrays getHelp -let startLine=$(grep -n "^#### Available Commands" $FILE | cut -d : -f 1)+2 -let endLine=$(grep -n "^#### File Structure" $FILE | cut -d : -f 1)-2 - -# Updates "Available Commands" section: +startLine=$(( $( grep -n "^#### Available Commands" $FILE | cut -d : -f 1 ) + 2 )) +endLine=$(( $( grep -n "^#### File Structure" $FILE | cut -d : -f 1 ) - 2 )) +# Updates "Available Commands" section if (( startLine <= endLine)); -then - $(sed -i "$startLine,${endLine}d" $FILE) # deletion of previous descriptions +then + # Deletes previous descriptions + "$SED_COMMAND" -i "$startLine,${endLine}d" "$FILE" fi -function printAvailableCommands() { - curLine=$startLine - stringToWrite="" - let commentLen=${#stringToWrite}-11 - i=0 - - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ - - $(sed -i "${curLine}i\\ " $FILE) # empty line - let curLine++ - - while (( $i < ${#commands[@]} )) - do - - stringToWrite="- \`make ${commands[$i]}\`: ${descriptions[$i]}." - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ - - let i++ - done - - $(sed -i "${curLine}i\\ " $FILE) # empty line - let curLine++ - - stringToWrite="" # multiple '*' - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ - +function createMultipleAsterisks() { + numOfAsterisks=$1 + asterisks=$( eval printf "\*%.0s" "{1..${numOfAsterisks}}" ) + echo "$asterisks" } -echo 'Updating "Available Commands" section...' 
- -printAvailableCommands - -# Updates "Example Usage" section: - -let startLine=$(grep -n "^#### Example Usage" $FILE | cut -d : -f 1)+2 -let endLine=$(grep -n "^## Helper Scripts" $FILE | cut -d : -f 1)-2 - -if (( startLine <= endLine)); -then - $(sed -i "$startLine,${endLine}d" $FILE) # deletion of previous descriptions -fi +function printAvailableCommands() { + curLine=$startLine + stringToWrite="" + commentLen=$(( ${#stringToWrite} - 11 )) + i=0 -function printExampleUsage() { - curLine=$startLine - stringToWrite="" - let commentLen=${#stringToWrite}-11 - i=0 + "$SED_COMMAND" -i "${curLine}i\\${stringToWrite}" "$FILE" - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ + # https://www.shellcheck.net/wiki/SC2219 + (( curLine++ )) || true - $(sed -i "${curLine}i\\ " $FILE) # empty line - let curLine++ + # empty line + "$SED_COMMAND" -i "${curLine}i${nl}" "$FILE" - while (( $i < ${#commands[@]} )) - do - stringToWrite="${descriptions[$i]}:" - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ + (( curLine++ )) || true - $(sed -i "${curLine}i\\ " $FILE) - let curLine++ + echo 'Writing the following commands with their descriptions:' + while (( i < ${#commands[@]} )) + do + stringToWrite="- \`make ${commands[$i]}\`: ${descriptions[$i]}." + stringToWrite=${stringToWrite//\'/\\\'} + echo "$stringToWrite" - stringToWrite=" make ${commands[$i]}" # 4 spaces for tab (DON'T CHANGE IT) - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ + "$SED_COMMAND" -i "${curLine}i\\${stringToWrite}" "$FILE" + (( curLine++ )) || true - $(sed -i "${curLine}i\\ " $FILE) - let curLine++ + (( i++ )) || true + done - let i++ - done + # empty line + "$SED_COMMAND" -i "${curLine}i${nl}" "$FILE" + (( curLine++ )) || true - stringToWrite="" # multiple '*' - $(sed -i "${curLine}i\\${stringToWrite}" $FILE) - let curLine++ + # multiple '*' + asterisks=$(createMultipleAsterisks $commentLen) + stringToWrite="" + "$SED_COMMAND" -i "${curLine}i\\${stringToWrite}" "$FILE" + (( curLine++ )) || true } -echo 'Updating "Example Usage" section...' +echo 'Updating "Available Commands" section...' -printExampleUsage +printAvailableCommands echo 'Done.' 
- From 119ace0b38cd6b3a4662eb63a5d49143ef02699c Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Tue, 27 Feb 2024 18:59:46 +0100 Subject: [PATCH 14/28] Fix not enough big integers used for parsing memory (#301) --- .../nodestats_response_test.snap | 10 +++--- fetcher/responses/nodestats_response.go | 32 +++++++++---------- fixtures/node_stats.json | 2 +- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/fetcher/responses/__snapshots__/nodestats_response_test.snap b/fetcher/responses/__snapshots__/nodestats_response_test.snap index b747b6f2..635db035 100755 --- a/fetcher/responses/__snapshots__/nodestats_response_test.snap +++ b/fetcher/responses/__snapshots__/nodestats_response_test.snap @@ -13,16 +13,16 @@ responses.NodeStatsResponse{ Pipeline: responses.PipelineResponse{Workers:16, BatchSize:125, BatchDelay:50}, Jvm: responses.JvmResponse{ Threads: struct { Count int "json:\"count\""; PeakCount int "json:\"peak_count\"" }{Count:60, PeakCount:60}, - Mem: struct { HeapUsedPercent int "json:\"heap_used_percent\""; HeapCommittedInBytes int "json:\"heap_committed_in_bytes\""; HeapMaxInBytes int "json:\"heap_max_in_bytes\""; HeapUsedInBytes int "json:\"heap_used_in_bytes\""; NonHeapUsedInBytes int "json:\"non_heap_used_in_bytes\""; NonHeapCommittedInBytes int "json:\"non_heap_committed_in_bytes\""; Pools struct { Young struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"young\""; Old struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"old\""; Survivor struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"survivor\"" } "json:\"pools\"" }{ + Mem: struct { HeapUsedPercent int "json:\"heap_used_percent\""; HeapCommittedInBytes int64 "json:\"heap_committed_in_bytes\""; HeapMaxInBytes int64 "json:\"heap_max_in_bytes\""; HeapUsedInBytes int64 "json:\"heap_used_in_bytes\""; NonHeapUsedInBytes int64 "json:\"non_heap_used_in_bytes\""; NonHeapCommittedInBytes int64 "json:\"non_heap_committed_in_bytes\""; Pools struct { Young struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"young\""; Old struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"old\""; Survivor struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"survivor\"" } "json:\"pools\"" }{ HeapUsedPercent: 27, - HeapCommittedInBytes: 1073741824, + HeapCommittedInBytes: 17179869184, HeapMaxInBytes: 1073741822, HeapUsedInBytes: 294044784, 
NonHeapUsedInBytes: 147703688, NonHeapCommittedInBytes: 155189248, - Pools: struct { Young struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"young\""; Old struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"old\""; Survivor struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"survivor\"" }{ - Young: struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, MaxInBytes:-1, CommittedInBytes:346030080, PeakUsedInBytes:326107136, UsedInBytes:180355072}, - Old: struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" }{PeakMaxInBytes:1073741824, MaxInBytes:1073741824, CommittedInBytes:687865856, PeakUsedInBytes:73986560, UsedInBytes:73986560}, + Pools: struct { Young struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"young\""; Old struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"old\""; Survivor struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"survivor\"" }{ + Young: struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, MaxInBytes:-1, CommittedInBytes:346030080, PeakUsedInBytes:326107136, UsedInBytes:180355072}, + Old: struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" }{PeakMaxInBytes:1073741824, MaxInBytes:1073741824, CommittedInBytes:687865856, PeakUsedInBytes:73986560, UsedInBytes:73986560}, Survivor: struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, MaxInBytes:-1, 
CommittedInBytes:39845888, PeakUsedInBytes:39703152, UsedInBytes:39703152}, }, }, diff --git a/fetcher/responses/nodestats_response.go b/fetcher/responses/nodestats_response.go index 76b551f9..73229037 100644 --- a/fetcher/responses/nodestats_response.go +++ b/fetcher/responses/nodestats_response.go @@ -14,26 +14,26 @@ type JvmResponse struct { PeakCount int `json:"peak_count"` } `json:"threads"` Mem struct { - HeapUsedPercent int `json:"heap_used_percent"` - HeapCommittedInBytes int `json:"heap_committed_in_bytes"` - HeapMaxInBytes int `json:"heap_max_in_bytes"` - HeapUsedInBytes int `json:"heap_used_in_bytes"` - NonHeapUsedInBytes int `json:"non_heap_used_in_bytes"` - NonHeapCommittedInBytes int `json:"non_heap_committed_in_bytes"` + HeapUsedPercent int `json:"heap_used_percent"` + HeapCommittedInBytes int64 `json:"heap_committed_in_bytes"` + HeapMaxInBytes int64 `json:"heap_max_in_bytes"` + HeapUsedInBytes int64 `json:"heap_used_in_bytes"` + NonHeapUsedInBytes int64 `json:"non_heap_used_in_bytes"` + NonHeapCommittedInBytes int64 `json:"non_heap_committed_in_bytes"` Pools struct { Young struct { - PeakMaxInBytes int `json:"peak_max_in_bytes"` - MaxInBytes int `json:"max_in_bytes"` - CommittedInBytes int `json:"committed_in_bytes"` - PeakUsedInBytes int `json:"peak_used_in_bytes"` - UsedInBytes int `json:"used_in_bytes"` + PeakMaxInBytes int64 `json:"peak_max_in_bytes"` + MaxInBytes int64 `json:"max_in_bytes"` + CommittedInBytes int64 `json:"committed_in_bytes"` + PeakUsedInBytes int64 `json:"peak_used_in_bytes"` + UsedInBytes int64 `json:"used_in_bytes"` } `json:"young"` Old struct { - PeakMaxInBytes int `json:"peak_max_in_bytes"` - MaxInBytes int `json:"max_in_bytes"` - CommittedInBytes int `json:"committed_in_bytes"` - PeakUsedInBytes int `json:"peak_used_in_bytes"` - UsedInBytes int `json:"used_in_bytes"` + PeakMaxInBytes int64 `json:"peak_max_in_bytes"` + MaxInBytes int64 `json:"max_in_bytes"` + CommittedInBytes int64 `json:"committed_in_bytes"` + PeakUsedInBytes int64 `json:"peak_used_in_bytes"` + UsedInBytes int64 `json:"used_in_bytes"` } `json:"old"` Survivor struct { PeakMaxInBytes int `json:"peak_max_in_bytes"` diff --git a/fixtures/node_stats.json b/fixtures/node_stats.json index 44059ded..e872ad8f 100644 --- a/fixtures/node_stats.json +++ b/fixtures/node_stats.json @@ -19,7 +19,7 @@ }, "mem": { "heap_used_percent": 27, - "heap_committed_in_bytes": 1073741824, + "heap_committed_in_bytes": 17179869184, "heap_max_in_bytes": 1073741822, "heap_used_in_bytes": 294044784, "non_heap_used_in_bytes": 147703688, From 96fad72419e8b5b76d3f6cfc7b958ca4a59c9c4a Mon Sep 17 00:00:00 2001 From: Vincent Lepot Date: Thu, 18 Apr 2024 12:38:22 +0200 Subject: [PATCH 15/28] Update prometheus/client_golang to 1.19.0 (#325) --- collectors/collector_manager.go | 2 +- go.mod | 22 ++++++++--------- go.sum | 44 ++++++++++++++++----------------- 3 files changed, 34 insertions(+), 34 deletions(-) diff --git a/collectors/collector_manager.go b/collectors/collector_manager.go index c0881c2f..c57cade7 100644 --- a/collectors/collector_manager.go +++ b/collectors/collector_manager.go @@ -11,7 +11,7 @@ import ( "github.com/kuskoman/logstash-exporter/config" logstashclient "github.com/kuskoman/logstash-exporter/fetcher/logstash_client" "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/common/version" + "github.com/prometheus/client_golang/prometheus/collectors/version" ) type Collector interface { diff --git a/go.mod b/go.mod index 1c4b59ae..a5cb6526 100644 --- a/go.mod +++ b/go.mod @@ -4,7 
+4,7 @@ go 1.22 require ( github.com/joho/godotenv v1.5.1 - github.com/prometheus/client_golang v1.18.0 + github.com/prometheus/client_golang v1.19.0 ) require ( @@ -12,9 +12,9 @@ require ( github.com/gkampitakis/go-diff v1.3.2 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect - github.com/maruel/natural v1.1.0 // indirect - github.com/rogpeppe/go-internal v1.11.0 // indirect - github.com/tidwall/gjson v1.17.0 // indirect + github.com/maruel/natural v1.1.1 // indirect + github.com/rogpeppe/go-internal v1.12.0 // indirect + github.com/tidwall/gjson v1.17.1 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect github.com/tidwall/sjson v1.2.5 // indirect @@ -22,11 +22,11 @@ require ( require ( github.com/beorn7/perks v1.0.1 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/gkampitakis/go-snaps v0.4.12 - github.com/prometheus/client_model v0.5.0 - github.com/prometheus/common v0.47.0 - github.com/prometheus/procfs v0.12.0 // indirect - golang.org/x/sys v0.17.0 // indirect - google.golang.org/protobuf v1.32.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/gkampitakis/go-snaps v0.5.3 + github.com/prometheus/client_model v0.6.1 + github.com/prometheus/common v0.52.3 // indirect + github.com/prometheus/procfs v0.13.0 // indirect + golang.org/x/sys v0.19.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect ) diff --git a/go.sum b/go.sum index 68a3686e..b5cca807 100644 --- a/go.sum +++ b/go.sum @@ -1,7 +1,7 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -9,8 +9,8 @@ github.com/gkampitakis/ciinfo v0.3.0 h1:gWZlOC2+RYYttL0hBqcoQhM7h1qNkVqvRCV1fOvp github.com/gkampitakis/ciinfo v0.3.0/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M= github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= -github.com/gkampitakis/go-snaps v0.4.12 h1:YeMgKOm0XW3f/Pt2rYpUlpyF8nG6lYGe9oXFJw5LdME= -github.com/gkampitakis/go-snaps v0.4.12/go.mod h1:PpnF1KPXQAHBdb/DHoi/1VmlwE+ZkVHzl+QHmgzMSz8= +github.com/gkampitakis/go-snaps v0.5.3 h1:2cJnBgHzJhh0Jk5XBIyDYDe1Ylfncoa9r9bVJ5qvOAE= +github.com/gkampitakis/go-snaps v0.5.3/go.mod h1:ZABkO14uCuVxBHAXAfKG+bqNz+aa1bGPAg8jkI0Nk8Y= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= @@ -19,23 +19,23 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 
h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/maruel/natural v1.1.0 h1:2z1NgP/Vae+gYrtC0VuvrTJ6U35OuyUqDdfluLqMWuQ= -github.com/maruel/natural v1.1.0/go.mod h1:eFVhYCcUOfZFxXoDZam8Ktya72wa79fNC3lc/leA0DQ= +github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= +github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= -github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= -github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= -github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= -github.com/prometheus/common v0.47.0 h1:p5Cz0FNHo7SnWOmWmoRozVcjEp0bIVU8cV7OShpjL1k= -github.com/prometheus/common v0.47.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= -github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= -github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= +github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= +github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= +github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/common v0.52.3 h1:5f8uj6ZwHSscOGNdIQg6OiZv/ybiK2CO2q2drVZAQSA= +github.com/prometheus/common v0.52.3/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U= +github.com/prometheus/procfs v0.13.0 h1:GqzLlQyfsPbaEHaQkO7tbDlriv/4o5Hudv6OXHGKX7o= +github.com/prometheus/procfs v0.13.0/go.mod h1:cd4PFCR54QLnGKPaKGA6l+cfuNXtht43ZKY6tow0Y1g= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= -github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM= -github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= +github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= @@ -43,7 +43,7 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod 
h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= -google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= +golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= From 048b78b462c840d20188958713dfed27f3a8cbdd Mon Sep 17 00:00:00 2001 From: wei Date: Fri, 12 Jul 2024 15:40:48 +0800 Subject: [PATCH 16/28] add HTTP_INSECURE config (#337) Co-authored-by: Wei Wan --- cmd/exporter/main.go | 5 +++- collectors/collector_manager.go | 4 +-- collectors/collector_manager_test.go | 2 +- config/http_config.go | 7 ++++++ config/http_config_test.go | 33 +++++++++++++++++++++++++ fetcher/logstash_client/client.go | 11 ++++++--- fetcher/logstash_client/client_test.go | 4 +-- fetcher/logstash_client/queries_test.go | 4 +-- 8 files changed, 59 insertions(+), 11 deletions(-) diff --git a/cmd/exporter/main.go b/cmd/exporter/main.go index d9ab6762..bc49bf03 100644 --- a/cmd/exporter/main.go +++ b/cmd/exporter/main.go @@ -41,6 +41,9 @@ func main() { versionInfo := config.GetVersionInfo() slog.Info(versionInfo.String()) + httpInsecure := config.GetHttpInsecure() + slog.Debug("http insecure", "insecure", httpInsecure) + httpTimeout, err := config.GetHttpTimeout() if err != nil { slog.Error("failed to get http timeout", "err", err) @@ -48,7 +51,7 @@ func main() { } slog.Debug("http timeout", "timeout", httpTimeout) - collectorManager := collectors.NewCollectorManager(logstashUrl, httpTimeout) + collectorManager := collectors.NewCollectorManager(logstashUrl, httpInsecure, httpTimeout) appServer := server.NewAppServer(host, port, httpTimeout) prometheus.MustRegister(collectorManager) diff --git a/collectors/collector_manager.go b/collectors/collector_manager.go index c57cade7..994bebc4 100644 --- a/collectors/collector_manager.go +++ b/collectors/collector_manager.go @@ -25,8 +25,8 @@ type CollectorManager struct { timeout time.Duration } -func NewCollectorManager(endpoint string, timeout time.Duration) *CollectorManager { - client := logstashclient.NewClient(endpoint) +func NewCollectorManager(endpoint string, insecure bool, timeout time.Duration) *CollectorManager { + client := logstashclient.NewClient(endpoint, insecure) collectors := getCollectors(client) diff --git a/collectors/collector_manager_test.go b/collectors/collector_manager_test.go index 81180d95..3e2e2471 100644 --- a/collectors/collector_manager_test.go +++ b/collectors/collector_manager_test.go @@ -14,7 +14,7 @@ const httpTimeout = 2 * time.Second func TestNewCollectorManager(t *testing.T) { mockEndpoint := "http://localhost:9600" - cm := NewCollectorManager(mockEndpoint, httpTimeout) + cm := NewCollectorManager(mockEndpoint, false, httpTimeout) if cm == nil { t.Error("Expected collector manager to be initialized") diff --git a/config/http_config.go b/config/http_config.go index fcccea4a..68ccca1d 100644 --- a/config/http_config.go +++ b/config/http_config.go @@ -2,12 +2,14 @@ package config import ( "os" + "strconv" "time" ) const ( defaultHttpTimeout = time.Second * 2 httpTimeoutEnvVar = "HTTP_TIMEOUT" + 
httpInsecureEnvVar = "HTTP_INSECURE" ) func GetHttpTimeout() (time.Duration, error) { @@ -23,3 +25,8 @@ func GetHttpTimeout() (time.Duration, error) { return timeout, nil } + +func GetHttpInsecure() bool { + b, _ := strconv.ParseBool(os.Getenv(httpInsecureEnvVar)) + return b +} diff --git a/config/http_config_test.go b/config/http_config_test.go index e8bc6b6f..d9979863 100644 --- a/config/http_config_test.go +++ b/config/http_config_test.go @@ -44,3 +44,36 @@ func TestGetHttpTimeout(t *testing.T) { } }) } + +func TestGetHttpInsecure(t *testing.T) { + t.Run("DefaultInsecure", func(t *testing.T) { + t.Parallel() + os.Unsetenv(httpInsecureEnvVar) + insecure := GetHttpInsecure() + if insecure != false { + t.Errorf("Expected default insecure of %v, got %v", false, insecure) + } + }) + + t.Run("CustomInsecure", func(t *testing.T) { + t.Parallel() + expectedInsecure := true + os.Setenv(httpInsecureEnvVar, "true") + defer os.Unsetenv(httpInsecureEnvVar) + insecure := GetHttpInsecure() + if insecure != expectedInsecure { + t.Errorf("Expected insecure of %v, got %v", expectedInsecure, insecure) + } + }) + + t.Run("InvalidInsecure", func(t *testing.T) { + t.Parallel() + expectedInsecure := false + os.Setenv(httpInsecureEnvVar, "invalid") + defer os.Unsetenv(httpInsecureEnvVar) + insecure := GetHttpInsecure() + if insecure != expectedInsecure { + t.Errorf("Expected insecure of %v, got %v", expectedInsecure, insecure) + } + }) +} diff --git a/fetcher/logstash_client/client.go b/fetcher/logstash_client/client.go index bced14a7..fc49844a 100644 --- a/fetcher/logstash_client/client.go +++ b/fetcher/logstash_client/client.go @@ -2,6 +2,7 @@ package logstash_client import ( "context" + "crypto/tls" "encoding/json" "net/http" @@ -23,14 +24,18 @@ type DefaultClient struct { const defaultLogstashEndpoint = "http://localhost:9600" // NewClient returns a new instance of the DefaultClient configured with the given endpoint -func NewClient(endpoint string) Client { +func NewClient(endpoint string, insecure bool) Client { if endpoint == "" { endpoint = defaultLogstashEndpoint } return &DefaultClient{ - httpClient: &http.Client{}, - endpoint: endpoint, + httpClient: &http.Client{ + Transport: &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: insecure}, + }, + }, + endpoint: endpoint, } } diff --git a/fetcher/logstash_client/client_test.go b/fetcher/logstash_client/client_test.go index a7e100fd..a1af3b32 100644 --- a/fetcher/logstash_client/client_test.go +++ b/fetcher/logstash_client/client_test.go @@ -16,7 +16,7 @@ type TestResponse struct { func TestNewClient(t *testing.T) { t.Run("should return a new client for the default endpoint", func(t *testing.T) { - client := NewClient("") + client := NewClient("", false) if client.(*DefaultClient).endpoint != defaultLogstashEndpoint { t.Errorf("expected endpoint to be %s, got %s", defaultLogstashEndpoint, client.(*DefaultClient).endpoint) @@ -25,7 +25,7 @@ func TestNewClient(t *testing.T) { t.Run("should return a new client for the given endpoint", func(t *testing.T) { endpoint := "http://localhost:9601" - client := NewClient(endpoint) + client := NewClient(endpoint, false) if client.(*DefaultClient).endpoint != endpoint { t.Errorf("expected endpoint to be %s, got %s", endpoint, client.(*DefaultClient).endpoint) diff --git a/fetcher/logstash_client/queries_test.go b/fetcher/logstash_client/queries_test.go index faa6701b..a05cb51e 100644 --- a/fetcher/logstash_client/queries_test.go +++ b/fetcher/logstash_client/queries_test.go @@ -22,7 +22,7 @@ func 
TestGetNodeInfo(t *testing.T) { })) defer ts.Close() - client := NewClient(ts.URL) + client := NewClient(ts.URL, false) response, err := client.GetNodeInfo(context.Background()) if err != nil { @@ -49,7 +49,7 @@ func TestGetNodeStats(t *testing.T) { })) defer ts.Close() - client := NewClient(ts.URL) + client := NewClient(ts.URL, false) response, err := client.GetNodeStats(context.Background()) if err != nil { From db696dbcfe5a91d288d5ad44ce8ccbea97e65978 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Wed, 17 Jul 2024 10:08:12 +0200 Subject: [PATCH 17/28] Update multiple dependencies (#339) Update docker, helm and go dependencies --- .../Chart.lock | 10 ++++----- .../Chart.yaml | 6 ++--- Dockerfile | 2 +- Dockerfile.dev | 4 ++-- docker-compose.yml | 8 +++---- go.mod | 11 +++++----- go.sum | 22 ++++++++++--------- 7 files changed, 33 insertions(+), 30 deletions(-) diff --git a/.helm/logstash-integration-test-chart/Chart.lock b/.helm/logstash-integration-test-chart/Chart.lock index cee219bf..de60d2be 100644 --- a/.helm/logstash-integration-test-chart/Chart.lock +++ b/.helm/logstash-integration-test-chart/Chart.lock @@ -4,12 +4,12 @@ dependencies: version: 8.5.1 - name: apache repository: https://charts.bitnami.com/bitnami - version: 9.2.23 + version: 11.2.8 - name: prometheus repository: https://prometheus-community.github.io/helm-charts - version: 20.2.0 + version: 25.24.0 - name: logstash-exporter repository: file://../../chart/ - version: v1.0.2 -digest: sha256:1733217c222c212aac4b58826c147d7acb2f61fe01ce6985b139050803915d92 -generated: "2023-04-12T10:38:00.905306965+02:00" + version: 1.6.1 +digest: sha256:f42ffd0c05263f4950c9c62aea6ec9d582a68544baab4f2c454df18d17f9d150 +generated: "2024-07-17T09:59:13.512725+02:00" diff --git a/.helm/logstash-integration-test-chart/Chart.yaml b/.helm/logstash-integration-test-chart/Chart.yaml index ca757646..16b37c47 100644 --- a/.helm/logstash-integration-test-chart/Chart.yaml +++ b/.helm/logstash-integration-test-chart/Chart.yaml @@ -5,16 +5,16 @@ type: application version: 0.1.0 dependencies: - name: logstash - version: "8.5.1" + version: "^8.5.1" repository: https://helm.elastic.co condition: logstash.enabled # we are replacing elasticsearch with apache for testing purposes - name: apache - version: "^9.2.23" + version: "^11.2.8" repository: https://charts.bitnami.com/bitnami condition: apache.enabled - name: prometheus - version: "^20.2.0" + version: "^25.24.0" repository: https://prometheus-community.github.io/helm-charts condition: prometheus.enabled - name: logstash-exporter diff --git a/Dockerfile b/Dockerfile index 7020ebd5..94dcdb82 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.22.0-alpine3.19 as build +FROM golang:1.22.5-alpine3.20 as build ARG VERSION \ GIT_COMMIT \ diff --git a/Dockerfile.dev b/Dockerfile.dev index 130daf9a..a59631dc 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,6 +1,6 @@ -FROM cosmtrek/air:v1.49.0 as air +FROM cosmtrek/air:v1.52.3 as air -FROM golang:1.22.0-alpine3.19 +FROM golang:1.22.5-alpine3.20 WORKDIR /app diff --git a/docker-compose.yml b/docker-compose.yml index 6cf29ac5..01253196 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: "3.8" services: logstash: - image: docker.elastic.co/logstash/logstash:8.9.0 + image: docker.elastic.co/logstash/logstash:8.14.3 restart: unless-stopped volumes: - logstash-data:/usr/share/logstash/data @@ -23,7 +23,7 @@ services: - ${LOGSTASH_PORT:-5044}:5044 - ${LOGSTASH_STATS_PORT:-9600}:9600 prometheus: - image: 
prom/prometheus:v2.46.0 + image: prom/prometheus:v2.53.1 restart: unless-stopped volumes: - prometheus-data:/prometheus @@ -42,7 +42,7 @@ services: timeout: 10s retries: 8 elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.9.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3 restart: unless-stopped volumes: - elasticsearch-data:/usr/share/elasticsearch/data @@ -65,7 +65,7 @@ services: ports: - ${ELASTICSEARCH_PORT:-9200}:9200 grafana: - image: grafana/grafana:10.0.3 + image: grafana/grafana:10.2.8 restart: unless-stopped ports: - ${GRAFANA_PORT:-3000}:3000 diff --git a/go.mod b/go.mod index a5cb6526..a2a9ccf5 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.22 require ( github.com/joho/godotenv v1.5.1 - github.com/prometheus/client_golang v1.19.0 + github.com/prometheus/client_golang v1.19.1 ) require ( @@ -13,6 +13,7 @@ require ( github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/maruel/natural v1.1.1 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/tidwall/gjson v1.17.1 // indirect github.com/tidwall/match v1.1.1 // indirect @@ -25,8 +26,8 @@ require ( github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/gkampitakis/go-snaps v0.5.3 github.com/prometheus/client_model v0.6.1 - github.com/prometheus/common v0.52.3 // indirect - github.com/prometheus/procfs v0.13.0 // indirect - golang.org/x/sys v0.19.0 // indirect - google.golang.org/protobuf v1.33.0 // indirect + github.com/prometheus/common v0.55.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + golang.org/x/sys v0.22.0 // indirect + google.golang.org/protobuf v1.34.2 // indirect ) diff --git a/go.sum b/go.sum index b5cca807..215b5ce8 100644 --- a/go.sum +++ b/go.sum @@ -21,15 +21,17 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= -github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= +github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= +github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= -github.com/prometheus/common v0.52.3 h1:5f8uj6ZwHSscOGNdIQg6OiZv/ybiK2CO2q2drVZAQSA= -github.com/prometheus/common v0.52.3/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U= -github.com/prometheus/procfs v0.13.0 h1:GqzLlQyfsPbaEHaQkO7tbDlriv/4o5Hudv6OXHGKX7o= -github.com/prometheus/procfs v0.13.0/go.mod h1:cd4PFCR54QLnGKPaKGA6l+cfuNXtht43ZKY6tow0Y1g= +github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc= 
+github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= @@ -43,7 +45,7 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= -golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= From 8a41fefd559e9ddb2ba85ddffe6663913defef65 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Mon, 7 Oct 2024 17:46:46 +0200 Subject: [PATCH 18/28] Upgrade toolset to use go 1.23 (#362) --- .github/workflows/go-application.yml | 26 +++++++++++++------------- Dockerfile | 2 +- Dockerfile.dev | 4 ++-- go.mod | 11 ++++++----- go.sum | 26 ++++++++++++++++++-------- 5 files changed, 40 insertions(+), 29 deletions(-) diff --git a/.github/workflows/go-application.yml b/.github/workflows/go-application.yml index 63f29691..d997173b 100644 --- a/.github/workflows/go-application.yml +++ b/.github/workflows/go-application.yml @@ -24,12 +24,12 @@ jobs: uses: actions/checkout@v4 - name: Setup Golang with cache - uses: magnetikonline/action-golang-cache@v4 + uses: magnetikonline/action-golang-cache@v5 with: - go-version: "^1.22" + go-version: "^1.23" - name: Lint code using golangci-lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v6 with: version: latest @@ -46,9 +46,9 @@ jobs: uses: actions/checkout@v4 - name: Setup Golang with cache - uses: magnetikonline/action-golang-cache@v4 + uses: magnetikonline/action-golang-cache@v5 with: - go-version: "^1.22" + go-version: "^1.23" - name: Test code run: go test ./... 
-covermode=atomic -coverprofile=coverage.out @@ -169,9 +169,9 @@ jobs: uses: actions/checkout@v4 - name: Setup Golang with cache - uses: magnetikonline/action-golang-cache@v4 + uses: magnetikonline/action-golang-cache@v5 with: - go-version: "^1.22" + go-version: "^1.23" - name: Build Linux binary run: make build-linux @@ -194,9 +194,9 @@ jobs: uses: actions/checkout@v4 - name: Setup Golang with cache - uses: magnetikonline/action-golang-cache@v4 + uses: magnetikonline/action-golang-cache@v5 with: - go-version: "^1.22" + go-version: "^1.23" - name: Build Linux ARM binary run: make build-linux-arm @@ -219,9 +219,9 @@ jobs: uses: actions/checkout@v4 - name: Setup Golang with cache - uses: magnetikonline/action-golang-cache@v4 + uses: magnetikonline/action-golang-cache@v5 with: - go-version: "^1.22" + go-version: "^1.23" - name: Build Mac binary run: make build-darwin @@ -244,9 +244,9 @@ jobs: uses: actions/checkout@v4 - name: Setup Golang with cache - uses: magnetikonline/action-golang-cache@v4 + uses: magnetikonline/action-golang-cache@v5 with: - go-version: "^1.22" + go-version: "^1.23" - name: Build Windows binary run: make build-windows diff --git a/Dockerfile b/Dockerfile index 94dcdb82..7569353e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.22.5-alpine3.20 as build +FROM golang:1.23.1-alpine3.19 as build ARG VERSION \ GIT_COMMIT \ diff --git a/Dockerfile.dev b/Dockerfile.dev index a59631dc..b1704911 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,6 +1,6 @@ -FROM cosmtrek/air:v1.52.3 as air +FROM cosmtrek/air:v1.60.0 as air -FROM golang:1.22.5-alpine3.20 +FROM golang:1.23.1-alpine3.19 WORKDIR /app diff --git a/go.mod b/go.mod index a2a9ccf5..2568f433 100644 --- a/go.mod +++ b/go.mod @@ -1,15 +1,16 @@ module github.com/kuskoman/logstash-exporter -go 1.22 +go 1.23 require ( github.com/joho/godotenv v1.5.1 - github.com/prometheus/client_golang v1.19.1 + github.com/prometheus/client_golang v1.20.4 ) require ( github.com/gkampitakis/ciinfo v0.3.0 // indirect github.com/gkampitakis/go-diff v1.3.2 // indirect + github.com/klauspost/compress v1.17.10 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/maruel/natural v1.1.1 // indirect @@ -26,8 +27,8 @@ require ( github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/gkampitakis/go-snaps v0.5.3 github.com/prometheus/client_model v0.6.1 - github.com/prometheus/common v0.55.0 // indirect + github.com/prometheus/common v0.60.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect - golang.org/x/sys v0.22.0 // indirect - google.golang.org/protobuf v1.34.2 // indirect + golang.org/x/sys v0.26.0 // indirect + google.golang.org/protobuf v1.35.1 // indirect ) diff --git a/go.sum b/go.sum index 215b5ce8..70478987 100644 --- a/go.sum +++ b/go.sum @@ -15,26 +15,34 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0= +github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 
h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= -github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v1.20.4 h1:Tgh3Yr67PaOv/uTqloMsCEdeuFTatm5zIq5+qNN23vI= +github.com/prometheus/client_golang v1.20.4/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= -github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc= -github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8= +github.com/prometheus/common v0.60.0 h1:+V9PAREWNvJMAuJ1x1BaWl9dewMW4YrHZQbx0sJNllA= +github.com/prometheus/common v0.60.0/go.mod h1:h0LYf1R1deLSKtD4Vdg8gy4RuOvENW2J/h19V5NADQw= github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= @@ -45,7 +53,9 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= -golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI= -golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= -google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +golang.org/x/sys v0.26.0 
h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From 771ee8bb78a269301060c274a9e6552ee3de5c7d Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Wed, 9 Oct 2024 14:35:49 +0200 Subject: [PATCH 19/28] Update snapshots and fixtures to match Logstash 8.15 (#367) --- Makefile | 4 + README.md | 1 + .../nodestats/nodestats_collector_test.go | 6 +- docker-compose.yml | 5 +- .../__snapshots__/nodeinfo_response_test.snap | 16 +- .../nodestats_response_test.snap | 182 ++++--- fixtures/node_info.json | 16 +- fixtures/node_stats.json | 501 +++++++++++------- 8 files changed, 446 insertions(+), 285 deletions(-) diff --git a/Makefile b/Makefile index b12c1f44..ef3cde69 100644 --- a/Makefile +++ b/Makefile @@ -113,6 +113,10 @@ clean-elasticsearch: update-readme-descriptions: ./scripts/add_descriptions_to_readme.sh +#: Updates snapshot for test data and runs tests +update-snapshots: + UPDATE_SNAPS=true go test ./... + #: Upgrades all dependencies upgrade-dependencies: go get -u ./... diff --git a/README.md b/README.md index a92ed6a9..ed303cc7 100644 --- a/README.md +++ b/README.md @@ -130,6 +130,7 @@ All configuration variables can be checked in the [config directory](./config/). - `make helm-readme`: Generates Helm chart README.md file. - `make clean-elasticsearch`: Cleans Elasticsearch data, works only with default ES port. The command may take a very long time to complete. - `make update-readme-descriptions`: Update Makefile descriptions in main README.md. +- `make update-snapshots`: Updates snapshot for test data and runs tests. - `make upgrade-dependencies`: Upgrades all dependencies. - `make help`: Shows info about available commands. 
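Note: the new update-snapshots target drives the go-snaps library through its UPDATE_SNAPS environment variable. A minimal, hypothetical sketch (not part of this patch; file, package, and test names are illustrative) of how such a snapshot test looks and how the target regenerates its .snap file:

    // snapshot_example_test.go (illustrative only)
    package example_test

    import (
        "testing"

        "github.com/gkampitakis/go-snaps/snaps"
    )

    // On the first run go-snaps writes the value to
    // __snapshots__/snapshot_example_test.snap; later runs compare against it,
    // and UPDATE_SNAPS=true (as set by `make update-snapshots`) rewrites a
    // mismatching snapshot instead of failing the test.
    func TestEventsSnapshot(t *testing.T) {
        events := map[string]int{"in": 3751, "filtered": 1250, "out": 1250}
        snaps.MatchSnapshot(t, events)
    }

A plain `go test ./...` only compares against the stored snapshots, which is why the snapshot refresh in this patch goes through the dedicated make target.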
diff --git a/collectors/nodestats/nodestats_collector_test.go b/collectors/nodestats/nodestats_collector_test.go index 66381b04..686791df 100644 --- a/collectors/nodestats/nodestats_collector_test.go +++ b/collectors/nodestats/nodestats_collector_test.go @@ -86,8 +86,8 @@ func TestCollectNotNil(t *testing.T) { "logstash_stats_pipeline_plugin_events_queue_push_duration", "logstash_stats_pipeline_plugin_documents_successes", "logstash_stats_pipeline_plugin_documents_non_retryable_failures", - "logstash_stats_pipeline_plugin_bulk_requests_errors", - "logstash_stats_pipeline_plugin_bulk_requests_responses", + "logstash_stats_pipeline_plugin_bulk_requests_errors", + "logstash_stats_pipeline_plugin_bulk_requests_responses", "logstash_stats_process_cpu_percent", "logstash_stats_process_cpu_total_millis", "logstash_stats_process_cpu_load_average_1m", @@ -109,7 +109,7 @@ func TestCollectNotNil(t *testing.T) { var foundMetrics []string for metric := range ch { if metric == nil { - t.Errorf("expected metric %s not to be nil", metric.Desc().String()) + t.Error("Expected metric not to be nil") } foundMetricDesc := metric.Desc().String() diff --git a/docker-compose.yml b/docker-compose.yml index 01253196..d63f3c43 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,6 @@ -version: "3.8" services: logstash: - image: docker.elastic.co/logstash/logstash:8.14.3 + image: docker.elastic.co/logstash/logstash:8.15.0 restart: unless-stopped volumes: - logstash-data:/usr/share/logstash/data @@ -42,7 +41,7 @@ services: timeout: 10s retries: 8 elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3 + image: docker.elastic.co/elasticsearch/elasticsearch:8.15.0 restart: unless-stopped volumes: - elasticsearch-data:/usr/share/elasticsearch/data diff --git a/fetcher/responses/__snapshots__/nodeinfo_response_test.snap b/fetcher/responses/__snapshots__/nodeinfo_response_test.snap index 0929d163..469f9045 100755 --- a/fetcher/responses/__snapshots__/nodeinfo_response_test.snap +++ b/fetcher/responses/__snapshots__/nodeinfo_response_test.snap @@ -2,17 +2,17 @@ [TestNodeInfoResponseStructure - 1] Unmarshalled NodeInfoResponse responses.NodeInfoResponse{ - Host: "9e6e14cf5532", - Version: "8.6.1", + Host: "814a8393fbd5", + Version: "8.15.0", HTTPAddress: "0.0.0.0:9600", - ID: "a2c7110e-5ccf-4226-bc9b-e773710e66a0", - Name: "9e6e14cf5532", - EphemeralID: "4a2ee04f-2733-4eaa-887d-675bad27f07c", + ID: "690de5cc-deb1-48d9-ba02-d4ec1b22e62a", + Name: "814a8393fbd5", + EphemeralID: "eb4d9042-5642-4e21-bb8d-27454b81c5bc", Status: "green", Snapshot: false, - Pipeline: struct { Workers int "json:\"workers\""; BatchSize int "json:\"batch_size\""; BatchDelay int "json:\"batch_delay\"" }{Workers:16, BatchSize:125, BatchDelay:50}, - BuildDate: "2023-01-24T10:41:55+00:00", - BuildSHA: "6a248b5091c490f09460db5651e5239d903b97cf", + Pipeline: struct { Workers int "json:\"workers\""; BatchSize int "json:\"batch_size\""; BatchDelay int "json:\"batch_delay\"" }{Workers:10, BatchSize:125, BatchDelay:50}, + BuildDate: "2024-07-24T09:37:48+00:00", + BuildSHA: "46b996c24da17cdc7a16bc3037edab5c6132ccd0", BuildSnapshot: false, } --- diff --git a/fetcher/responses/__snapshots__/nodestats_response_test.snap b/fetcher/responses/__snapshots__/nodestats_response_test.snap index 635db035..d4727f7c 100755 --- a/fetcher/responses/__snapshots__/nodestats_response_test.snap +++ b/fetcher/responses/__snapshots__/nodestats_response_test.snap @@ -2,69 +2,59 @@ [TestNodeStatsResponseStructure - 1] Unmarshalled NodestatsResponse 
responses.NodeStatsResponse{ - Host: "f26e584ecf05", - Version: "8.8.1", + Host: "814a8393fbd5", + Version: "8.15.0", HttpAddress: "0.0.0.0:9600", - Id: "096d672d-50d5-420b-a27c-254c089bdd78", - Name: "f26e584ecf05", - EphemeralId: "25135ee3-be69-4076-bda1-e27524d9ee93", + Id: "690de5cc-deb1-48d9-ba02-d4ec1b22e62a", + Name: "814a8393fbd5", + EphemeralId: "eb4d9042-5642-4e21-bb8d-27454b81c5bc", Status: "green", Snapshot: false, - Pipeline: responses.PipelineResponse{Workers:16, BatchSize:125, BatchDelay:50}, + Pipeline: responses.PipelineResponse{Workers:10, BatchSize:125, BatchDelay:50}, Jvm: responses.JvmResponse{ - Threads: struct { Count int "json:\"count\""; PeakCount int "json:\"peak_count\"" }{Count:60, PeakCount:60}, + Threads: struct { Count int "json:\"count\""; PeakCount int "json:\"peak_count\"" }{Count:65, PeakCount:65}, Mem: struct { HeapUsedPercent int "json:\"heap_used_percent\""; HeapCommittedInBytes int64 "json:\"heap_committed_in_bytes\""; HeapMaxInBytes int64 "json:\"heap_max_in_bytes\""; HeapUsedInBytes int64 "json:\"heap_used_in_bytes\""; NonHeapUsedInBytes int64 "json:\"non_heap_used_in_bytes\""; NonHeapCommittedInBytes int64 "json:\"non_heap_committed_in_bytes\""; Pools struct { Young struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"young\""; Old struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"old\""; Survivor struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"survivor\"" } "json:\"pools\"" }{ - HeapUsedPercent: 27, - HeapCommittedInBytes: 17179869184, + HeapUsedPercent: 36, + HeapCommittedInBytes: 1073741824, HeapMaxInBytes: 1073741822, - HeapUsedInBytes: 294044784, - NonHeapUsedInBytes: 147703688, - NonHeapCommittedInBytes: 155189248, + HeapUsedInBytes: 395837440, + NonHeapUsedInBytes: 172607568, + NonHeapCommittedInBytes: 183304192, Pools: struct { Young struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"young\""; Old struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" } "json:\"old\""; Survivor struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" } "json:\"survivor\"" }{ - Young: struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, 
MaxInBytes:-1, CommittedInBytes:346030080, PeakUsedInBytes:326107136, UsedInBytes:180355072}, - Old: struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" }{PeakMaxInBytes:1073741824, MaxInBytes:1073741824, CommittedInBytes:687865856, PeakUsedInBytes:73986560, UsedInBytes:73986560}, - Survivor: struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, MaxInBytes:-1, CommittedInBytes:39845888, PeakUsedInBytes:39703152, UsedInBytes:39703152}, + Young: struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, MaxInBytes:-1, CommittedInBytes:668991488, PeakUsedInBytes:351272960, UsedInBytes:257949696}, + Old: struct { PeakMaxInBytes int64 "json:\"peak_max_in_bytes\""; MaxInBytes int64 "json:\"max_in_bytes\""; CommittedInBytes int64 "json:\"committed_in_bytes\""; PeakUsedInBytes int64 "json:\"peak_used_in_bytes\""; UsedInBytes int64 "json:\"used_in_bytes\"" }{PeakMaxInBytes:1073741824, MaxInBytes:1073741824, CommittedInBytes:397410304, PeakUsedInBytes:132644864, UsedInBytes:130547712}, + Survivor: struct { PeakMaxInBytes int "json:\"peak_max_in_bytes\""; MaxInBytes int "json:\"max_in_bytes\""; CommittedInBytes int "json:\"committed_in_bytes\""; PeakUsedInBytes int "json:\"peak_used_in_bytes\""; UsedInBytes int "json:\"used_in_bytes\"" }{PeakMaxInBytes:-1, MaxInBytes:-1, CommittedInBytes:7340032, PeakUsedInBytes:47185920, UsedInBytes:7340032}, }, }, Gc: struct { Collectors struct { Young struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" } "json:\"young\""; Old struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" } "json:\"old\"" } "json:\"collectors\"" }{ Collectors: struct { Young struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" } "json:\"young\""; Old struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" } "json:\"old\"" }{ - Young: struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" }{CollectionCount:8, CollectionTimeInMillis:224}, + Young: struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" }{CollectionCount:11, CollectionTimeInMillis:110}, Old: struct { CollectionCount int "json:\"collection_count\""; CollectionTimeInMillis int "json:\"collection_time_in_millis\"" }{}, }, }, - UptimeInMillis: 53120, + UptimeInMillis: 56226, }, Process: responses.ProcessResponse{ - OpenFileDescriptors: 98, - PeakOpenFileDescriptors: 98, + OpenFileDescriptors: 108, + PeakOpenFileDescriptors: 109, MaxFileDescriptors: 1048576, - Mem: struct { TotalVirtualInBytes int64 "json:\"total_virtual_in_bytes\"" }{TotalVirtualInBytes:9305346048}, + Mem: struct { TotalVirtualInBytes int64 
"json:\"total_virtual_in_bytes\"" }{TotalVirtualInBytes:8282685440}, CPU: struct { TotalInMillis int64 "json:\"total_in_millis\""; Percent int64 "json:\"percent\""; LoadAverage struct { OneM float64 "json:\"1m\""; FiveM float64 "json:\"5m\""; FifteenM float64 "json:\"15m\"" } "json:\"load_average\"" }{ - TotalInMillis: 135300, + TotalInMillis: 35730, Percent: 0, - LoadAverage: struct { OneM float64 "json:\"1m\""; FiveM float64 "json:\"5m\""; FifteenM float64 "json:\"15m\"" }{OneM:3.79, FiveM:1.29, FifteenM:0.46}, + LoadAverage: struct { OneM float64 "json:\"1m\""; FiveM float64 "json:\"5m\""; FifteenM float64 "json:\"15m\"" }{OneM:2.25, FiveM:0.68, FifteenM:0.24}, }, }, - Events: responses.EventsResponse{In:4001, Filtered:10, Out:2, DurationInMillis:5, QueuePushDurationInMillis:7}, + Events: responses.EventsResponse{In:3751, Filtered:1250, Out:1250, DurationInMillis:494960, QueuePushDurationInMillis:49451}, Flow: responses.FlowResponse{ - InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:117.4}, - FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:2.1, Lifetime:3.2}, - OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:4.3, Lifetime:5.4}, - QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:6.5, Lifetime:7.6}, - WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:8.7, Lifetime:9.8}, - }, - Reloads: responses.ReloadResponse{}, - Os: responses.OsResponse{ - Cgroup: struct { Cpu struct { CfsPeriodMicros int64 "json:\"cfs_period_micros\""; CfsQuotaMicros int64 "json:\"cfs_quota_micros\""; Stat struct { TimeThrottledNanos int64 "json:\"time_throttled_nanos\""; NumberOfTimesThrottled int64 "json:\"number_of_times_throttled\""; NumberOfElapsedPeriods int64 "json:\"number_of_elapsed_periods\"" } "json:\"stat\""; ControlGroup string "json:\"control_group\"" } "json:\"cpu\""; Cpuacct struct { UsageNanos int64 "json:\"usage_nanos\""; ControlGroup string "json:\"control_group\"" } "json:\"cpuacct\"" }{ - Cpu: struct { CfsPeriodMicros int64 "json:\"cfs_period_micros\""; CfsQuotaMicros int64 "json:\"cfs_quota_micros\""; Stat struct { TimeThrottledNanos int64 "json:\"time_throttled_nanos\""; NumberOfTimesThrottled int64 "json:\"number_of_times_throttled\""; NumberOfElapsedPeriods int64 "json:\"number_of_elapsed_periods\"" } "json:\"stat\""; ControlGroup string "json:\"control_group\"" }{ - CfsPeriodMicros: 100000, - CfsQuotaMicros: -1, - Stat: struct { TimeThrottledNanos int64 "json:\"time_throttled_nanos\""; NumberOfTimesThrottled int64 "json:\"number_of_times_throttled\""; NumberOfElapsedPeriods int64 "json:\"number_of_elapsed_periods\"" }{}, - ControlGroup: "/", - }, - Cpuacct: struct { UsageNanos int64 "json:\"usage_nanos\""; ControlGroup string "json:\"control_group\"" }{UsageNanos:161531487900, ControlGroup:"/"}, - }, + InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:73.9}, + FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, + OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, + QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 
"json:\"lifetime\"" }{Current:1, Lifetime:0.9743}, + WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:10, Lifetime:9.752}, }, + Reloads: responses.ReloadResponse{}, + Os: responses.OsResponse{}, Queue: responses.QueueResponse{}, Pipelines: { ".monitoring-logstash": { @@ -73,15 +63,36 @@ responses.NodeStatsResponse{ Flow: responses.FlowResponse{}, Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\"" } "json:\"outputs\"" }{ Inputs: { + { + ID: "9a9bed30135e19c8047fe6aa0588b70b15280fb9161fea8ed8e7368e1fb1e6d3", + Name: "", + Events: struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{}, + }, }, Codecs: { }, Filters: { }, Outputs: { + { + ID: "e7aab1e17ec42d6573f3be4b8fce17c5dc69db8473f9505f386b1160434b141b", + Name: "", + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, + Documents: struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" }{}, + BulkRequests: struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" }{}, + }, + }, + }, + Reloads: responses.PipelineReloadResponse{ + LastFailureTimestamp: time.Date(2023, time.April, 20, 20, 0, 32, 437218256, time.UTC), + Successes: 3, + Failures: 1, + LastSuccessTimestamp: time.Date(2023, time.April, 20, 22, 30, 32, 437218256, time.UTC), + LastError: responses.LastError{ + Message: "No configuration found in the configured sources.", + Backtrace: {"org/logstash/execution/AbstractPipelineExt.java:151:in `reload_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:181:in `block in reload_pipeline'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"}, }, }, - Reloads: responses.PipelineReloadResponse{}, Queue: struct { Type string "json:\"type\""; EventsCount int64 "json:\"events_count\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\"" }{}, DeadLetterQueue: struct { MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; DroppedEvents int64 
"json:\"dropped_events\""; ExpiredEvents int64 "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{}, Hash: "", @@ -89,80 +100,97 @@ responses.NodeStatsResponse{ }, "main": { Monitoring: responses.PipelineLogstashMonitoringResponse{}, - Events: struct { Out int "json:\"out\""; Filtered int "json:\"filtered\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{Out:0, Filtered:0, In:4001, DurationInMillis:0, QueuePushDurationInMillis:0}, + Events: struct { Out int "json:\"out\""; Filtered int "json:\"filtered\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{Out:1250, Filtered:1250, In:3751, DurationInMillis:495018, QueuePushDurationInMillis:49455}, Flow: responses.FlowResponse{ - InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:6.7, Lifetime:124}, - FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:7.8, Lifetime:8.9}, - OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:2.3, Lifetime:3.4}, - QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1.1, Lifetime:2.2}, - WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:4.5, Lifetime:5.6}, + InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:74.88}, + FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, + OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, + QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:0.9872}, + WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:10, Lifetime:9.882}, }, Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\"" } 
"json:\"outputs\"" }{ Inputs: { { - ID: "c75c0c6f97fd2c8605b95a5b2694fdae97189fe49553787a923faeaa3342c54a", + ID: "5ee0ea3d45c32bab3b41963bd900e758ba6e193a11079649302574c706fd5e2f", + Name: "dead_letter_queue", + Events: struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{}, + }, + { + ID: "95bb3e4f2a40f87147b6ab5035e08ba31858eace7604a57d2e719db790097222", Name: "generator", - Events: struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{Out:4001, QueuePushDurationInMillis:0}, + Events: struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{Out:3751, QueuePushDurationInMillis:49454}, }, }, Codecs: { { - ID: "plain_d7fb272d-75f5-4e38-bf56-c4c798bea2d1", + ID: "plain_4ffdd0bf-b707-419b-b425-d3986a43c35a", Name: "plain", Decode: struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, Encode: struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, }, { - ID: "plain_35cb2d25-0d8d-441a-a714-82121715864d", + ID: "plain_b4b163b6-effd-454a-9605-c3f8ef0cde5e", Name: "plain", - Decode: struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:4001, WritesIn:4001, DurationInMillis:57}, + Decode: struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, + Encode: struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, + }, + { + ID: "plain-codec-001", + Name: "plain", + Decode: struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, Encode: struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, }, }, Filters: { { - ID: "1721e7c39758977b227be1d9334f0752555f39c873b8b86a3df8546f64059112", - Name: "json", - Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:2000, In:2000, DurationInMillis:716}, + ID: "prune-http-input-fields", + Name: "prune", + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:127}, + }, + { + ID: "ca953dac49c8fd3b00ba8275af10f9c6bcd9ca95755cd7892952966c5a13d427", + Name: "ruby", + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:2500, DurationInMillis:489610}, }, { - ID: "drop_b8ed8ea8c0ace91d6b617f6c8a5153141183c35a330de014182825dbceeade00", + ID: "drop-non-existent", Name: "drop", - Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:300, In:330, DurationInMillis:333}, + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, + }, + { + ID: "json-filter", + Name: "json", + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:214}, + }, + { + ID: "mutate-path-001", + Name: "mutate", + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:170}, }, { - ID: 
"drop_e2e0f559b7292f788693f9f318185d5c1d30127870ca8f0e608b11d9dc560079", + ID: "drop-80-percent", Name: "drop", - Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:800, In:880, DurationInMillis:888}, + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, }, }, Outputs: { { - ID: "45554a51a53a57f5dbba7d26b65aad526147453a895529f3d4698c8fd88692ef", + ID: "81d12b78dc90935689721a2220cbd7ad2f75910cdafdc01b5b77f6ca8f356cef", Name: "elasticsearch", - Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:0, In:2000, DurationInMillis:0}, - Documents: struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" }{Successes:1337, NonRetryableFailures:87}, + Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:4887}, + Documents: struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" }{Successes:1250, NonRetryableFailures:0}, BulkRequests: struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" }{ - WithErrors: 87, - Responses: {"200":87}, + WithErrors: 0, + Responses: {"200":10}, }, }, }, }, - Reloads: responses.PipelineReloadResponse{ - LastFailureTimestamp: time.Date(2023, time.April, 20, 20, 0, 32, 437218256, time.UTC), - Successes: 3, - Failures: 1, - LastSuccessTimestamp: time.Date(2023, time.April, 20, 22, 30, 32, 437218256, time.UTC), - LastError: responses.LastError{ - Message: "No configuration found in the configured sources.", - Backtrace: {"org/logstash/execution/AbstractPipelineExt.java:151:in `reload_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:181:in `block in reload_pipeline'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"}, - }, - }, + Reloads: responses.PipelineReloadResponse{}, Queue: struct { Type string "json:\"type\""; EventsCount int64 "json:\"events_count\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\"" }{Type:"memory", EventsCount:0, QueueSizeInBytes:0, MaxQueueSizeInBytes:0}, - DeadLetterQueue: struct { MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; DroppedEvents int64 "json:\"dropped_events\""; ExpiredEvents int64 "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{MaxQueueSizeInBytes:47244640256, QueueSizeInBytes:1, DroppedEvents:0, ExpiredEvents:0, StoragePolicy:"drop_newer"}, - Hash: "a73729cc9c29203931db21553c5edba063820a7e40d16cb5053be75cc3811a17", - EphemeralID: "a5c63d09-1ba6-4d67-90a5-075f468a7ab0", + DeadLetterQueue: struct { MaxQueueSizeInBytes int64 "json:\"max_queue_size_in_bytes\""; QueueSizeInBytes int64 "json:\"queue_size_in_bytes\""; DroppedEvents int64 "json:\"dropped_events\""; ExpiredEvents int64 "json:\"expired_events\""; StoragePolicy string "json:\"storage_policy\"" }{MaxQueueSizeInBytes:1073741824, QueueSizeInBytes:1, DroppedEvents:0, ExpiredEvents:0, StoragePolicy:"drop_newer"}, + Hash: "d30c4ff4da9fdb1a6b06ee390df1336aa80cc5ce6582d316af3dc0695af2d82e", + EphemeralID: "31caf4d6-162d-4eeb-bc04-411ae2e996f1", }, }, } diff --git a/fixtures/node_info.json 
b/fixtures/node_info.json index c7964437..8b3a6923 100644 --- a/fixtures/node_info.json +++ b/fixtures/node_info.json @@ -1,18 +1,18 @@ { - "host": "9e6e14cf5532", - "version": "8.6.1", + "host": "814a8393fbd5", + "version": "8.15.0", "http_address": "0.0.0.0:9600", - "id": "a2c7110e-5ccf-4226-bc9b-e773710e66a0", - "name": "9e6e14cf5532", - "ephemeral_id": "4a2ee04f-2733-4eaa-887d-675bad27f07c", + "id": "690de5cc-deb1-48d9-ba02-d4ec1b22e62a", + "name": "814a8393fbd5", + "ephemeral_id": "eb4d9042-5642-4e21-bb8d-27454b81c5bc", "status": "green", "snapshot": false, "pipeline": { - "workers": 16, + "workers": 10, "batch_size": 125, "batch_delay": 50 }, - "build_date": "2023-01-24T10:41:55+00:00", - "build_sha": "6a248b5091c490f09460db5651e5239d903b97cf", + "build_date": "2024-07-24T09:37:48+00:00", + "build_sha": "46b996c24da17cdc7a16bc3037edab5c6132ccd0", "build_snapshot": false } diff --git a/fixtures/node_stats.json b/fixtures/node_stats.json index e872ad8f..e269266a 100644 --- a/fixtures/node_stats.json +++ b/fixtures/node_stats.json @@ -1,58 +1,58 @@ { - "host": "f26e584ecf05", - "version": "8.8.1", + "host": "814a8393fbd5", + "version": "8.15.0", "http_address": "0.0.0.0:9600", - "id": "096d672d-50d5-420b-a27c-254c089bdd78", - "name": "f26e584ecf05", - "ephemeral_id": "25135ee3-be69-4076-bda1-e27524d9ee93", + "id": "690de5cc-deb1-48d9-ba02-d4ec1b22e62a", + "name": "814a8393fbd5", + "ephemeral_id": "eb4d9042-5642-4e21-bb8d-27454b81c5bc", "status": "green", "snapshot": false, "pipeline": { - "workers": 16, + "workers": 10, "batch_size": 125, "batch_delay": 50 }, "jvm": { "threads": { - "count": 60, - "peak_count": 60 + "count": 65, + "peak_count": 65 }, "mem": { - "heap_used_percent": 27, - "heap_committed_in_bytes": 17179869184, + "heap_used_percent": 36, + "heap_committed_in_bytes": 1073741824, "heap_max_in_bytes": 1073741822, - "heap_used_in_bytes": 294044784, - "non_heap_used_in_bytes": 147703688, - "non_heap_committed_in_bytes": 155189248, + "heap_used_in_bytes": 395837440, + "non_heap_used_in_bytes": 172607568, + "non_heap_committed_in_bytes": 183304192, "pools": { "young": { - "peak_max_in_bytes": -1, + "committed_in_bytes": 668991488, + "peak_used_in_bytes": 351272960, "max_in_bytes": -1, - "committed_in_bytes": 346030080, - "peak_used_in_bytes": 326107136, - "used_in_bytes": 180355072 - }, - "old": { - "peak_max_in_bytes": 1073741824, - "max_in_bytes": 1073741824, - "committed_in_bytes": 687865856, - "peak_used_in_bytes": 73986560, - "used_in_bytes": 73986560 + "used_in_bytes": 257949696, + "peak_max_in_bytes": -1 }, "survivor": { - "peak_max_in_bytes": -1, + "committed_in_bytes": 7340032, + "peak_used_in_bytes": 47185920, "max_in_bytes": -1, - "committed_in_bytes": 39845888, - "peak_used_in_bytes": 39703152, - "used_in_bytes": 39703152 + "used_in_bytes": 7340032, + "peak_max_in_bytes": -1 + }, + "old": { + "committed_in_bytes": 397410304, + "peak_used_in_bytes": 132644864, + "max_in_bytes": 1073741824, + "used_in_bytes": 130547712, + "peak_max_in_bytes": 1073741824 } } }, "gc": { "collectors": { "young": { - "collection_count": 8, - "collection_time_in_millis": 224 + "collection_count": 11, + "collection_time_in_millis": 110 }, "old": { "collection_count": 0, @@ -60,188 +60,379 @@ } } }, - "uptime_in_millis": 53120 + "uptime_in_millis": 56226 }, "process": { - "open_file_descriptors": 98, - "peak_open_file_descriptors": 98, + "open_file_descriptors": 108, + "peak_open_file_descriptors": 109, "max_file_descriptors": 1048576, "mem": { - "total_virtual_in_bytes": 9305346048 + 
"total_virtual_in_bytes": 8282685440 }, "cpu": { - "total_in_millis": 135300, + "total_in_millis": 35730, "percent": 0, "load_average": { - "1m": 3.79, - "5m": 1.29, - "15m": 0.46 + "1m": 2.25, + "5m": 0.68, + "15m": 0.24 } } }, "events": { - "in": 4001, - "filtered": 10, - "out": 2, - "duration_in_millis": 5, - "queue_push_duration_in_millis": 7 + "in": 3751, + "filtered": 1250, + "out": 1250, + "duration_in_millis": 494960, + "queue_push_duration_in_millis": 49451 }, "flow": { "input_throughput": { - "current": 1.0, - "lifetime": 117.4 + "current": 0.0, + "lifetime": 73.9 }, "filter_throughput": { - "current": 2.1, - "lifetime": 3.2 + "current": 0.0, + "lifetime": 24.63 }, "output_throughput": { - "current": 4.3, - "lifetime": 5.4 + "current": 0.0, + "lifetime": 24.63 }, "queue_backpressure": { - "current": 6.5, - "lifetime": 7.6 + "current": 1.0, + "lifetime": 0.9743 }, "worker_concurrency": { - "current": 8.7, - "lifetime": 9.8 + "current": 10.0, + "lifetime": 9.752 } }, "pipelines": { - "main": { + ".monitoring-logstash": { "events": { - "out": 0, "filtered": 0, - "in": 4001, "duration_in_millis": 0, - "queue_push_duration_in_millis": 0 + "queue_push_duration_in_millis": 0, + "out": 0, + "in": 0 }, "flow": { - "output_throughput": { - "current": 2.3, - "lifetime": 3.4 + "input_throughput": { + "current": 0.0, + "lifetime": 0.0 + }, + "filter_throughput": { + "current": 0.0, + "lifetime": 0.0 }, "worker_concurrency": { - "current": 4.5, - "lifetime": 5.6 + "current": 0.0, + "lifetime": 0.0 + }, + "queue_backpressure": { + "current": 0.0, + "lifetime": 0.0 }, + "output_throughput": { + "current": 0.0, + "lifetime": 0.0 + }, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.0 + } + }, + "plugins": { + "inputs": [ + { + "id": "9a9bed30135e19c8047fe6aa0588b70b15280fb9161fea8ed8e7368e1fb1e6d3", + "events": { + "out": 0 + }, + "flow": { + "throughput": { + "current": 0.0, + "lifetime": 0.0 + } + } + } + ], + "codecs": [], + "filters": [], + "outputs": [ + { + "id": "e7aab1e17ec42d6573f3be4b8fce17c5dc69db8473f9505f386b1160434b141b", + "events": { + "duration_in_millis": 0, + "in": 0 + }, + "flow": { + "worker_millis_per_event": {}, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.0 + } + } + } + ] + }, + "reloads": { + "last_failure_timestamp": "2023-04-20T20:00:32.437218256Z", + "successes": 3, + "failures": 1, + "last_success_timestamp": "2023-04-20T22:30:32.437218256Z", + "last_error": { + "message": "No configuration found in the configured sources.", + "backtrace": [ + "org/logstash/execution/AbstractPipelineExt.java:151:in `reload_pipeline'", + "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:181:in `block in reload_pipeline'", + "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'" + ] + } + }, + "queue": null + }, + "main": { + "events": { + "filtered": 1250, + "duration_in_millis": 495018, + "queue_push_duration_in_millis": 49455, + "out": 1250, + "in": 3751 + }, + "flow": { "input_throughput": { - "current": 6.7, - "lifetime": 124.0 + "current": 0.0, + "lifetime": 74.88 }, "filter_throughput": { - "current": 7.8, - "lifetime": 8.9 + "current": 0.0, + "lifetime": 24.95 + }, + "worker_concurrency": { + "current": 10.0, + "lifetime": 9.882 }, "queue_backpressure": { - "current": 1.1, - "lifetime": 2.2 + "current": 1.0, + "lifetime": 0.9872 + }, + "output_throughput": { + "current": 0.0, + "lifetime": 24.95 + }, + "worker_utilization": { + "current": 100.0, + "lifetime": 98.82 } }, "plugins": { 
"inputs": [ { - "id": "c75c0c6f97fd2c8605b95a5b2694fdae97189fe49553787a923faeaa3342c54a", + "id": "5ee0ea3d45c32bab3b41963bd900e758ba6e193a11079649302574c706fd5e2f", + "name": "dead_letter_queue", + "events": { + "queue_push_duration_in_millis": 0, + "out": 0 + }, + "flow": { + "throughput": { + "current": 0.0, + "lifetime": 0.0 + } + } + }, + { + "id": "95bb3e4f2a40f87147b6ab5035e08ba31858eace7604a57d2e719db790097222", "name": "generator", "events": { - "out": 4001, - "queue_push_duration_in_millis": 0 + "queue_push_duration_in_millis": 49454, + "out": 3751 + }, + "flow": { + "throughput": { + "current": 0.0, + "lifetime": 74.88 + } } } ], "codecs": [ { - "id": "plain_d7fb272d-75f5-4e38-bf56-c4c798bea2d1", - "name": "plain", + "id": "plain_4ffdd0bf-b707-419b-b425-d3986a43c35a", "decode": { - "out": 0, "writes_in": 0, - "duration_in_millis": 0 + "duration_in_millis": 0, + "out": 0 }, + "name": "plain", "encode": { "writes_in": 0, "duration_in_millis": 0 } }, { - "id": "plain_35cb2d25-0d8d-441a-a714-82121715864d", - "name": "plain", + "id": "plain_b4b163b6-effd-454a-9605-c3f8ef0cde5e", "decode": { - "out": 4001, - "writes_in": 4001, - "duration_in_millis": 57 + "writes_in": 0, + "duration_in_millis": 0, + "out": 0 }, + "name": "plain", "encode": { "writes_in": 0, "duration_in_millis": 0 } + }, + { + "id": "plain-codec-001", + "name": "plain" } ], "filters": [ { - "id": "1721e7c39758977b227be1d9334f0752555f39c873b8b86a3df8546f64059112", - "name": "json", + "id": "prune-http-input-fields", + "name": "prune", "events": { - "out": 2000, - "in": 2000, - "duration_in_millis": 716 + "duration_in_millis": 127, + "out": 1250, + "in": 1250 + }, + "flow": { + "worker_millis_per_event": { + "lifetime": 0.1016 + }, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.02535 + } } }, { - "id": "drop_b8ed8ea8c0ace91d6b617f6c8a5153141183c35a330de014182825dbceeade00", + "id": "ca953dac49c8fd3b00ba8275af10f9c6bcd9ca95755cd7892952966c5a13d427", + "name": "ruby", + "events": { + "duration_in_millis": 489610, + "out": 1250, + "in": 2500 + }, + "flow": { + "worker_millis_per_event": { + "current": "Infinity", + "lifetime": 195.8 + }, + "worker_utilization": { + "current": 100.0, + "lifetime": 97.74 + } + } + }, + { + "id": "drop-non-existent", "name": "drop", "events": { - "out": 300, - "in": 330, - "duration_in_millis": 333 + "duration_in_millis": 0, + "out": 0, + "in": 0 + }, + "flow": { + "worker_millis_per_event": {}, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.0 + } + } + }, + { + "id": "json-filter", + "name": "json", + "events": { + "duration_in_millis": 214, + "out": 1250, + "in": 1250 + }, + "flow": { + "worker_millis_per_event": { + "lifetime": 0.1712 + }, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.04272 + } + } + }, + { + "id": "mutate-path-001", + "name": "mutate", + "events": { + "duration_in_millis": 170, + "out": 1250, + "in": 1250 + }, + "flow": { + "worker_millis_per_event": { + "lifetime": 0.136 + }, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.03394 + } } }, { - "id": "drop_e2e0f559b7292f788693f9f318185d5c1d30127870ca8f0e608b11d9dc560079", + "id": "drop-80-percent", "name": "drop", "events": { - "out": 800, - "in": 880, - "duration_in_millis": 888 + "duration_in_millis": 0, + "out": 0, + "in": 0 + }, + "flow": { + "worker_millis_per_event": {}, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.0 + } } } ], "outputs": [ { - "id": "45554a51a53a57f5dbba7d26b65aad526147453a895529f3d4698c8fd88692ef", + "id": 
"81d12b78dc90935689721a2220cbd7ad2f75910cdafdc01b5b77f6ca8f356cef", "name": "elasticsearch", "events": { - "out": 0, - "in": 2000, - "duration_in_millis": 0 - }, - "documents": { - "successes": 1337, - "non_retryable_failures": 87 + "duration_in_millis": 4887, + "out": 1250, + "in": 1250 }, "bulk_requests": { - "with_errors": 87, + "successes": 10, "responses": { - "200": 87 + "200": 10 + } + }, + "documents": { + "successes": 1250 + }, + "flow": { + "worker_millis_per_event": { + "lifetime": 3.91 + }, + "worker_utilization": { + "current": 0.0, + "lifetime": 0.9756 } } } ] }, "reloads": { - "last_failure_timestamp": "2023-04-20T20:00:32.437218256Z", - "successes": 3, - "failures": 1, - "last_success_timestamp": "2023-04-20T22:30:32.437218256Z", - "last_error": { - "message": "No configuration found in the configured sources.", - "backtrace": [ - "org/logstash/execution/AbstractPipelineExt.java:151:in `reload_pipeline'", - "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:181:in `block in reload_pipeline'", - "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'" - ] - } + "last_success_timestamp": null, + "failures": 0, + "last_error": null, + "successes": 0, + "last_failure_timestamp": null }, "queue": { "type": "memory", @@ -250,84 +441,22 @@ "max_queue_size_in_bytes": 0 }, "dead_letter_queue": { - "max_queue_size_in_bytes": 47244640256, - "last_error": "no errors", "queue_size_in_bytes": 1, - "dropped_events": 0, + "last_error": "no errors", "expired_events": 0, - "storage_policy": "drop_newer" - }, - "hash": "a73729cc9c29203931db21553c5edba063820a7e40d16cb5053be75cc3811a17", - "ephemeral_id": "a5c63d09-1ba6-4d67-90a5-075f468a7ab0" - }, - ".monitoring-logstash": { - "events": { - "out": 0, - "filtered": 0, - "in": 0, - "duration_in_millis": 0, - "queue_push_duration_in_millis": 0 - }, - "flow": { - "output_throughput": { - "current": 0.0, - "lifetime": 0.0 - }, - "worker_concurrency": { - "current": 0.0, - "lifetime": 0.0 - }, - "input_throughput": { - "current": 0.0, - "lifetime": 0.0 - }, - "filter_throughput": { - "current": 0.0, - "lifetime": 0.0 - }, - "queue_backpressure": { - "current": 0.0, - "lifetime": 0.0 - } - }, - "plugins": { - "inputs": [], - "codecs": [], - "filters": [], - "outputs": [] - }, - "reloads": { - "last_failure_timestamp": null, - "successes": 0, - "failures": 0, - "last_success_timestamp": null, - "last_error": null + "storage_policy": "drop_newer", + "dropped_events": 0, + "max_queue_size_in_bytes": 1073741824 }, - "queue": null + "hash": "d30c4ff4da9fdb1a6b06ee390df1336aa80cc5ce6582d316af3dc0695af2d82e", + "ephemeral_id": "31caf4d6-162d-4eeb-bc04-411ae2e996f1" } }, "reloads": { - "successes": 0, - "failures": 0 - }, - "os": { - "cgroup": { - "cpu": { - "cfs_period_micros": 100000, - "cfs_quota_micros": -1, - "stat": { - "time_throttled_nanos": 0, - "number_of_times_throttled": 0, - "number_of_elapsed_periods": 0 - }, - "control_group": "/" - }, - "cpuacct": { - "usage_nanos": 161531487900, - "control_group": "/" - } - } + "failures": 0, + "successes": 0 }, + "os": {}, "queue": { "events_count": 0 } From 2b5a41ab518f5ea40c52e55727b9c9689785c324 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Wed, 9 Oct 2024 15:22:37 +0200 Subject: [PATCH 20/28] Add logstash_stats_pipeline_flow_worker_ metrics, fix other flow metrics (#369) --- README.md | 1 - collectors/nodestats/pipeline_subcollector.go | 26 ++++++++----- docker-compose.yml | 4 +- .../nodestats_response_test.snap | 39 
+++++++++++++++++-- fetcher/responses/nodestats_response.go | 19 +++++++++ fixtures/node_stats.json | 28 ++++++------- scripts/snapshots/metric_names.txt | 2 + 7 files changed, 89 insertions(+), 30 deletions(-) diff --git a/README.md b/README.md index ed303cc7..2fa3c750 100644 --- a/README.md +++ b/README.md @@ -97,7 +97,6 @@ The application can be configured using the following environment variables, whi | `LOG_LEVEL` | [Log level](https://pkg.go.dev/golang.org/x/exp/slog#Level) (defaults to "info" if not set) | `""` (empty string) | | `HTTP_TIMEOUT`| Timeout for HTTP requests to Logstash API in [Go duration format](https://golang.org/pkg/time/#ParseDuration) | `2s` | - All configuration variables can be checked in the [config directory](./config/). ## Building diff --git a/collectors/nodestats/pipeline_subcollector.go b/collectors/nodestats/pipeline_subcollector.go index bc5abef3..8ca7ce78 100644 --- a/collectors/nodestats/pipeline_subcollector.go +++ b/collectors/nodestats/pipeline_subcollector.go @@ -55,6 +55,8 @@ type PipelineSubcollector struct { FlowQueueBackpressureLifetime *prometheus.Desc FlowWorkerConcurrencyCurrent *prometheus.Desc FlowWorkerConcurrencyLifetime *prometheus.Desc + FlowWorkerUtilizationCurrent *prometheus.Desc + FlowWorkerUtilizationLifetime *prometheus.Desc DeadLetterQueueMaxSizeInBytes *prometheus.Desc DeadLetterQueueSizeInBytes *prometheus.Desc @@ -102,6 +104,8 @@ func NewPipelineSubcollector() *PipelineSubcollector { FlowQueueBackpressureLifetime: descHelper.NewDescWithHelpAndLabels("flow_queue_backpressure_lifetime", "Lifetime number of events in the backpressure queue.", "pipeline"), FlowWorkerConcurrencyCurrent: descHelper.NewDescWithHelpAndLabels("flow_worker_concurrency_current", "Current number of workers.", "pipeline"), FlowWorkerConcurrencyLifetime: descHelper.NewDescWithHelpAndLabels("flow_worker_concurrency_lifetime", "Lifetime number of workers.", "pipeline"), + FlowWorkerUtilizationCurrent: descHelper.NewDescWithHelpAndLabels("flow_worker_utilization_current", "Current worker utilization.", "pipeline"), + FlowWorkerUtilizationLifetime: descHelper.NewDescWithHelpAndLabels("flow_worker_utilization_lifetime", "Lifetime worker utilization.", "pipeline"), DeadLetterQueueMaxSizeInBytes: descHelper.NewDescWithHelpAndLabels("dead_letter_queue_max_size_in_bytes", "Maximum size of the dead letter queue in bytes.", "pipeline"), DeadLetterQueueSizeInBytes: descHelper.NewDescWithHelpAndLabels("dead_letter_queue_size_in_bytes", "Current size of the dead letter queue in bytes.", "pipeline"), @@ -137,16 +141,18 @@ func (collector *PipelineSubcollector) Collect(pipeStats *responses.SinglePipeli ch <- prometheus.MustNewConstMetric(collector.QueueMaxQueueSizeInBytes, prometheus.CounterValue, float64(pipeStats.Queue.MaxQueueSizeInBytes), pipelineID) flowStats := pipeStats.Flow - ch <- prometheus.MustNewConstMetric(collector.FlowInputCurrent, prometheus.GaugeValue, float64(flowStats.InputThroughput.Current), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowInputLifetime, prometheus.CounterValue, float64(flowStats.InputThroughput.Lifetime), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowFilterCurrent, prometheus.GaugeValue, float64(flowStats.FilterThroughput.Current), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowFilterLifetime, prometheus.CounterValue, float64(flowStats.FilterThroughput.Lifetime), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowOutputCurrent, prometheus.GaugeValue, 
float64(flowStats.OutputThroughput.Current), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowOutputLifetime, prometheus.CounterValue, float64(flowStats.OutputThroughput.Lifetime), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureCurrent, prometheus.GaugeValue, float64(flowStats.QueueBackpressure.Current), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureLifetime, prometheus.CounterValue, float64(flowStats.QueueBackpressure.Lifetime), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyCurrent, prometheus.GaugeValue, float64(flowStats.WorkerConcurrency.Current), pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyLifetime, prometheus.CounterValue, float64(flowStats.WorkerConcurrency.Lifetime), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowInputCurrent, prometheus.GaugeValue, flowStats.InputThroughput.Current, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowInputLifetime, prometheus.GaugeValue, flowStats.InputThroughput.Lifetime, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowFilterCurrent, prometheus.GaugeValue, flowStats.FilterThroughput.Current, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowFilterLifetime, prometheus.GaugeValue, flowStats.FilterThroughput.Lifetime, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowOutputCurrent, prometheus.GaugeValue, flowStats.OutputThroughput.Current, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowOutputLifetime, prometheus.GaugeValue, flowStats.OutputThroughput.Lifetime, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureCurrent, prometheus.GaugeValue, flowStats.QueueBackpressure.Current, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureLifetime, prometheus.GaugeValue, flowStats.QueueBackpressure.Lifetime, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyCurrent, prometheus.GaugeValue, flowStats.WorkerConcurrency.Current, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyLifetime, prometheus.GaugeValue, flowStats.WorkerConcurrency.Lifetime, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerUtilizationCurrent, prometheus.GaugeValue, flowStats.WorkerUtilization.Current, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerUtilizationLifetime, prometheus.GaugeValue, flowStats.WorkerUtilization.Lifetime, pipelineID) deadLetterQueueStats := pipeStats.DeadLetterQueue ch <- prometheus.MustNewConstMetric(collector.DeadLetterQueueMaxSizeInBytes, prometheus.GaugeValue, float64(deadLetterQueueStats.MaxQueueSizeInBytes), pipelineID) diff --git a/docker-compose.yml b/docker-compose.yml index d63f3c43..ee75ea73 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,6 @@ services: logstash: - image: docker.elastic.co/logstash/logstash:8.15.0 + image: docker.elastic.co/logstash/logstash:8.15.2 restart: unless-stopped volumes: - logstash-data:/usr/share/logstash/data @@ -41,7 +41,7 @@ services: timeout: 10s retries: 8 elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.15.0 + image: docker.elastic.co/elasticsearch/elasticsearch:8.15.2 restart: unless-stopped volumes: - elasticsearch-data:/usr/share/elasticsearch/data diff --git a/fetcher/responses/__snapshots__/nodestats_response_test.snap 
b/fetcher/responses/__snapshots__/nodestats_response_test.snap index d4727f7c..6063488c 100755 --- a/fetcher/responses/__snapshots__/nodestats_response_test.snap +++ b/fetcher/responses/__snapshots__/nodestats_response_test.snap @@ -52,6 +52,7 @@ responses.NodeStatsResponse{ OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:0.9743}, WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:10, Lifetime:9.752}, + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{}, }, Reloads: responses.ReloadResponse{}, Os: responses.OsResponse{}, @@ -60,8 +61,15 @@ responses.NodeStatsResponse{ ".monitoring-logstash": { Monitoring: responses.PipelineLogstashMonitoringResponse{}, Events: struct { Out int "json:\"out\""; Filtered int "json:\"filtered\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{}, - Flow: responses.FlowResponse{}, - Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\"" } "json:\"outputs\"" }{ + Flow: responses.FlowResponse{ + InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:2}, + FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:3, Lifetime:4}, + OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:9, Lifetime:10}, + QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:7, Lifetime:8}, + WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:5, Lifetime:6}, + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:11, Lifetime:12}, + }, + Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int 
"json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" } "json:\"flow\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" } "json:\"flow\"" } "json:\"outputs\"" }{ Inputs: { { ID: "9a9bed30135e19c8047fe6aa0588b70b15280fb9161fea8ed8e7368e1fb1e6d3", @@ -80,6 +88,9 @@ responses.NodeStatsResponse{ Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, Documents: struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" }{}, BulkRequests: struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" }{}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" }{ + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0.5, Lifetime:1.5}, + }, }, }, }, @@ -107,8 +118,9 @@ responses.NodeStatsResponse{ OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:0.9872}, WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:10, Lifetime:9.882}, + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:100, Lifetime:98.82}, }, - Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } 
"json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\"" } "json:\"outputs\"" }{ + Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" } "json:\"flow\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" } "json:\"flow\"" } "json:\"outputs\"" }{ Inputs: { { ID: "5ee0ea3d45c32bab3b41963bd900e758ba6e193a11079649302574c706fd5e2f", @@ -146,31 +158,49 @@ responses.NodeStatsResponse{ ID: "prune-http-input-fields", Name: "prune", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:127}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.02535}, + WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:0.1016}, + }, }, { ID: "ca953dac49c8fd3b00ba8275af10f9c6bcd9ca95755cd7892952966c5a13d427", Name: "ruby", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int 
"json:\"duration_in_millis\"" }{Out:1250, In:2500, DurationInMillis:489610}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:100, Lifetime:97.74}, + WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:195.8}, + }, }, { ID: "drop-non-existent", Name: "drop", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{}, }, { ID: "json-filter", Name: "json", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:214}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.04272}, + WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:0.1712}, + }, }, { ID: "mutate-path-001", Name: "mutate", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:170}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.03394}, + WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:0.136}, + }, }, { ID: "drop-80-percent", Name: "drop", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{}, }, }, Outputs: { @@ -183,6 +213,9 @@ responses.NodeStatsResponse{ WithErrors: 0, Responses: {"200":10}, }, + Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" }{ + WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.9756}, + }, }, }, }, diff --git a/fetcher/responses/nodestats_response.go b/fetcher/responses/nodestats_response.go index 73229037..258586cb 100644 --- a/fetcher/responses/nodestats_response.go +++ b/fetcher/responses/nodestats_response.go @@ -106,6 +106,10 @@ type FlowResponse struct { Current float64 `json:"current"` Lifetime float64 `json:"lifetime"` } `json:"worker_concurrency"` + 
WorkerUtilization struct { + Current float64 `json:"current"` + Lifetime float64 `json:"lifetime"` + } `json:"worker_utilization"` } type SinglePipelineResponse struct { @@ -148,6 +152,15 @@ type SinglePipelineResponse struct { In int `json:"in"` DurationInMillis int `json:"duration_in_millis"` } `json:"events"` + Flow struct { + WorkerUtilization struct { + Current float64 `json:"current"` + Lifetime float64 `json:"lifetime"` + } `json:"worker_utilization"` + WorkerMillisPerEvent struct { + Lifetime float64 `json:"lifetime"` + } `json:"worker_millis_per_event"` + } `json:"flow"` } `json:"filters"` Outputs []struct { ID string `json:"id"` @@ -165,6 +178,12 @@ WithErrors int `json:"with_errors"` Responses map[string]int `json:"responses"` } `json:"bulk_requests"` + Flow struct { + WorkerUtilization struct { + Current float64 `json:"current"` + Lifetime float64 `json:"lifetime"` + } `json:"worker_utilization"` + } `json:"flow"` } `json:"outputs"` } `json:"plugins"` Reloads PipelineReloadResponse `json:"reloads"` diff --git a/fixtures/node_stats.json b/fixtures/node_stats.json index e269266a..e832ed4d 100644 --- a/fixtures/node_stats.json +++ b/fixtures/node_stats.json @@ -119,28 +119,28 @@ }, "flow": { "input_throughput": { - "current": 0.0, - "lifetime": 0.0 + "current": 1.0, + "lifetime": 2.0 }, "filter_throughput": { - "current": 0.0, - "lifetime": 0.0 + "current": 3.0, + "lifetime": 4.0 }, "worker_concurrency": { - "current": 0.0, - "lifetime": 0.0 + "current": 5.0, + "lifetime": 6.0 }, "queue_backpressure": { - "current": 0.0, - "lifetime": 0.0 + "current": 7.0, + "lifetime": 8.0 }, "output_throughput": { - "current": 0.0, - "lifetime": 0.0 + "current": 9.0, + "lifetime": 10.0 }, "worker_utilization": { - "current": 0.0, - "lifetime": 0.0 + "current": 11.0, + "lifetime": 12.0 } }, "plugins": { @@ -170,8 +170,8 @@ "flow": { "worker_millis_per_event": {}, "worker_utilization": { - "current": 0.0, - "lifetime": 0.0 + "current": 0.5, + "lifetime": 1.5 } } } diff --git a/scripts/snapshots/metric_names.txt b/scripts/snapshots/metric_names.txt index e6349c83..8dea59e9 100644 --- a/scripts/snapshots/metric_names.txt +++ b/scripts/snapshots/metric_names.txt @@ -80,3 +80,5 @@ logstash_stats_flow_queue_backpressure_current logstash_stats_flow_queue_backpressure_lifetime logstash_stats_flow_worker_concurrency_current logstash_stats_flow_worker_concurrency_lifetime +logstash_stats_pipeline_flow_worker_utilization_current +logstash_stats_pipeline_flow_worker_utilization_lifetime From 1e60b12299b747a658185ef01cc7364f43b23995 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Wed, 9 Oct 2024 15:38:09 +0200 Subject: [PATCH 21/28] Fix create release notes script (#370) Change the script so that v1 and v2 are taken into account separately --- scripts/create_release_notes.sh | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/create_release_notes.sh b/scripts/create_release_notes.sh index b29df70d..c793650f 100755 --- a/scripts/create_release_notes.sh +++ b/scripts/create_release_notes.sh @@ -11,12 +11,12 @@ else previous_tag_hash=$(git rev-list --tags --max-count=1) fi -previous_tag=$(git describe --abbrev=0 --tags $previous_tag_hash) +previous_tag=$(git describe --abbrev=0 --tags "$previous_tag_hash") range="$previous_tag..HEAD" -commits_since_previous_tag=$(git log --no-merges --pretty=format:"* %s" $range) +commits_since_previous_tag=$(git log --no-merges --pretty=format:"* %s" "$range") notes_file="release_notes.txt" @@ -24,8
+24,10 @@ if [ -z "$commits_since_previous_tag" ]; then echo "No changes from previous release" > "$notes_file" else echo "Release Notes ($current_tag):" > "$notes_file" - echo "" >> "$notes_file" - echo "Since the last tag ($previous_tag), the following changes have been made:" >> "$notes_file" - echo "" >> "$notes_file" - echo "$commits_since_previous_tag" >> "$notes_file" + { + echo "" + echo "Since the last tag ($previous_tag), the following changes have been made:" + echo "" + echo "$commits_since_previous_tag" + } >> "$notes_file" fi From 7a7e6a5b1f01d891e080997d94a807cee8b62d0c Mon Sep 17 00:00:00 2001 From: bas smit <474727+fbs@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:23:05 +0000 Subject: [PATCH 22/28] node stats: support infinity (#373) --- collectors/nodestats/pipeline_subcollector.go | 24 +++---- .../nodestats_response_test.snap | 68 +++++++++---------- fetcher/responses/nodestats_response.go | 63 ++++++++++++----- fetcher/responses/nodestats_response_test.go | 31 +++++++++ fixtures/node_stats.json | 2 +- 5 files changed, 125 insertions(+), 63 deletions(-) diff --git a/collectors/nodestats/pipeline_subcollector.go b/collectors/nodestats/pipeline_subcollector.go index 8ca7ce78..1d034e8d 100644 --- a/collectors/nodestats/pipeline_subcollector.go +++ b/collectors/nodestats/pipeline_subcollector.go @@ -141,18 +141,18 @@ func (collector *PipelineSubcollector) Collect(pipeStats *responses.SinglePipeli ch <- prometheus.MustNewConstMetric(collector.QueueMaxQueueSizeInBytes, prometheus.CounterValue, float64(pipeStats.Queue.MaxQueueSizeInBytes), pipelineID) flowStats := pipeStats.Flow - ch <- prometheus.MustNewConstMetric(collector.FlowInputCurrent, prometheus.GaugeValue, flowStats.InputThroughput.Current, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowInputLifetime, prometheus.GaugeValue, flowStats.InputThroughput.Lifetime, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowFilterCurrent, prometheus.GaugeValue, flowStats.FilterThroughput.Current, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowFilterLifetime, prometheus.GaugeValue, flowStats.FilterThroughput.Lifetime, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowOutputCurrent, prometheus.GaugeValue, flowStats.OutputThroughput.Current, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowOutputLifetime, prometheus.GaugeValue, flowStats.OutputThroughput.Lifetime, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureCurrent, prometheus.GaugeValue, flowStats.QueueBackpressure.Current, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureLifetime, prometheus.GaugeValue, flowStats.QueueBackpressure.Lifetime, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyCurrent, prometheus.GaugeValue, flowStats.WorkerConcurrency.Current, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyLifetime, prometheus.GaugeValue, flowStats.WorkerConcurrency.Lifetime, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowWorkerUtilizationCurrent, prometheus.GaugeValue, flowStats.WorkerUtilization.Current, pipelineID) - ch <- prometheus.MustNewConstMetric(collector.FlowWorkerUtilizationLifetime, prometheus.GaugeValue, flowStats.WorkerUtilization.Lifetime, pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowInputCurrent, prometheus.GaugeValue, float64(flowStats.InputThroughput.Current), pipelineID) + ch <- 
prometheus.MustNewConstMetric(collector.FlowInputLifetime, prometheus.GaugeValue, float64(flowStats.InputThroughput.Lifetime), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowFilterCurrent, prometheus.GaugeValue, float64(flowStats.FilterThroughput.Current), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowFilterLifetime, prometheus.GaugeValue, float64(flowStats.FilterThroughput.Lifetime), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowOutputCurrent, prometheus.GaugeValue, float64(flowStats.OutputThroughput.Current), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowOutputLifetime, prometheus.GaugeValue, float64(flowStats.OutputThroughput.Lifetime), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureCurrent, prometheus.GaugeValue, float64(flowStats.QueueBackpressure.Current), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowQueueBackpressureLifetime, prometheus.GaugeValue, float64(flowStats.QueueBackpressure.Lifetime), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyCurrent, prometheus.GaugeValue, float64(flowStats.WorkerConcurrency.Current), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerConcurrencyLifetime, prometheus.GaugeValue, float64(flowStats.WorkerConcurrency.Lifetime), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerUtilizationCurrent, prometheus.GaugeValue, float64(flowStats.WorkerUtilization.Current), pipelineID) + ch <- prometheus.MustNewConstMetric(collector.FlowWorkerUtilizationLifetime, prometheus.GaugeValue, float64(flowStats.WorkerUtilization.Lifetime), pipelineID) deadLetterQueueStats := pipeStats.DeadLetterQueue ch <- prometheus.MustNewConstMetric(collector.DeadLetterQueueMaxSizeInBytes, prometheus.GaugeValue, float64(deadLetterQueueStats.MaxQueueSizeInBytes), pipelineID) diff --git a/fetcher/responses/__snapshots__/nodestats_response_test.snap b/fetcher/responses/__snapshots__/nodestats_response_test.snap index 6063488c..5300a6d3 100755 --- a/fetcher/responses/__snapshots__/nodestats_response_test.snap +++ b/fetcher/responses/__snapshots__/nodestats_response_test.snap @@ -47,12 +47,12 @@ responses.NodeStatsResponse{ }, Events: responses.EventsResponse{In:3751, Filtered:1250, Out:1250, DurationInMillis:494960, QueuePushDurationInMillis:49451}, Flow: responses.FlowResponse{ - InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:73.9}, - FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, - OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, - QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:0.9743}, - WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:10, Lifetime:9.752}, - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{}, + InputThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:73.9}, + FilterThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, + OutputThroughput: struct { Current 
responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:24.63}, + QueueBackpressure: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:1, Lifetime:0.9743}, + WorkerConcurrency: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:10, Lifetime:9.752}, + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{}, }, Reloads: responses.ReloadResponse{}, Os: responses.OsResponse{}, @@ -62,14 +62,14 @@ responses.NodeStatsResponse{ Monitoring: responses.PipelineLogstashMonitoringResponse{}, Events: struct { Out int "json:\"out\""; Filtered int "json:\"filtered\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{}, Flow: responses.FlowResponse{ - InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:2}, - FilterThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:3, Lifetime:4}, - OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:9, Lifetime:10}, - QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:7, Lifetime:8}, - WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:5, Lifetime:6}, - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:11, Lifetime:12}, + InputThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:1, Lifetime:2}, + FilterThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:3, Lifetime:4}, + OutputThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:9, Lifetime:10}, + QueueBackpressure: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:7, Lifetime:8}, + WorkerConcurrency: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:5, Lifetime:6}, + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:11, Lifetime:12}, }, - Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int 
"json:\"duration_in_millis\"" } "json:\"events\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" } "json:\"flow\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" } "json:\"flow\"" } "json:\"outputs\"" }{ + Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Flow struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" } "json:\"flow\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" } "json:\"flow\"" } "json:\"outputs\"" }{ Inputs: { { ID: "9a9bed30135e19c8047fe6aa0588b70b15280fb9161fea8ed8e7368e1fb1e6d3", @@ -113,14 +113,14 @@ responses.NodeStatsResponse{ Monitoring: responses.PipelineLogstashMonitoringResponse{}, Events: struct { Out int "json:\"out\""; Filtered int "json:\"filtered\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" }{Out:1250, Filtered:1250, In:3751, DurationInMillis:495018, QueuePushDurationInMillis:49455}, Flow: responses.FlowResponse{ - InputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:74.88}, - FilterThroughput: struct { Current float64 
"json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, - OutputThroughput: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, - QueueBackpressure: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:1, Lifetime:0.9872}, - WorkerConcurrency: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:10, Lifetime:9.882}, - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:100, Lifetime:98.82}, + InputThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:74.88}, + FilterThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, + OutputThroughput: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:24.95}, + QueueBackpressure: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:1, Lifetime:0.9872}, + WorkerConcurrency: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:10, Lifetime:9.882}, + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:100, Lifetime:98.82}, }, - Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int "json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" } "json:\"flow\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" } "json:\"flow\"" } "json:\"outputs\"" }{ + Plugins: struct { Inputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; QueuePushDurationInMillis int 
"json:\"queue_push_duration_in_millis\"" } "json:\"events\"" } "json:\"inputs\""; Codecs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Decode struct { Out int "json:\"out\""; WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"decode\""; Encode struct { WritesIn int "json:\"writes_in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"encode\"" } "json:\"codecs\""; Filters []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Flow struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" } "json:\"flow\"" } "json:\"filters\""; Outputs []struct { ID string "json:\"id\""; Name string "json:\"name\""; Events struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" } "json:\"events\""; Documents struct { Successes int "json:\"successes\""; NonRetryableFailures int "json:\"non_retryable_failures\"" } "json:\"documents\""; BulkRequests struct { WithErrors int "json:\"with_errors\""; Responses map[string]int "json:\"responses\"" } "json:\"bulk_requests\""; Flow struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\"" } "json:\"flow\"" } "json:\"outputs\"" }{ Inputs: { { ID: "5ee0ea3d45c32bab3b41963bd900e758ba6e193a11079649302574c706fd5e2f", @@ -158,49 +158,49 @@ responses.NodeStatsResponse{ ID: "prune-http-input-fields", Name: "prune", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:127}, - Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.02535}, - WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:0.1016}, + Flow: struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:0.02535}, + WorkerMillisPerEvent: struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Lifetime:0.1016}, }, }, { ID: "ca953dac49c8fd3b00ba8275af10f9c6bcd9ca95755cd7892952966c5a13d427", Name: "ruby", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:2500, DurationInMillis:489610}, - Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } 
"json:\"worker_millis_per_event\"" }{ - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:100, Lifetime:97.74}, - WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:195.8}, + Flow: struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:-Inf, Lifetime:97.74}, + WorkerMillisPerEvent: struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Lifetime:195.8}, }, }, { ID: "drop-non-existent", Name: "drop", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, - Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{}, + Flow: struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{}, }, { ID: "json-filter", Name: "json", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:214}, - Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.04272}, - WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:0.1712}, + Flow: struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:0.04272}, + WorkerMillisPerEvent: struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Lifetime:0.1712}, }, }, { ID: "mutate-path-001", Name: "mutate", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{Out:1250, In:1250, DurationInMillis:170}, - Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ - WorkerUtilization: struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" }{Current:0, Lifetime:0.03394}, - WorkerMillisPerEvent: struct { Lifetime float64 "json:\"lifetime\"" }{Lifetime:0.136}, + Flow: struct { WorkerUtilization 
struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{ + WorkerUtilization: struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Current:0, Lifetime:0.03394}, + WorkerMillisPerEvent: struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" }{Lifetime:0.136}, }, }, { ID: "drop-80-percent", Name: "drop", Events: struct { Out int "json:\"out\""; In int "json:\"in\""; DurationInMillis int "json:\"duration_in_millis\"" }{}, - Flow: struct { WorkerUtilization struct { Current float64 "json:\"current\""; Lifetime float64 "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime float64 "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{}, + Flow: struct { WorkerUtilization struct { Current responses.InfinityFloat "json:\"current\""; Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_utilization\""; WorkerMillisPerEvent struct { Lifetime responses.InfinityFloat "json:\"lifetime\"" } "json:\"worker_millis_per_event\"" }{}, }, }, Outputs: { diff --git a/fetcher/responses/nodestats_response.go b/fetcher/responses/nodestats_response.go index 258586cb..64a60703 100644 --- a/fetcher/responses/nodestats_response.go +++ b/fetcher/responses/nodestats_response.go @@ -1,6 +1,11 @@ package responses -import "time" +import ( + "encoding/json" + "fmt" + "math" + "time" +) type PipelineResponse struct { Workers int `json:"workers"` @@ -87,28 +92,28 @@ type EventsResponse struct { type FlowResponse struct { InputThroughput struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"input_throughput"` FilterThroughput struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"filter_throughput"` OutputThroughput struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"output_throughput"` QueueBackpressure struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"queue_backpressure"` WorkerConcurrency struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"worker_concurrency"` WorkerUtilization struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"worker_utilization"` } @@ -154,11 +159,11 @@ type SinglePipelineResponse struct { } `json:"events"` Flow struct { WorkerUtilization struct { - Current float64 `json:"current"` - Lifetime float64 `json:"lifetime"` + Current InfinityFloat `json:"current"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"worker_utilization"` WorkerMillisPerEvent struct { - Lifetime float64 `json:"lifetime"` + Lifetime InfinityFloat `json:"lifetime"` } `json:"worker_millis_per_event"` } `json:"flow"` } `json:"filters"` @@ -286,3 +291,29 @@ type NodeStatsResponse 
struct { Pipelines map[string]SinglePipelineResponse `json:"pipelines"` } + +// InfinityFloat is a float type that also accepts the string Infinity +type InfinityFloat float64 + +func (i *InfinityFloat) UnmarshalJSON(data []byte) error { + var s string + err := json.Unmarshal(data, &s) + if err == nil { + if s == "Infinity" { + *i = InfinityFloat(math.Inf(1)) + return nil + } else if s == "-Infinity" { + *i = InfinityFloat(math.Inf(-1)) + return nil + } + fmt.Errorf("Invalid string value for InfinityFloat: %s", s) + } + + var f float64 + if err := json.Unmarshal(data, &f); err != nil { + return err + } + + *i = InfinityFloat(f) + return nil +} diff --git a/fetcher/responses/nodestats_response_test.go b/fetcher/responses/nodestats_response_test.go index d4b2a6b8..e1b77d51 100644 --- a/fetcher/responses/nodestats_response_test.go +++ b/fetcher/responses/nodestats_response_test.go @@ -23,3 +23,34 @@ func TestNodeStatsResponseStructure(t *testing.T) { snaps.MatchSnapshot(t, "Unmarshalled NodestatsResponse", target) } + +func TestNodeStatsInfinityResponse(t *testing.T) { + type Data struct { + F responses.InfinityFloat + } + var d Data + + okData := [][]byte{ + []byte(`{"F": "Infinity"}`), + []byte(`{"F": "-Infinity"}`), + []byte(`{"F": 13.37}`), + } + for _, e := range okData { + if err := json.Unmarshal(e, &d); err != nil { + t.Errorf("unexpected error: %s", err) + } + } + + badData := [][]byte{ + []byte(`{"F": "-infinity"}`), + []byte(`{"F": "--Infinity"}`), + []byte(`{"F": "13.3"}`), + } + for _, e := range badData { + if err := json.Unmarshal(e, &d); err == nil { + t.Errorf("expected error for: %s, got: %+v", string(e), d) + } + } + + +} diff --git a/fixtures/node_stats.json b/fixtures/node_stats.json index e832ed4d..87cb9fd4 100644 --- a/fixtures/node_stats.json +++ b/fixtures/node_stats.json @@ -323,7 +323,7 @@ "lifetime": 195.8 }, "worker_utilization": { - "current": 100.0, + "current": "-Infinity", "lifetime": 97.74 } } From 078156893ad9ccf5b35de9a7240074be6ba4c632 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Mon, 14 Oct 2024 14:05:48 +0200 Subject: [PATCH 23/28] Fix improper error handling in infinity float (#375) --- fetcher/responses/nodestats_response.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/fetcher/responses/nodestats_response.go b/fetcher/responses/nodestats_response.go index 64a60703..cfa8d2aa 100644 --- a/fetcher/responses/nodestats_response.go +++ b/fetcher/responses/nodestats_response.go @@ -306,7 +306,8 @@ func (i *InfinityFloat) UnmarshalJSON(data []byte) error { *i = InfinityFloat(math.Inf(-1)) return nil } - fmt.Errorf("Invalid string value for InfinityFloat: %s", s) + + return fmt.Errorf("invalid string value for InfinityFloat: %s", s) } var f float64 From d109bd1df69ced8cbd6699a5bef68af2b2476331 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Wed, 16 Oct 2024 23:05:40 +0200 Subject: [PATCH 24/28] Upgrade multiple dependencies (#378) --- Dockerfile | 2 +- Dockerfile.dev | 4 ++-- go.mod | 4 ++-- go.sum | 8 ++++---- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Dockerfile b/Dockerfile index 7569353e..59ca51ca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.23.1-alpine3.19 as build +FROM golang:1.23.2-alpine3.20 as build ARG VERSION \ GIT_COMMIT \ diff --git a/Dockerfile.dev b/Dockerfile.dev index b1704911..10303a46 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,6 +1,6 @@ -FROM cosmtrek/air:v1.60.0 as air +FROM cosmtrek/air:v1.61.0 as air -FROM golang:1.23.1-alpine3.19 +FROM 
golang:1.23.2-alpine3.20 WORKDIR /app diff --git a/go.mod b/go.mod index 2568f433..351572a1 100644 --- a/go.mod +++ b/go.mod @@ -4,13 +4,13 @@ go 1.23 require ( github.com/joho/godotenv v1.5.1 - github.com/prometheus/client_golang v1.20.4 + github.com/prometheus/client_golang v1.20.5 ) require ( github.com/gkampitakis/ciinfo v0.3.0 // indirect github.com/gkampitakis/go-diff v1.3.2 // indirect - github.com/klauspost/compress v1.17.10 // indirect + github.com/klauspost/compress v1.17.11 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/maruel/natural v1.1.1 // indirect diff --git a/go.sum b/go.sum index 70478987..20a52aae 100644 --- a/go.sum +++ b/go.sum @@ -15,8 +15,8 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= -github.com/klauspost/compress v1.17.10 h1:oXAz+Vh0PMUvJczoi+flxpnBEPxoER1IaAnU/NMPtT0= -github.com/klauspost/compress v1.17.10/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= +github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= @@ -30,8 +30,8 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8m github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.20.4 h1:Tgh3Yr67PaOv/uTqloMsCEdeuFTatm5zIq5+qNN23vI= -github.com/prometheus/client_golang v1.20.4/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= +github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y= +github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= github.com/prometheus/common v0.60.0 h1:+V9PAREWNvJMAuJ1x1BaWl9dewMW4YrHZQbx0sJNllA= From 422e1c54daf29f9f1ff89ffce8c9e25a9377b654 Mon Sep 17 00:00:00 2001 From: Julien Orain Date: Tue, 5 Nov 2024 21:10:47 +0000 Subject: [PATCH 25/28] fix: enhance chart --- chart/templates/_helpers.tpl | 21 +++++++++++++++ chart/templates/deployment.yaml | 4 +++ chart/templates/podmonitor.yaml | 45 +++++++++++++++++++++++++++++++++ chart/values.yaml | 24 ++++++++++++++++++ 4 files changed, 94 insertions(+) create mode 100644 chart/templates/podmonitor.yaml diff --git a/chart/templates/_helpers.tpl b/chart/templates/_helpers.tpl index f5987e57..bc71d116 100644 --- a/chart/templates/_helpers.tpl +++ b/chart/templates/_helpers.tpl @@ -39,3 +39,24 @@ It checks if .Values.image.tag is provided, and if not, it returns a tag with "v {{- printf "v%s" .Chart.AppVersion -}} {{- end -}} {{- end -}} + +{{/* +Create chart name and version as used 
by the chart label. +*/}} +{{- define "logstash-exporter.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Common labels +*/}} +{{- define "logstash-exporter.labels" -}} +helm.sh/chart: {{ include "logstash-exporter.chart" . }} +{{- with .Chart.AppVersion }} +app.kubernetes.io/version: {{ . | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- if .Values.commonLabels }} +{{ toYaml .Values.commonLabels }} +{{- end }} +{{- end }} diff --git a/chart/templates/deployment.yaml b/chart/templates/deployment.yaml index 3efece3e..f9268323 100644 --- a/chart/templates/deployment.yaml +++ b/chart/templates/deployment.yaml @@ -89,6 +89,10 @@ spec: value: {{ required "logstash.url is required" .Values.logstash.url | quote }} - name: PORT value: {{ required "service.port is required" .Values.service.port | quote }} + - name: HTTP_TIMEOUT + value: {{ required "logstash.httpTimeout is required" .Values.logstash.httpTimeout | quote }} + - name: HTTP_INSECURE + value: {{ required "logstash.httpInsecure is required" .Values.logstash.httpInsecure | quote }} {{- range $key, $value := .Values.deployment.env }} - name: {{ $key | quote }} value: {{ $value | quote }} diff --git a/chart/templates/podmonitor.yaml b/chart/templates/podmonitor.yaml new file mode 100644 index 00000000..847c44aa --- /dev/null +++ b/chart/templates/podmonitor.yaml @@ -0,0 +1,45 @@ +{{- if .Values.podMonitor.enabled }} +apiVersion: {{ .Values.podMonitor.apiVersion }} +kind: PodMonitor +metadata: + name: {{ template "logstash-exporter.fullname" . }} + {{- if .Values.podMonitor.namespace }} + namespace: {{ .Values.podMonitor.namespace }} + {{- end }} + labels: + {{- include "logstash-exporter.labels" . | nindent 4 }} + {{- if .Values.podMonitor.labels }} + {{- toYaml .Values.podMonitor.labels | nindent 4 }} + {{- end }} +spec: + namespaceSelector: + matchNames: + - {{ .Release.Namespace }} + podMetricsEndpoints: + - path: {{ .Values.web.path }} + port: {{ .Values.deployment.metricsPort.name }} + {{- if .Values.podMonitor.scheme }} + scheme: {{ .Values.podMonitor.scheme }} + {{- end }} + {{- if .Values.podMonitor.interval }} + interval: {{ .Values.podMonitor.interval }} + {{- end }} + {{- if .Values.podMonitor.scrapeTimeout }} + scrapeTimeout: {{ .Values.podMonitor.scrapeTimeout }} + {{- end }} + {{- if .Values.podMonitor.honorLabels }} + honorLabels: true + {{- end }} + {{- if .Values.podMonitor.metricRelabelings }} + metricRelabelings: + {{- toYaml .Values.podMonitor.metricRelabelings | nindent 6 }} + {{- end }} + {{- if .Values.podMonitor.relabelings }} + relabelings: + {{- toYaml .Values.podMonitor.relabelings | nindent 6 }} + {{- end }} + selector: + matchLabels: + app: {{ template "logstash-exporter.name" . }} + release: {{ .Release.Name }} +{{- end }} \ No newline at end of file diff --git a/chart/values.yaml b/chart/values.yaml index f36c0a6a..82a227e3 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -4,6 +4,28 @@ logstash: ## @param logstash.url Logstash instance URL ## url: "http://logstash:9600" + httpTimeout: "3s" + httpInsecure: false + +web: + ## Path under which to expose metrics. 
+ ## + path: / + +podMonitor: + ## If true, a PodMonitor CRD is created for a Prometheus Operator + ## https://prometheus-operator.dev/docs/operator/api/#monitoring.coreos.com/v1.PodMonitor + ## + enabled: false + apiVersion: "monitoring.coreos.com/v1" + namespace: "" + labels: {} + interval: 60s + scrapeTimeout: 10s + honorLabels: true + scheme: http + relabelings: [] + metricRelabelings: [] ## @section Image settings ## @@ -120,6 +142,8 @@ deployment: ## @param deployment.rollingUpdate.maxUnavailable Maximum unavailable for rolling update ## maxUnavailable: 0 + metricsPort: + name: http ## @section Service settings ## From 129212be66eb555dc9377b464b6ee75da1dabf7a Mon Sep 17 00:00:00 2001 From: Julien Orain Date: Wed, 6 Nov 2024 12:30:14 +0000 Subject: [PATCH 26/28] fix(helm): add readme values --- chart/values.yaml | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/chart/values.yaml b/chart/values.yaml index 82a227e3..cc81768a 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -4,27 +4,55 @@ logstash: ## @param logstash.url Logstash instance URL ## url: "http://logstash:9600" + ## @param logstash.httpTimeout http timeout + ## httpTimeout: "3s" + ## @param logstash.httpInsecure http insecure + ## httpInsecure: false +## @section Web settings +## web: - ## Path under which to expose metrics. + ## @param web.path Path under which to expose metrics. ## path: / +## @section PodMonitor settings +## podMonitor: ## If true, a PodMonitor CRD is created for a Prometheus Operator ## https://prometheus-operator.dev/docs/operator/api/#monitoring.coreos.com/v1.PodMonitor ## + ## @param podMonitor.enabled Enable pod monitor creation + ## enabled: false + ## @param podMonitor.apiVersion Set pod monitor apiVersion + ## apiVersion: "monitoring.coreos.com/v1" + ## @param podMonitor.namespace Set pod monitor namespace + ## namespace: "" + ## @param podMonitor.labels Set pod monitor labels + ## labels: {} + ## @param podMonitor.interval Set pod monitor interval + ## interval: 60s + ## @param podMonitor.scrapeTimeout Set pod monitor scrapeTimeout + ## scrapeTimeout: 10s + ## @param podMonitor.honorLabels Set pod monitor honorLabels + ## honorLabels: true + ## @param podMonitor.scheme Set pod monitor scheme + ## scheme: http + ## @param podMonitor.relabelings Set pod monitor relabelings + ## relabelings: [] + ## @param podMonitor.metricRelabelings Set pod monitor metricRelabelings + ## metricRelabelings: [] ## @section Image settings From d604a9cf904520f90e50947d2ac8177f58c08b5d Mon Sep 17 00:00:00 2001 From: Julien Orain Date: Wed, 6 Nov 2024 15:19:25 +0000 Subject: [PATCH 27/28] fix(helm): add readme values --- chart/values.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/chart/values.yaml b/chart/values.yaml index cc81768a..259b7100 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -170,7 +170,11 @@ deployment: ## @param deployment.rollingUpdate.maxUnavailable Maximum unavailable for rolling update ## maxUnavailable: 0 + ## @section metricsPort settings + ## metricsPort: + ## @param metricsPort.name Name of the port + ## name: http ## @section Service settings From 3be6302b673618082a94d87aee2ec07c0d68f0b4 Mon Sep 17 00:00:00 2001 From: Jakub Surdej Date: Thu, 7 Nov 2024 16:20:48 +0100 Subject: [PATCH 28/28] PR 384 --- chart/README.md | 35 +++++++++++++++++-- chart/schema.json | 87 +++++++++++++++++++++++++++++++++++++++++++++++ chart/values.yaml | 10 +----- 3 files changed, 120 insertions(+), 12 deletions(-) diff --git a/chart/README.md 
b/chart/README.md index f9195f43..df3ecccb 100644 --- a/chart/README.md +++ b/chart/README.md @@ -4,9 +4,32 @@ ### Logstash configuration -| Name | Description | Value | -| -------------- | --------------------- | ---------------------- | -| `logstash.url` | Logstash instance URL | `http://logstash:9600` | +| Name | Description | Value | +| ----------------------- | --------------------- | ---------------------- | +| `logstash.url` | Logstash instance URL | `http://logstash:9600` | +| `logstash.httpTimeout` | http timeout | `3s` | +| `logstash.httpInsecure` | http insecure | `false` | + +### Web settings + +| Name | Description | Value | +| ---------- | ----------------------------------- | ----- | +| `web.path` | Path under which to expose metrics. | `/` | + +### PodMonitor settings + +| Name | Description | Value | +| ------------------------------ | --------------------------------- | -------------------------- | +| `podMonitor.enabled` | Enable pod monitor creation | `false` | +| `podMonitor.apiVersion` | Set pod monitor apiVersion | `monitoring.coreos.com/v1` | +| `podMonitor.namespace` | Set pod monitor namespace | `""` | +| `podMonitor.labels` | Set pod monitor labels | `{}` | +| `podMonitor.interval` | Set pod monitor interval | `60s` | +| `podMonitor.scrapeTimeout` | Set pod monitor scrapeTimeout | `10s` | +| `podMonitor.honorLabels` | Set pod monitor honorLabels | `true` | +| `podMonitor.scheme` | Set pod monitor scheme | `http` | +| `podMonitor.relabelings` | Set pod monitor relabelings | `[]` | +| `podMonitor.metricRelabelings` | Set pod monitor metricRelabelings | `[]` | ### Image settings @@ -58,6 +81,12 @@ | `deployment.rollingUpdate.maxSurge` | Maximum surge for rolling update | `1` | | `deployment.rollingUpdate.maxUnavailable` | Maximum unavailable for rolling update | `0` | +### metricsPort settings + +| Name | Description | Value | +| ----------------------------- | ---------------- | ------ | +| `deployment.metricsPort.name` | Name of the port | `http` | + ### Service settings | Name | Description | Value | diff --git a/chart/schema.json b/chart/schema.json index 8cde27ba..b01ccf33 100644 --- a/chart/schema.json +++ b/chart/schema.json @@ -9,6 +9,83 @@ "type": "string", "description": "Logstash instance URL", "default": "http://logstash:9600" + }, + "httpTimeout": { + "type": "string", + "description": "http timeout", + "default": "3s" + }, + "httpInsecure": { + "type": "boolean", + "description": "http insecure", + "default": false + } + } + }, + "web": { + "type": "object", + "properties": { + "path": { + "type": "string", + "description": "Path under which to expose metrics.", + "default": "/" + } + } + }, + "podMonitor": { + "type": "object", + "properties": { + "enabled": { + "type": "boolean", + "description": "Enable pod monitor creation", + "default": false + }, + "apiVersion": { + "type": "string", + "description": "Set pod monitor apiVersion", + "default": "monitoring.coreos.com/v1" + }, + "namespace": { + "type": "string", + "description": "Set pod monitor namespace", + "default": "" + }, + "labels": { + "type": "object", + "description": "Set pod monitor labels", + "default": {} + }, + "interval": { + "type": "string", + "description": "Set pod monitor interval", + "default": "60s" + }, + "scrapeTimeout": { + "type": "string", + "description": "Set pod monitor scrapeTimeout", + "default": "10s" + }, + "honorLabels": { + "type": "boolean", + "description": "Set pod monitor honorLabels", + "default": true + }, + "scheme": { + "type": "string", + "description": 
"Set pod monitor scheme", + "default": "http" + }, + "relabelings": { + "type": "array", + "description": "Set pod monitor relabelings", + "default": [], + "items": {} + }, + "metricRelabelings": { + "type": "array", + "description": "Set pod monitor metricRelabelings", + "default": [], + "items": {} } } }, @@ -187,6 +264,16 @@ "default": 0 } } + }, + "metricsPort": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the port", + "default": "http" + } + } } } }, diff --git a/chart/values.yaml b/chart/values.yaml index 259b7100..5e075e78 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -85,17 +85,9 @@ deployment: restartPolicy: Always ## @param deployment.annotations Additional deployment annotations ## - ## Example: - ## annotations: - ## kubernetes.io/foo: bar - ## annotations: {} ## @param deployment.labels Additional deployment labels ## - ## Example: - ## labels: - ## foo: bar - ## labels: {} ## @param deployment.pullSecret Kubernetes secret for pulling the image ## @@ -173,7 +165,7 @@ deployment: ## @section metricsPort settings ## metricsPort: - ## @param metricsPort.name Name of the port + ## @param deployment.metricsPort.name Name of the port ## name: http
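
For reference, below is a minimal standalone sketch of how the InfinityFloat type introduced in the patches above behaves once the error-handling fix from patch 23/28 ("Fix improper error handling in infinity float (#375)") is applied. This is an illustrative reconstruction, not code from the exporter itself: the package layout, struct names, and sample payload are assumptions, and only the unmarshalling rules (plain numbers pass through, "Infinity"/"-Infinity" map to ±Inf, any other string is rejected) are taken from the diffs.

package main

import (
	"encoding/json"
	"fmt"
	"math"
)

// InfinityFloat mirrors the exporter's custom float type: it accepts plain
// JSON numbers as well as the strings "Infinity" and "-Infinity" that
// Logstash emits for flow metrics such as worker_utilization.
type InfinityFloat float64

func (i *InfinityFloat) UnmarshalJSON(data []byte) error {
	var s string
	if err := json.Unmarshal(data, &s); err == nil {
		switch s {
		case "Infinity":
			*i = InfinityFloat(math.Inf(1))
			return nil
		case "-Infinity":
			*i = InfinityFloat(math.Inf(-1))
			return nil
		}
		// Any other string (e.g. "13.3") is rejected, matching the
		// behaviour after the fix in patch 23/28.
		return fmt.Errorf("invalid string value for InfinityFloat: %s", s)
	}

	var f float64
	if err := json.Unmarshal(data, &f); err != nil {
		return err
	}
	*i = InfinityFloat(f)
	return nil
}

func main() {
	// A trimmed-down flow section, shaped like the fixtures/node_stats.json
	// snippet changed above (values here are illustrative).
	payload := []byte(`{"worker_utilization": {"current": "-Infinity", "lifetime": 97.74}}`)

	var flow struct {
		WorkerUtilization struct {
			Current  InfinityFloat `json:"current"`
			Lifetime InfinityFloat `json:"lifetime"`
		} `json:"worker_utilization"`
	}

	if err := json.Unmarshal(payload, &flow); err != nil {
		panic(err)
	}
	fmt.Println(flow.WorkerUtilization.Current, flow.WorkerUtilization.Lifetime)
	// Prints: -Inf 97.74
}

The same decoding path is what lets the snapshot test data and fixtures above carry "-Infinity" for worker_utilization.current while the remaining flow fields stay numeric.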