Skip to content

Commit

Permalink
refactor: Extract out stub_status scraper for external use
Browse files Browse the repository at this point in the history
  • Loading branch information
motoki317 committed Nov 28, 2024
1 parent 52fce52 commit e76ea47
Showing 1 changed file with 50 additions and 44 deletions.
94 changes: 50 additions & 44 deletions internal/ingress/metric/collectors/nginx_status.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ limitations under the License.
package collectors

import (
"log"
"fmt"
"regexp"
"strconv"

Expand All @@ -35,7 +35,10 @@ var (
)

type (
NginxStatusScraper struct{}

nginxStatusCollector struct {
scraper NginxStatusScraper
scrapeChan chan scrapeRequest

data *nginxStatusData
Expand All @@ -47,7 +50,7 @@ type (
connections *prometheus.Desc
}

basicStatus struct {
NginxStubStatus struct {
// Active total number of active connections
Active int
// Accepted total number of accepted client connections
Expand All @@ -65,6 +68,49 @@ type (
}
)

// toInt returns the integer value of the submatch at index pos in data,
// or 0 when the index is out of range or the text is not a valid integer.
// It is used on regexp submatch slices, which may be nil when the pattern
// did not match the stub_status page at all.
func toInt(data []string, pos int) int {
	// Bounds check covers the nil/empty slice case too. The original
	// `pos > len(data)` guard was off by one: pos == len(data) would
	// index past the end and panic.
	if pos < 0 || pos >= len(data) {
		return 0
	}
	v, err := strconv.Atoi(data[pos])
	if err != nil {
		// Non-numeric submatch: treat as a zero counter.
		return 0
	}
	return v
}

// parse extracts the connection counters from the raw text of an nginx
// stub_status page and returns them as a NginxStubStatus.
// Counters whose pattern is absent from the page default to 0 via toInt.
func parse(data string) *NginxStubStatus {
	activeMatch := ac.FindStringSubmatch(data)
	// sahr captures accepted, handled and requests in one pattern,
	// hence the three positional reads below.
	countersMatch := sahr.FindStringSubmatch(data)
	readingMatch := reading.FindStringSubmatch(data)
	writingMatch := writing.FindStringSubmatch(data)
	waitingMatch := waiting.FindStringSubmatch(data)

	return &NginxStubStatus{
		toInt(activeMatch, 1),   // Active
		toInt(countersMatch, 1), // Accepted
		toInt(countersMatch, 2), // Handled
		toInt(countersMatch, 3), // Requests
		toInt(readingMatch, 1),  // Reading
		toInt(writingMatch, 1),  // Writing
		toInt(waitingMatch, 1),  // Waiting
	}
}

// Scrape fetches the nginx stub_status page over the internal status
// socket and returns the parsed counters. It returns an error when the
// request fails or when the response status is not a success/redirect
// (2xx/3xx) code.
func (s *NginxStatusScraper) Scrape() (*NginxStubStatus, error) {
	klog.V(3).InfoS("starting scraping socket", "path", nginx.StatusPath)
	code, body, err := nginx.NewGetStatusRequest(nginx.StatusPath)
	if err != nil {
		return nil, fmt.Errorf("obtaining nginx status info: %w", err)
	}
	if !(code >= 200 && code < 400) {
		return nil, fmt.Errorf("obtaining nginx status info (status %v)", code)
	}
	return parse(string(body)), nil
}

// NGINXStatusCollector defines a status collector interface
type NGINXStatusCollector interface {
prometheus.Collector
Expand Down Expand Up @@ -131,54 +177,14 @@ func (p nginxStatusCollector) Stop() {
close(p.scrapeChan)
}

// toInt returns the integer value of the submatch at index pos in data,
// or 0 when the index is out of range or the text is not a valid integer.
// It is used on regexp submatch slices, which may be nil when the pattern
// did not match the stub_status page at all.
func toInt(data []string, pos int) int {
	// Bounds check covers the nil/empty slice case too. The original
	// `pos > len(data)` guard was off by one: pos == len(data) would
	// index past the end and panic.
	if pos < 0 || pos >= len(data) {
		return 0
	}
	v, err := strconv.Atoi(data[pos])
	if err != nil {
		// Non-numeric submatch: treat as a zero counter.
		return 0
	}
	return v
}

// parse extracts the connection counters from the raw text of an nginx
// stub_status page and returns them as a basicStatus.
// Counters whose pattern is absent from the page default to 0 via toInt.
func parse(data string) *basicStatus {
	activeMatch := ac.FindStringSubmatch(data)
	// sahr captures accepted, handled and requests in one pattern,
	// hence the three positional reads below.
	countersMatch := sahr.FindStringSubmatch(data)
	readingMatch := reading.FindStringSubmatch(data)
	writingMatch := writing.FindStringSubmatch(data)
	waitingMatch := waiting.FindStringSubmatch(data)

	return &basicStatus{
		toInt(activeMatch, 1),   // Active
		toInt(countersMatch, 1), // Accepted
		toInt(countersMatch, 2), // Handled
		toInt(countersMatch, 3), // Requests
		toInt(readingMatch, 1),  // Reading
		toInt(writingMatch, 1),  // Writing
		toInt(waitingMatch, 1),  // Waiting
	}
}

// nginxStatusCollector scrape the nginx status
func (p nginxStatusCollector) scrape(ch chan<- prometheus.Metric) {
klog.V(3).InfoS("starting scraping socket", "path", nginx.StatusPath)
status, data, err := nginx.NewGetStatusRequest(nginx.StatusPath)
s, err := p.scraper.Scrape()
if err != nil {
log.Printf("%v", err)
klog.Warningf("unexpected error obtaining nginx status info: %v", err)
klog.Warningf("failed to scrape nginx status: %v", err)
return
}

if status < 200 || status >= 400 {
klog.Warningf("unexpected error obtaining nginx status info (status %v)", status)
return
}

s := parse(string(data))

ch <- prometheus.MustNewConstMetric(p.data.connectionsTotal,
prometheus.CounterValue, float64(s.Accepted), "accepted")
ch <- prometheus.MustNewConstMetric(p.data.connectionsTotal,
Expand Down

0 comments on commit e76ea47

Please sign in to comment.