Skip to content

Commit

Permalink
deps!: bump github.com/prometheus/exporter-toolkit from 0.11.0 to 0.13.0 (#596)
Browse files Browse the repository at this point in the history

* deps: bump github.com/prometheus/exporter-toolkit from 0.11.0 to 0.13.0

Bumps [github.com/prometheus/exporter-toolkit](https://github.com/prometheus/exporter-toolkit) from 0.11.0 to 0.13.0.
- [Release notes](https://github.com/prometheus/exporter-toolkit/releases)
- [Changelog](https://github.com/prometheus/exporter-toolkit/blob/master/CHANGELOG.md)
- [Commits](https://github.com/prometheus/exporter-toolkit/compare/v0.11.0...v0.13.0)

---
updated-dependencies:
- dependency-name: github.com/prometheus/exporter-toolkit
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <[email protected]>

* refactor: migrate to log/slog

* refactor: extract logging logic to log.go, adjust loglevels

* fix: refactoring issues

* chore: go mod tidy

* style: set to SlogStyle

* fix: minor log changes

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergei Zyubin <[email protected]>
Co-authored-by: Sergei Zyubin <[email protected]>
  • Loading branch information
3 people authored Oct 26, 2024
1 parent d1db481 commit fd44637
Show file tree
Hide file tree
Showing 14 changed files with 153 additions and 119 deletions.
60 changes: 60 additions & 0 deletions cmd/sql_exporter/log.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
package main

import (
"fmt"
"log/slog"
"os"

"github.com/prometheus/common/promslog"
)

// logConfig bundles the configured structured logger with the optional
// log file handle so the caller can release the file on shutdown.
type logConfig struct {
	logger         *slog.Logger // structured logger produced by promslog.New
	logFileHandler *os.File     // log destination; the caller closes it when done
}

// initLogFile opens the given log file for appending, creating it if it
// does not exist. An empty path means "no log file": it returns a nil
// handle and no error, and the caller is expected to fall back to stderr.
func initLogFile(logFile string) (*os.File, error) {
	if logFile == "" {
		return nil, nil
	}

	const flags = os.O_WRONLY | os.O_CREATE | os.O_APPEND
	f, err := os.OpenFile(logFile, flags, 0o644)
	if err != nil {
		return nil, fmt.Errorf("error opening log file: %w", err)
	}
	return f, nil
}

// initLogConfig configures and initializes the logging system.
//
// logLevel and logFormat are passed through to promslog (e.g. "info",
// "logfmt"); logFile selects an output file, with stderr used when empty.
// On success the returned logConfig owns the file handle — it is nil when
// logging to stderr, so callers that close a non-nil handle on shutdown
// never close os.Stderr.
func initLogConfig(logLevel, logFormat string, logFile string) (*logConfig, error) {
	logFileHandler, err := initLogFile(logFile)
	if err != nil {
		return nil, err
	}

	// Write to the opened file if one was requested, otherwise to stderr.
	// Keep logFileHandler nil in the stderr case so the caller's
	// close-if-non-nil cleanup does not close os.Stderr.
	out := os.Stderr
	if logFileHandler != nil {
		out = logFileHandler
	}

	promslogConfig := &promslog.Config{
		Level:  &promslog.AllowedLevel{},
		Format: &promslog.AllowedFormat{},
		Style:  promslog.SlogStyle,
		Writer: out,
	}

	// closeOnErr releases an opened log file on early-exit paths so a bad
	// level/format flag does not leak the file descriptor.
	closeOnErr := func(err error) (*logConfig, error) {
		if logFileHandler != nil {
			logFileHandler.Close()
		}
		return nil, err
	}

	if err := promslogConfig.Level.Set(logLevel); err != nil {
		return closeOnErr(err)
	}

	if err := promslogConfig.Format.Set(logFormat); err != nil {
		return closeOnErr(err)
	}

	// Initialize logger.
	logger := promslog.New(promslogConfig)

	return &logConfig{
		logger:         logger,
		logFileHandler: logFileHandler,
	}, nil
}
57 changes: 22 additions & 35 deletions cmd/sql_exporter/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package main
import (
"flag"
"fmt"
"log/slog"
"net/http"
"os"
"os/signal"
Expand All @@ -12,23 +13,19 @@ import (

"github.com/burningalchemist/sql_exporter"
cfg "github.com/burningalchemist/sql_exporter/config"
"github.com/go-kit/log"
_ "github.com/kardianos/minwinsvc"
"github.com/prometheus/client_golang/prometheus"
info "github.com/prometheus/client_golang/prometheus/collectors/version"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/prometheus/common/model"
"github.com/prometheus/common/promlog"
"github.com/prometheus/common/version"
"github.com/prometheus/exporter-toolkit/web"
"k8s.io/klog/v2"
)

const (
appName string = "sql_exporter"

httpReadHeaderTimeout time.Duration = time.Duration(time.Second * 60)
debugMaxLevel klog.Level = 3
)

var (
Expand All @@ -40,6 +37,7 @@ var (
configFile = flag.String("config.file", "sql_exporter.yml", "SQL Exporter configuration file path")
logFormat = flag.String("log.format", "logfmt", "Set log output format")
logLevel = flag.String("log.level", "info", "Set log level")
logFile = flag.String("log.file", "", "Log file to write to, leave empty to write to stderr")
)

func init() {
Expand All @@ -65,21 +63,30 @@ func main() {
}

// Setup logging.
logger, err := setupLogging(*logLevel, *logFormat)
logConfig, err := initLogConfig(*logLevel, *logFormat, *logFile)
if err != nil {
fmt.Printf("Error initializing exporter: %s\n", err)
os.Exit(1)
}

defer func() {
if logConfig.logFileHandler != nil {
logConfig.logFileHandler.Close()
}
}()

slog.SetDefault(logConfig.logger)

// Override the config.file default with the SQLEXPORTER_CONFIG environment variable if set.
if val, ok := os.LookupEnv(cfg.EnvConfigFile); ok {
*configFile = val
}

klog.Warningf("Starting SQL exporter %s %s", version.Info(), version.BuildContext())
slog.Warn("Starting SQL exporter", "versionInfo", version.Info(), "buildContext", version.BuildContext())
exporter, err := sql_exporter.NewExporter(*configFile)
if err != nil {
klog.Fatalf("Error creating exporter: %s", err)
slog.Error("Error creating exporter", "error", err)
os.Exit(1)
}

// Start the scrape_errors_total metric drop ticker if configured.
Expand All @@ -104,16 +111,18 @@ func main() {
if err := web.ListenAndServe(server, &web.FlagConfig{
WebListenAddresses: &([]string{*listenAddress}),
WebConfigFile: webConfigFile, WebSystemdSocket: OfBool(false),
}, logger); err != nil {
klog.Fatal(err)
}, logConfig.logger); err != nil {
slog.Error("Error starting web server", "error", err)
os.Exit(1)

}
}

// reloadHandler returns a handler that reloads collector and target data.
func reloadHandler(e sql_exporter.Exporter, configFile string) func(http.ResponseWriter, *http.Request) {
func reloadHandler(e sql_exporter.Exporter, configFile string) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
if err := sql_exporter.Reload(e, &configFile); err != nil {
klog.Error(err)
slog.Error("Error reloading collector and target data", "error", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
Expand All @@ -128,7 +137,7 @@ func signalHandler(e sql_exporter.Exporter, configFile string) {
go func() {
for range c {
if err := sql_exporter.Reload(e, &configFile); err != nil {
klog.Error(err)
slog.Error("Error reloading collector and target data", "error", err)
}
}
}()
Expand All @@ -141,33 +150,11 @@ func startScrapeErrorsDropTicker(exporter sql_exporter.Exporter, interval model.
}

ticker := time.NewTicker(time.Duration(interval))
klog.Warning("Started scrape_errors_total metrics drop ticker: ", interval)
slog.Warn("Started scrape_errors_total metrics drop ticker", "interval", interval)
go func() {
defer ticker.Stop()
for range ticker.C {
exporter.DropErrorMetrics()
}
}()
}

// setupLogging configures and initializes the logging system.
func setupLogging(logLevel, logFormat string) (log.Logger, error) {
promlogConfig := &promlog.Config{
Level: &promlog.AllowedLevel{},
Format: &promlog.AllowedFormat{},
}

if err := promlogConfig.Level.Set(logLevel); err != nil {
return nil, err
}

if err := promlogConfig.Format.Set(logFormat); err != nil {
return nil, err
}
// Overriding the default klog with our go-kit klog implementation.
logger := promlog.New(promlogConfig)
klog.SetLogger(logger)
klog.ClampLevel(debugMaxLevel)

return logger, nil
}
23 changes: 12 additions & 11 deletions cmd/sql_exporter/promhttp.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@ import (
"context"
"errors"
"io"
"log/slog"
"net/http"
"strconv"
"time"

"github.com/burningalchemist/sql_exporter"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/expfmt"
"k8s.io/klog/v2"
)

const (
Expand Down Expand Up @@ -46,16 +46,17 @@ func ExporterHandlerFor(exporter sql_exporter.Exporter) http.Handler {
case prometheus.MultiError:
for _, err := range t {
if errors.Is(err, context.DeadlineExceeded) {
klog.Errorf("%s: timeout collecting metrics", err)
slog.Error("Timeout while collecting metrics", "error", err)

} else {
klog.Errorf("Error gathering metrics: %s", err)
slog.Error("Error gathering metrics", "error", err)
}
}
default:
klog.Errorf("Error gathering metrics: %s", err)
slog.Error("Error gathering metrics", "error", err)
}
if len(mfs) == 0 {
klog.Errorf("%s: %s", noMetricsGathered, err)
slog.Error("No metrics gathered", "error", err)
http.Error(w, noMetricsGathered+", "+err.Error(), http.StatusInternalServerError)
return
}
Expand All @@ -70,14 +71,15 @@ func ExporterHandlerFor(exporter sql_exporter.Exporter) http.Handler {
for _, mf := range mfs {
if err := enc.Encode(mf); err != nil {
errs = append(errs, err)
klog.Errorf("Error encoding metric family %q: %s", mf.GetName(), err)
slog.Error("Error encoding metric family", "name", mf.GetName(), "error", err)

}
}
if closer, ok := writer.(io.Closer); ok {
closer.Close()
}
if errs.MaybeUnwrap() != nil && buf.Len() == 0 {
klog.Errorf("%s: %s", noMetricsEncoded, errs)
slog.Error("No metrics encoded", "error", errs)
http.Error(w, noMetricsEncoded+", "+errs.Error(), http.StatusInternalServerError)
return
}
Expand All @@ -100,18 +102,17 @@ func contextFor(req *http.Request, exporter sql_exporter.Exporter) (context.Cont
if err != nil {
switch {
case errors.Is(err, strconv.ErrSyntax):
klog.Errorf("%s: unsupported value", prometheusHeaderErr)
slog.Error("Failed to parse timeout from Prometheus header", "error", err)
case errors.Is(err, strconv.ErrRange):
klog.Errorf("%s: value out of range", prometheusHeaderErr)
slog.Error(prometheusHeaderErr, "error", err)
}
} else {
timeout = time.Duration(timeoutSeconds * float64(time.Second))

// Subtract the timeout offset, unless the result would be negative or zero.
timeoutOffset := time.Duration(exporter.Config().Globals.TimeoutOffset)
if timeoutOffset > timeout {
klog.Errorf("global.scrape_timeout_offset (`%s`) is greater than Prometheus' scraping timeout (`%s`), ignoring",
timeoutOffset, timeout)
slog.Error("global.scrape_timeout_offset is greater than Prometheus' scraping timeout, ignoring", "timeout", timeout, "timeoutOffset", timeoutOffset)
} else {
timeout -= timeoutOffset
}
Expand Down
14 changes: 6 additions & 8 deletions collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@ import (
"context"
"database/sql"
"fmt"
"log/slog"
"sync"
"time"

"github.com/burningalchemist/sql_exporter/config"
"github.com/burningalchemist/sql_exporter/errors"
dto "github.com/prometheus/client_model/go"
"k8s.io/klog/v2"
)

// Collector is a self-contained group of SQL queries and metric families to collect from a specific database. It is
Expand Down Expand Up @@ -64,7 +64,7 @@ func NewCollector(logContext string, cc *config.CollectorConfig, constLabels []*
logContext: logContext,
}
if c.config.MinInterval > 0 {
klog.V(2).Infof("[%s] Non-zero min_interval (%s), using cached collector.", logContext, c.config.MinInterval)
slog.Warn("Non-zero min_interval, using cached collector.", "logContext", logContext, "min_interval", c.config.MinInterval)
return newCachingCollector(&c), nil
}
return &c, nil
Expand Down Expand Up @@ -114,15 +114,14 @@ func (cc *cachingCollector) Collect(ctx context.Context, conn *sql.DB, ch chan<-
ch <- NewInvalidMetric(errors.Wrap(cc.rawColl.logContext, ctx.Err()))
return
}
klog.Infof("Cache size: %d", len(cc.cache))
slog.Debug("Cache size", "length", len(cc.cache))
collTime := time.Now()
select {
case cacheTime := <-cc.cacheSem:
// Have the lock.
if age := collTime.Sub(cacheTime); age > cc.minInterval || len(cc.cache) == 0 {
// Cache contents are older than minInterval, collect fresh metrics, cache them and pipe them through.
klog.V(2).Infof("[%s] Collecting fresh metrics: min_interval=%.3fs cache_age=%.3fs",
cc.rawColl.logContext, cc.minInterval.Seconds(), age.Seconds())
slog.Debug("Collecting fresh metrics", "logContext", cc.rawColl.logContext, "min_interval", cc.minInterval.Seconds(), "cache_age", age.Seconds())
cacheChan := make(chan Metric, capMetricChan)
cc.cache = make([]Metric, 0, len(cc.cache))
go func() {
Expand All @@ -132,7 +131,7 @@ func (cc *cachingCollector) Collect(ctx context.Context, conn *sql.DB, ch chan<-
for metric := range cacheChan {
// catch invalid metrics and return them immediately, don't cache them
if ctx.Err() != nil {
klog.V(2).Infof("[%s] Context closed, returning invalid metric", cc.rawColl.logContext)
slog.Debug("Context closed, returning invalid metric", "logContext", cc.rawColl.logContext)
ch <- NewInvalidMetric(errors.Wrap(cc.rawColl.logContext, ctx.Err()))
continue
}
Expand All @@ -142,8 +141,7 @@ func (cc *cachingCollector) Collect(ctx context.Context, conn *sql.DB, ch chan<-
}
cacheTime = collTime
} else {
klog.V(2).Infof("[%s] Returning cached metrics: min_interval=%.3fs cache_age=%.3fs",
cc.rawColl.logContext, cc.minInterval.Seconds(), age.Seconds())
slog.Debug("Returning cached metrics", "logContext", cc.rawColl.logContext, "min_interval", cc.minInterval.Seconds(), "cache_age", age.Seconds())
for _, metric := range cc.cache {
ch <- metric
}
Expand Down
8 changes: 4 additions & 4 deletions config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@ package config
import (
"context"
"fmt"
"log/slog"
"os"
"path/filepath"

"github.com/sethvargo/go-envconfig"
"gopkg.in/yaml.v3"
"k8s.io/klog/v2"
)

// MaxInt32 defines the maximum value of allowed integers
Expand All @@ -32,7 +32,7 @@ var (

// Load attempts to parse the given config file and return a Config object.
func Load(configFile string) (*Config, error) {
klog.Infof("Loading configuration from %s", configFile)
slog.Debug("Loading configuration", "file", configFile)
buf, err := os.ReadFile(configFile)
if err != nil {
return nil, err
Expand Down Expand Up @@ -185,7 +185,7 @@ func (c *Config) loadCollectorFiles() error {

// Resolve the glob to actual filenames.
cfs, err := filepath.Glob(cfglob)
klog.Infof("External collector files found: %v", len(cfs))
slog.Debug("External collector files found", "count", len(cfs), "glob", cfglob)
if err != nil {
// The only error can be a bad pattern.
return fmt.Errorf("error resolving collector files for %s: %w", cfglob, err)
Expand All @@ -205,7 +205,7 @@ func (c *Config) loadCollectorFiles() error {
}

c.Collectors = append(c.Collectors, &cc)
klog.Infof("Loaded collector '%s' from %s", cc.Name, cf)
slog.Debug("Loaded collector", "name", cc.Name, "file", cf)
}
}

Expand Down
Loading

0 comments on commit fd44637

Please sign in to comment.