chore(exporter): rework exporter to fix wrong histogram usage and cache metric data

Author: Tom Neuber, 2025-01-10 14:33:44 +01:00
parent 4d2a7acebc
commit 8b7f45563a
Signed by: tom (GPG key ID: F17EFE4272D89FF6)
6 changed files with 180 additions and 312 deletions
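Only the main.go diff is reproduced below; the reworked exporter package itself is among the other five changed files and is not shown here. For orientation, the following is a minimal, self-contained sketch of the pattern main.go now relies on: point-in-time values go into gauges instead of a misused histogram, the expensive values are cached and refreshed by a ticker-driven Collect() rather than recomputed on every scrape, and the metrics endpoint runs on its own server. The method names (NewExporter, Collect, Middleware, Start) mirror the calls in main.go, but everything else — metric names, struct fields, and the countDatabaseEntries helper — is an assumption for illustration, not the actual implementation.

// Hypothetical sketch only; not the exporter package from this commit.
package main

import (
	"log"
	"net/http"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

type Exporter struct {
	registry        *prometheus.Registry
	dbEntries       prometheus.Gauge     // point-in-time value: a gauge, not a histogram
	requestDuration prometheus.Histogram // per-request observations: a histogram fits here
	addr            string
}

func NewExporter(addr string) *Exporter {
	e := &Exporter{
		registry: prometheus.NewRegistry(),
		dbEntries: prometheus.NewGauge(prometheus.GaugeOpts{
			Name: "geoloc_database_entries", // assumed metric name
			Help: "Number of entries currently loaded, refreshed by Collect().",
		}),
		requestDuration: prometheus.NewHistogram(prometheus.HistogramOpts{
			Name:    "geoloc_request_duration_seconds", // assumed metric name
			Help:    "Duration of handled HTTP requests.",
			Buckets: prometheus.DefBuckets,
		}),
		addr: addr,
	}
	e.registry.MustRegister(e.dbEntries, e.requestDuration)
	return e
}

// Collect refreshes the cached gauge values; main.go drives this from a ticker
// so expensive lookups do not run on every scrape.
func (e *Exporter) Collect() {
	e.dbEntries.Set(float64(countDatabaseEntries()))
}

// Middleware records the duration of every request passing through the router.
func (e *Exporter) Middleware() func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			start := time.Now()
			next.ServeHTTP(w, r)
			e.requestDuration.Observe(time.Since(start).Seconds())
		})
	}
}

// Start serves the metrics endpoint, mirroring exporter.Start() in main.go.
func (e *Exporter) Start() error {
	srv := &http.Server{
		Addr:              e.addr,
		Handler:           promhttp.HandlerFor(e.registry, promhttp.HandlerOpts{}),
		ReadHeaderTimeout: 5 * time.Second,
	}
	return srv.ListenAndServe()
}

// countDatabaseEntries is a stand-in for a real database query.
func countDatabaseEntries() int { return 42 }

func main() {
	e := NewExporter(":9090")
	ticker := time.NewTicker(30 * time.Second)
	go func() {
		for range ticker.C {
			e.Collect()
		}
	}()
	log.Fatal(e.Start())
}

Caching the values this way keeps /metrics scrapes cheap and makes the refresh interval (config.ExporterInterval in main.go below) an explicit knob.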

main.go (63 changed lines)

@@ -6,6 +6,7 @@ import (
 	"os"
 	"os/signal"
 	"syscall"
+	"time"
 	apiv1 "git.ar21.de/yolokube/country-geo-locations/api/v1"
 	"git.ar21.de/yolokube/country-geo-locations/internal/cache"
@@ -17,29 +18,24 @@ import (
 	"github.com/go-chi/chi/v5"
 	"github.com/go-chi/chi/v5/middleware"
 	"github.com/go-chi/render"
-	"github.com/prometheus/client_golang/prometheus"
-	"github.com/prometheus/client_golang/prometheus/promhttp"
 )
 func main() {
 	cli := cmd.CLI{}
-	queue := exporter.NewRequestDataQueue()
-	appSettings, err := cli.Parse()
+	config, err := cli.Parse()
 	if err != nil {
 		log.Fatal(err)
 	}
 	handleGracefulShutdown()
-	exporterMiddleware := exporter.NewMiddleware(queue)
 	r := chi.NewRouter()
 	r.Use(middleware.RequestID)
 	r.Use(middleware.Logger)
 	r.Use(middleware.Recoverer)
-	r.Use(exporterMiddleware)
 	r.Use(render.SetContentType(render.ContentTypeJSON))
-	ctx := downloader.NewContext(appSettings.DataFile, appSettings.DataURL)
+	ctx := downloader.NewContext(config.DataFile, config.DataURL)
 	if !ctx.FileExists() {
 		if downloadErr := ctx.Download(); downloadErr != nil {
 			log.Fatal(downloadErr)
@@ -47,24 +43,42 @@ func main() {
 		log.Printf("saved file to %s\n", ctx.Filename)
 	}
-	cache := cache.NewCache(appSettings.CacheTTL)
-	db, err := database.NewDatabase(appSettings)
+	cache := cache.NewCache(config.CacheTTL)
+	db, err := database.NewDatabase(config)
 	if err != nil {
 		log.Fatal("database creation failed", err)
 	}
-	if appSettings.EnableExporter {
+	if config.EnableExporter {
+		exporter := exporter.NewExporter(config, cache, db)
+		r.Use(exporter.Middleware())
+		ticker := time.NewTicker(config.ExporterInterval)
+		exit := make(chan struct{})
 		go func() {
-			err = enableExporter(appSettings, cache, db, queue)
+			for {
+				select {
+				case <-ticker.C:
+					exporter.Collect()
+				case <-exit:
+					ticker.Stop()
+					return
+				}
+			}
+		}()
+		log.Println("prometheus exporter refreshes metric data every", config.ExporterInterval)
+		go func() {
+			err = exporter.Start()
 			if err != nil {
 				log.Panic(err)
 			}
 		}()
-		log.Println("prometheus exporter started at", appSettings.ExporterAddress)
+		log.Println("prometheus exporter started at", config.ExporterAddress)
 	}
-	log.Println("importing data from file", appSettings.DataFile)
-	err = csvimporter.ImportCSV(appSettings.DataFile, db)
+	log.Println("importing data from file", config.DataFile)
+	err = csvimporter.ImportCSV(config.DataFile, db)
 	if err != nil {
 		log.Fatal("data Import from file failed", err)
 	}
@@ -74,9 +88,9 @@ func main() {
 	r.Mount("/api/v1", apiv1.NewRouter(lh))
 	server := &http.Server{
-		Addr: appSettings.ServerAddress,
+		Addr: config.ServerAddress,
 		Handler: r,
-		ReadHeaderTimeout: appSettings.ReadHeaderTimeout,
+		ReadHeaderTimeout: config.ReadHeaderTimeout,
 	}
 	log.Println("starting server at", server.Addr)
@@ -85,23 +99,6 @@ func main() {
 	}
 }
-func enableExporter(
-	settings *cmd.AppSettings,
-	cache *cache.Cache,
-	db *database.Database,
-	queue *exporter.RequestDataQueue,
-) error {
-	prometheus.MustRegister(exporter.NewCollector(settings, cache, db, queue))
-	metricsServer := &http.Server{
-		Addr: settings.ExporterAddress,
-		Handler: promhttp.Handler(),
-		ReadHeaderTimeout: settings.ReadHeaderTimeout,
-	}
-	return metricsServer.ListenAndServe()
-}
 func handleGracefulShutdown() {
 	var signals = make(chan os.Signal, 1)