Skip to content

Commit

Permalink
feat: logging unification via zerolog, continued
Browse files Browse the repository at this point in the history
  • Loading branch information
d-led committed Nov 20, 2024
1 parent 1bca473 commit 5d5e08d
Show file tree
Hide file tree
Showing 8 changed files with 82 additions and 75 deletions.
8 changes: 4 additions & 4 deletions cmd/check.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"log"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"

"github.com/siemens/link-checker-service/infrastructure"
"github.com/spf13/cobra"
Expand All @@ -24,14 +24,14 @@ var checkCmd = &cobra.Command{
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
// disable logging so that there's only JSON output in the console
log.SetOutput(io.Discard)
zerolog.SetGlobalLevel(zerolog.Disabled)
checker := infrastructure.NewURLCheckerClient()
checkResult := checker.CheckURL(context.Background(), args[0])

// prints a JSON-formatted raw check result representation
b, err := json.MarshalIndent(checkResult, "", " ")
if err != nil {
log.Fatal(fmt.Errorf("ERROR: %v", err))
log.Fatal().Err(err).Msg("check command failed")
}
fmt.Println(string(b))
},
Expand Down
13 changes: 7 additions & 6 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,12 @@ package cmd

import (
"fmt"
"log"
"os"
"path/filepath"
"strings"

"github.com/rs/zerolog/log"

"github.com/siemens/link-checker-service/infrastructure"

"github.com/spf13/cobra"
Expand Down Expand Up @@ -177,24 +178,24 @@ func initConfig() {
}

func echoConfig() {
log.Printf("Link Checker Service. Version: %v", infrastructure.BinaryVersion())
log.Info().Msgf("Link Checker Service. Version: %v", infrastructure.BinaryVersion())

if viper.ConfigFileUsed() != "" {
log.Printf("Using config file: %v", viper.ConfigFileUsed())
log.Info().Msgf("Using config file: %v", viper.ConfigFileUsed())
}

proxyURL := viper.GetString(proxyKey)
if proxyURL != "" {
log.Printf("Proxy: %v", proxyURL)
log.Info().Msgf("Proxy: %v", proxyURL)
}

maxConcurrency := viper.GetUint(maxConcurrentHTTPRequestsKey)
if maxConcurrency > 0 {
log.Printf("Max HTTP concurrency: %v", maxConcurrency)
log.Info().Msgf("Max HTTP concurrency: %v", maxConcurrency)
}

globCount := len(domainBlacklistGlobs)
if globCount > 0 {
log.Printf("Domain blacklist globs (%v): %v", globCount, domainBlacklistGlobs)
log.Info().Msgf("Domain blacklist globs (%v): %v", globCount, domainBlacklistGlobs)
}
}
21 changes: 11 additions & 10 deletions infrastructure/cached_url_checker.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@ package infrastructure

import (
"context"
"log"
"time"

"github.com/rs/zerolog/log"

"github.com/spf13/viper"
)

Expand Down Expand Up @@ -54,22 +55,22 @@ func fetchCachedURLCheckerSettings() cacheSettings {

cacheExpirationInterval := viper.GetString("cacheExpirationInterval")
if d, err := time.ParseDuration(cacheExpirationInterval); err != nil {
log.Printf("Ignoring cacheExpirationInterval %v -> %v (%v)", cacheExpirationInterval, defaultCacheExpirationInterval, err)
log.Info().Msgf("Ignoring cacheExpirationInterval %v -> %v (%v)", cacheExpirationInterval, defaultCacheExpirationInterval, err)
} else {
s.cacheExpirationInterval = d
log.Printf("cacheExpirationInterval: %v", cacheExpirationInterval)
log.Info().Msgf("cacheExpirationInterval: %v", cacheExpirationInterval)
}

cacheCleanupInterval := viper.GetString("cacheCleanupInterval")
if d, err := time.ParseDuration(cacheCleanupInterval); err != nil {
log.Printf("Ignoring cacheCleanupInterval %v -> %v (%v)", cacheCleanupInterval, defaultCacheCleanupInterval, err)
log.Info().Msgf("Ignoring cacheCleanupInterval %v -> %v (%v)", cacheCleanupInterval, defaultCacheCleanupInterval, err)
} else {
log.Printf("cacheCleanupInterval: %v", cacheCleanupInterval)
log.Info().Msgf("cacheCleanupInterval: %v", cacheCleanupInterval)
s.cacheCleanupInterval = d
}

cacheUseRistretto := viper.GetBool("cacheUseRistretto")
log.Printf("cacheUseRistretto: %v", cacheUseRistretto)
log.Info().Msgf("cacheUseRistretto: %v", cacheUseRistretto)
s.cacheUseRistretto = cacheUseRistretto

cacheMaxSize := defaultCacheMaxSize
Expand All @@ -85,15 +86,15 @@ func fetchCachedURLCheckerSettings() cacheSettings {
s.cacheNumCounters = cacheNumCounters

if cacheUseRistretto {
log.Printf("cacheMaxSize: %v", cacheMaxSize)
log.Printf("cacheNumCounters: %v", cacheNumCounters)
log.Info().Msgf("cacheMaxSize: %v", cacheMaxSize)
log.Info().Msgf("cacheNumCounters: %v", cacheNumCounters)
}

retryFailedAfter := viper.GetString("retryFailedAfter")
if d, err := time.ParseDuration(retryFailedAfter); err != nil {
log.Printf("Ignoring retryFailedAfter %v -> %v (%v)", cacheCleanupInterval, defaultRetryFailedAfter, err)
log.Info().Msgf("Ignoring retryFailedAfter %v -> %v (%v)", cacheCleanupInterval, defaultRetryFailedAfter, err)
} else {
log.Printf("retryFailedAfter: %v", retryFailedAfter)
log.Info().Msgf("retryFailedAfter: %v", retryFailedAfter)
s.retryFailedAfter = d
}
return s
Expand Down
9 changes: 5 additions & 4 deletions infrastructure/cc_limited_url_checker.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ package infrastructure
import (
"context"
"fmt"
"log"
"time"

"github.com/rs/zerolog/log"

"golang.org/x/time/rate"

"github.com/spf13/viper"
Expand Down Expand Up @@ -46,7 +47,7 @@ func NewCCLimitedURLChecker() *CCLimitedURLChecker {
core.EmptyMetricRegistryInstance,
)
if err != nil {
log.Fatalf("Error creating limiter err=%v\n", err)
log.Fatal().Err(err).Msg("Error creating limiter")
}
guard := limiter.NewBlockingLimiter(defaultLimiter, 0, nil /*logger*/)
ratePerSecond := getDomainRatePerSecond()
Expand All @@ -68,7 +69,7 @@ func getDomainRatePerSecond() rate.Limit {
func getMaxConcurrentRequests() int {
maxConcurrency := viper.GetUint("maxConcurrentHTTPRequests")
if maxConcurrency > 0 {
log.Printf("CCLimitedURLChecker is using max HTTP concurrency of %v", maxConcurrency)
log.Info().Msgf("CCLimitedURLChecker is using max HTTP concurrency of %v", maxConcurrency)
}
return defaultMaxConcurrentRequests
}
Expand All @@ -87,7 +88,7 @@ func (r *CCLimitedURLChecker) checkURL(ctx context.Context, url string) *URLChec
token, ok := r.guard.Acquire(ctx)
if !ok {
// short-circuited - no need to try
log.Printf("guarded request short circuited for url '%v'\n", sanitizeUserLogInput(url))
log.Info().Msgf("guarded request short circuited for url '%v'\n", sanitizeUserLogInput(url))
if token != nil {
token.OnDropped()
}
Expand Down
5 changes: 3 additions & 2 deletions infrastructure/domain_rate_limited_checker.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,11 @@ package infrastructure
import (
"context"
"fmt"
"log"
"sync"
"time"

"github.com/rs/zerolog/log"

"golang.org/x/time/rate"
)

Expand All @@ -26,7 +27,7 @@ type DomainRateLimitedChecker struct {
// NewDomainRateLimitedChecker Creates a new domain-rate-limited URLCheckerClient instance
func NewDomainRateLimitedChecker(ratePerSecond rate.Limit) *DomainRateLimitedChecker {
if ratePerSecond > 0 {
log.Printf("Limiting amount of requests per domain to %v/s", ratePerSecond)
log.Info().Msgf("Limiting amount of requests per domain to %v/s", ratePerSecond)
}
return &DomainRateLimitedChecker{
domains: sync.Map{},
Expand Down
45 changes: 23 additions & 22 deletions infrastructure/url_checker.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,12 @@ import (
"crypto/tls"
"fmt"
"io"
"log"
"net"
"net/http"
"net/http/httptrace"

"github.com/rs/zerolog/log"

"regexp"
"strings"
"time"
Expand Down Expand Up @@ -113,13 +114,13 @@ func NewURLCheckerClient() *URLCheckerClient {
case "urlcheck":
// default client
checkers = addChecker(checkers, newLocalURLChecker(c, "urlcheck", buildClient(urlCheckerSettings)))
log.Println("Added the default URL checker")
log.Info().Msg("Added the default URL checker")
case "urlcheck-pac":
if c.settings.PacScriptURL == "" {
panic("Cannot instantiate a 'urlcheck-pac' checker without a proxy auto-config script configured")
}
checkers = addChecker(checkers, newLocalURLChecker(c, "urlcheck-pac", nil))
log.Println("Added the PAC file based URL checker")
log.Info().Msg("Added the PAC file based URL checker")
case "urlcheck-noproxy":
// if proxy is defined, add one without the proxy as fallback
if urlCheckerSettings.ProxyURL == "" {
Expand All @@ -129,7 +130,7 @@ func NewURLCheckerClient() *URLCheckerClient {
urlCheckerSettingsNoProxy := urlCheckerSettings
urlCheckerSettingsNoProxy.ProxyURL = ""
checkers = addChecker(checkers, newLocalURLChecker(c, "urlcheck-noproxy", buildClient(urlCheckerSettingsNoProxy)))
log.Println("Added the URL checker that doesn't use a proxy")
log.Info().Msg("Added the URL checker that doesn't use a proxy")
case "_ok_after_1s_on_delay.com":
// fake client for testing
checkers = addChecker(checkers, &fakeURLChecker{1 * time.Second, &URLCheckResult{
Expand All @@ -140,7 +141,7 @@ func NewURLCheckerClient() *URLCheckerClient {
BodyPatternsFound: nil,
RemoteAddr: "",
}, "_ok_after_1s_on_delay.com"})
log.Println("Added the _always_ok checker")
log.Info().Msg("Added the _always_ok checker")
case "_always_ok":
// fake client for testing
checkers = addChecker(checkers, &fakeURLChecker{0, &URLCheckResult{
Expand All @@ -151,7 +152,7 @@ func NewURLCheckerClient() *URLCheckerClient {
BodyPatternsFound: nil,
RemoteAddr: "",
}, "_always_ok"})
log.Println("Added the _always_ok checker")
log.Info().Msg("Added the _always_ok checker")
case "_always_bad":
// fake client for testing
checkers = addChecker(checkers, &fakeURLChecker{0, &URLCheckResult{
Expand All @@ -162,7 +163,7 @@ func NewURLCheckerClient() *URLCheckerClient {
BodyPatternsFound: nil,
RemoteAddr: "",
}, "_always_bad"})
log.Println("Added the _always_bad checker")
log.Info().Msg("Added the _always_bad checker")
default:
panic(fmt.Errorf("unknown checker: %v", checkerName))
}
Expand All @@ -182,7 +183,7 @@ func parsePacScript(scriptURL string) *gpac.Parser {
if err != nil {
panic(fmt.Errorf("could not fetch a PAC script from %v: %v", scriptURL, err.Error()))
}
log.Printf("Read PAC script from %v", scriptURL)
log.Info().Msgf("Read PAC script from %v", scriptURL)
script := string(res.Body())
pac, err := gpac.New(script)
if err != nil {
Expand Down Expand Up @@ -212,9 +213,9 @@ func getURLCheckerSettings() urlCheckerSettings {
if proxyURL := viper.GetString("proxy"); proxyURL != "" {
_, err := netUrl.Parse(proxyURL)
if err != nil {
log.Printf("Rejected proxyURL: %v", proxyURL)
log.Warn().Err(err).Msgf("Rejected proxyURL: %v", proxyURL)
} else {
log.Printf("URLCheckerClient is using a proxy: %v", proxyURL)
log.Info().Msgf("URLCheckerClient is using a proxy: %v", proxyURL)
s.ProxyURL = proxyURL
}
}
Expand All @@ -238,20 +239,20 @@ func getURLCheckerSettings() urlCheckerSettings {
s.SkipCertificateCheck = viper.GetBool("HTTPClient.skipCertificateCheck")
s.EnableRequestTracing = viper.GetBool("HTTPClient.enableRequestTracing")

log.Printf("HTTP client MaxRedirectsCount: %v", s.MaxRedirectsCount)
log.Printf("HTTP client TimeoutSeconds: %v", s.TimeoutSeconds)
log.Printf("HTTP client UserAgent: %v", s.UserAgent)
log.Printf("HTTP client BrowserUserAgent: %v", s.BrowserUserAgent)
log.Printf("HTTP client AcceptHeader: %v", s.AcceptHeader)
log.Printf("HTTP client SkipCertificateCheck: %v", s.SkipCertificateCheck)
log.Printf("HTTP client EnableRequestTracing: %v", s.EnableRequestTracing)
log.Printf("HTTP client LimitBodyToNBytes: %v", s.LimitBodyToNBytes)
log.Info().Msgf("HTTP client MaxRedirectsCount: %v", s.MaxRedirectsCount)
log.Info().Msgf("HTTP client TimeoutSeconds: %v", s.TimeoutSeconds)
log.Info().Msgf("HTTP client UserAgent: %v", s.UserAgent)
log.Info().Msgf("HTTP client BrowserUserAgent: %v", s.BrowserUserAgent)
log.Info().Msgf("HTTP client AcceptHeader: %v", s.AcceptHeader)
log.Info().Msgf("HTTP client SkipCertificateCheck: %v", s.SkipCertificateCheck)
log.Info().Msgf("HTTP client EnableRequestTracing: %v", s.EnableRequestTracing)
log.Info().Msgf("HTTP client LimitBodyToNBytes: %v", s.LimitBodyToNBytes)

// advanced configuration feature: only configurable via the config file
s.SearchForBodyPatterns = viper.GetBool("searchForBodyPatterns")

if s.SearchForBodyPatterns {
log.Printf("Will search for regex patterns found in HTTP response bodies")
log.Info().Msg("Will search for regex patterns found in HTTP response bodies")
var configBodyPatterns []BodyPatternConfig
// advanced configuration feature: only configurable via the config file
if err := viper.UnmarshalKey("bodyPatterns", &configBodyPatterns); err == nil {
Expand All @@ -261,7 +262,7 @@ func getURLCheckerSettings() urlCheckerSettings {
name: pattern.Name,
pattern: r,
})
log.Printf("Body search pattern found. Name: '%v', Regex: '%v'", pattern.Name, pattern.Regex)
log.Info().Msgf("Body search pattern found. Name: '%v', Regex: '%v'", pattern.Name, pattern.Regex)
}
}
}
Expand Down Expand Up @@ -361,7 +362,7 @@ func (l *localURLChecker) autoSelectClientFor(urlToCheck string) *resty.Client {
}
}
} else {
log.Printf("Could not find a proxy for %v", sanitizeUserLogInput(urlToCheck))
log.Warn().Msgf("Could not find a proxy for %v", sanitizeUserLogInput(urlToCheck))
}
return buildClient(tmpSettings)
}
Expand Down Expand Up @@ -546,7 +547,7 @@ func (c *URLCheckerClient) resolveAndCacheTCPAddr(network string, err error, add
} else {
GlobalStats().OnDNSResolutionFailed(domain)
c.dnsCache.Set(addrToResolve, "DNS resolution failed", defaultRetryFailedAfter)
log.Printf("ERROR in resolveAndCacheTCPAddr: %v", err)
log.Error().Err(err).Msg("resolveAndCacheTCPAddr")
}
}
return remoteAddr
Expand Down
3 changes: 2 additions & 1 deletion infrastructure/url_checker_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,15 @@ import (
"errors"
"fmt"
"io"
"log"
"net/http"
"net/http/httptest"
"os"
"strings"
"testing"
"time"

"log"

"github.com/stretchr/testify/require"

"github.com/spf13/viper"
Expand Down
Loading

0 comments on commit 5d5e08d

Please sign in to comment.