diff --git a/internal/collector/config.go b/internal/collector/config.go
index 82b6fe74..d7601538 100644
--- a/internal/collector/config.go
+++ b/internal/collector/config.go
@@ -5,14 +5,15 @@ import (
 	"context"
 	"errors"
 	"fmt"
-	"github.com/cherts/pgscv/internal/cache"
-	"github.com/jackc/pgx/v5"
-	"github.com/jackc/pgx/v5/pgxpool"
 	"net"
 	"strconv"
 	"strings"
 	"time"
 
+	"github.com/cherts/pgscv/internal/cache"
+	"github.com/jackc/pgx/v5"
+	"github.com/jackc/pgx/v5/pgxpool"
+
 	"github.com/cherts/pgscv/internal/log"
 	"github.com/cherts/pgscv/internal/model"
 	"github.com/cherts/pgscv/internal/store"
@@ -42,6 +43,7 @@ type Config struct {
 	ConcurrencyLimit *int
 	CacheConfig      *cache.Config
 	DB               *store.DB
+	LogDirectory     string
 }
 
 // postgresServiceConfig defines Postgres-specific stuff required during collecting Postgres metrics.
@@ -59,7 +61,7 @@ type postgresServiceConfig struct {
 	// loggingCollector defines value of 'logging_collector' GUC.
 	loggingCollector bool
 	// logDestination defines value of 'log_destination' GUC.
-	logDestination string
+	logDestination postgresLogsDestination
 	// pgStatStatements defines is pg_stat_statements available in shared_preload_libraries and available for queries.
 	pgStatStatements bool
 	// pgStatStatementsSchema defines the schema name where pg_stat_statements is installed.
@@ -209,7 +211,7 @@ func newPostgresServiceConfig(connStr string, connTimeout int) (postgresServiceC
 		return config, fmt.Errorf("failed to get log_destination setting from pg_settings, %s, please check user grants", err)
 	}
 
-	config.logDestination = setting
+	config.logDestination = postgresLogsDestination(setting)
 
 	// Discover pg_stat_statements.
 	exists, schema, err := discoverPgStatStatements(conn)
diff --git a/internal/collector/postgres_logs.go b/internal/collector/postgres_logs.go
index 5e6934fb..6b46022b 100644
--- a/internal/collector/postgres_logs.go
+++ b/internal/collector/postgres_logs.go
@@ -3,8 +3,10 @@ package collector
 
 import (
 	"context"
+	"encoding/json"
 	"fmt"
 	"io"
+	"path/filepath"
 	"regexp"
 	"strings"
 	"sync"
@@ -15,6 +17,13 @@ import (
 	"github.com/prometheus/client_golang/prometheus"
 )
 
+type postgresLogsDestination string
+
+const (
+	postgresLogsDestinationJSON   postgresLogsDestination = "jsonlog"
+	postgresLogsDestinationStderr postgresLogsDestination = "stderr"
+)
+
 // Current implementation has an issue described here: https://github.com/nxadm/tail/issues/18.
 // When attempting to tail previously tailed logfiles, new messages are not coming from the Lines channel.
 // At the same time, test Test_runTailLoop works as intended and doesn't show the problem.
@@ -27,11 +36,12 @@ type syncKV struct {
 type postgresLogsCollector struct {
 	updateLogfile  chan string // updateLogfile used for notify tail/collect goroutine when logfile has been changed.
 	currentLogfile string      // currentLogfile contains logfile name currently tailed and used for collecting stat.
-	totals   syncKV // totals contains collected stats about total number of log messages.
-	panics   syncKV // panics contains all collected messages with PANIC severity.
-	fatals   syncKV // fatals contains all collected messages with FATAL severity.
-	errors   syncKV // errors contains all collected messages with ERROR severity.
-	warnings syncKV // warnings contains all collected messages with WARNING severity.
+	logDestination postgresLogsDestination
+	totals         syncKV // totals contains collected stats about total number of log messages.
+	panics         syncKV // panics contains all collected messages with PANIC severity.
+	fatals         syncKV // fatals contains all collected messages with FATAL severity.
+	errors         syncKV // errors contains all collected messages with ERROR severity.
+	warnings       syncKV // warnings contains all collected messages with WARNING severity.
 	messagesTotal typedDesc
 	panicMessages typedDesc
 	fatalMessages typedDesc
@@ -109,8 +119,7 @@ func NewPostgresLogsCollector(constLabels labels, settings model.CollectorSettin
 // Update method generates metrics based on collected log messages.
 func (c *postgresLogsCollector) Update(_ context.Context, config Config, ch chan<- prometheus.Metric) error {
 	if !config.localService {
-		log.Debugln("[postgres log collector]: skip collecting metrics from remote services")
-		return nil
+		log.Warnln("[postgres log collector]: collecting metrics from remote services; the Postgres log directory must be mounted locally")
 	}
 
 	if !config.loggingCollector {
@@ -122,11 +131,15 @@
 		return nil
 	}
 
-	if config.logDestination != "stderr" {
-		log.Debugln("[postgres log collector]: PostgreSQL parameter log_destination not set to stderr, log collector disabled")
+	switch config.logDestination {
+	case postgresLogsDestinationStderr, postgresLogsDestinationJSON:
+	default:
+		log.Debugln("[postgres log collector]: PostgreSQL parameter log_destination not set to stderr or jsonlog, log collector disabled")
 		return nil
 	}
 
+	c.logDestination = config.logDestination
+
 	// Notify log collector goroutine if logfile has been changed.
 	logfile, err := queryCurrentLogfile(config)
 	if err != nil {
@@ -221,7 +234,7 @@ func tailCollect(ctx context.Context, logfile string, init bool, wg *sync.WaitGr
 		tailConfig.Location = &tail.SeekInfo{Whence: io.SeekEnd}
 	}
 
-	parser := newLogParser()
+	parser := newLogParser(c.logDestination)
 	log.Infof("starting tail of %s from the %s", logfile, offset)
 	t, err := tail.TailFile(logfile, tailConfig)
 	if err != nil {
@@ -261,22 +274,42 @@ func queryCurrentLogfile(config Config) (string, error) {
 		return "", err
 	}
 
+	if config.LogDirectory != "" {
+		datadir = config.LogDirectory
+	}
+
 	if !strings.HasPrefix(logfile, "/") {
-		logfile = datadir + "/" + logfile
+		logfile = filepath.Join(datadir, logfile)
 	}
 
 	return logfile, nil
 }
 
-// logParser contains set or regexp patterns used for parse log messages.
-type logParser struct {
+type logParser interface {
+	updateMessagesStats(line string, c *postgresLogsCollector)
+}
+
+func newLogParser(format postgresLogsDestination) logParser {
+	if format == postgresLogsDestinationStderr {
+		return newStderrLogParser()
+	}
+
+	if format == postgresLogsDestinationJSON {
+		return newJSONLogParser()
+	}
+
+	return nil
+}
+
+// stderrLogParser contains a set of regexp patterns used to parse log messages.
+type stderrLogParser struct {
 	reSeverity  map[string]*regexp.Regexp // regexp to determine messages severity.
 	reExtract   *regexp.Regexp            // regexp for extracting exact messages from the whole line (drop log_line_prefix stuff).
 	reNormalize []*regexp.Regexp          // regexp for normalizing log message.
 }
 
-// newLogParser creates a new logParser with necessary compiled regexp objects.
-func newLogParser() *logParser {
+// newStderrLogParser creates a new stderrLogParser with the necessary compiled regexp objects.
+func newStderrLogParser() *stderrLogParser {
 	severityPatterns := map[string]string{
 		"log":     `\s?LOG:\s+`,
 		"warning": `\s?WARNING:\s+`,
@@ -290,7 +323,7 @@
 		`(\s+".+?"\s?)`,
 	}
 
-	p := &logParser{
+	p := &stderrLogParser{
 		reSeverity:  map[string]*regexp.Regexp{},
 		reNormalize: make([]*regexp.Regexp, len(normalizePatterns)),
 	}
@@ -309,45 +342,17 @@
 }
 
 // updateMessagesStats process the message string, parse and update stats.
-func (p *logParser) updateMessagesStats(line string, c *postgresLogsCollector) {
+func (p *stderrLogParser) updateMessagesStats(line string, c *postgresLogsCollector) {
 	m, found := p.parseMessageSeverity(line)
 	if !found {
 		return
 	}
 
-	// Update totals.
-	c.totals.mu.Lock()
-	c.totals.store[m]++
-	c.totals.mu.Unlock()
-
-	if m == "log" {
-		return
-	}
-
-	// Message with severity higher than LOG, normalize them and update.
-	normalized := p.normalizeMessage(line)
-	switch m {
-	case "panic":
-		c.panics.mu.Lock()
-		c.panics.store[normalized]++
-		c.panics.mu.Unlock()
-	case "fatal":
-		c.fatals.mu.Lock()
-		c.fatals.store[normalized]++
-		c.fatals.mu.Unlock()
-	case "error":
-		c.errors.mu.Lock()
-		c.errors.store[normalized]++
-		c.errors.mu.Unlock()
-	case "warning":
-		c.warnings.mu.Lock()
-		c.warnings.store[normalized]++
-		c.warnings.mu.Unlock()
-	}
+	updateMessagesStats(m, p.normalizeMessage(line), c)
 }
 
 // parseMessageSeverity accepts lines and parse it using patterns from logParser.
-func (p *logParser) parseMessageSeverity(line string) (string, bool) {
+func (p *stderrLogParser) parseMessageSeverity(line string) (string, bool) {
 	if line == "" {
 		return "", false
 	}
@@ -363,7 +368,7 @@
 }
 
 // normalizeMessage used for normalizing log messages and removing unique elements like names or ids.
-func (p *logParser) normalizeMessage(message string) string {
+func (p *stderrLogParser) normalizeMessage(message string) string {
 	parts := p.reExtract.FindStringSubmatch(message)
 	if len(parts) < 2 {
 		return ""
 	}
@@ -377,3 +382,120 @@
 	return message
 }
+
+// jsonLogParser contains a set of regexp patterns used to normalize log messages.
+type jsonLogParser struct {
+	reNormalize []*regexp.Regexp // regexp for normalizing log message.
+}
+
+// newJSONLogParser creates a new jsonLogParser with the necessary compiled regexp objects.
+func newJSONLogParser() *jsonLogParser {
+	normalizePatterns := []string{
+		`(\s+\d+\s?)`,
+		`(\s+".+?"\s?)`,
+	}
+
+	p := &jsonLogParser{
+		reNormalize: make([]*regexp.Regexp, len(normalizePatterns)),
+	}
+
+	for i, pattern := range normalizePatterns {
+		p.reNormalize[i] = regexp.MustCompile(pattern)
+	}
+
+	return p
+}
+
+type jsonLine struct {
+	ErrorSeverity string `json:"error_severity"`
+	Message       string `json:"message"`
+}
+
+// updateMessagesStats processes the message string, parses it and updates stats.
+func (p *jsonLogParser) updateMessagesStats(line string, c *postgresLogsCollector) {
+	jsonLine, err := p.jsonLine(line)
+	if err != nil {
+		log.Errorln(err)
+		return
+	}
+
+	sev, found := p.parseMessageSeverity(jsonLine)
+	if !found {
+		return
+	}
+
+	updateMessagesStats(sev, p.normalizeMessage(jsonLine), c)
+}
+
+func (p *jsonLogParser) jsonLine(line string) (jsonLine, error) {
+	var parsedLine jsonLine
+	err := json.Unmarshal([]byte(line), &parsedLine)
+	if err != nil {
+		return jsonLine{}, fmt.Errorf("can't unmarshal %s as jsonLine: %w", line, err)
+	}
+
+	return parsedLine, nil
+}
+
+func (p *jsonLogParser) parseMessageSeverity(line jsonLine) (string, bool) {
+	switch line.ErrorSeverity {
+	case "PANIC":
+		return "panic", true
+	case "FATAL":
+		return "fatal", true
+	case "ERROR":
+		return "error", true
+	case "WARNING":
+		return "warning", true
+	case "LOG":
+		return "log", true
+	default:
+		return "", false
+	}
+}
+
+// normalizeMessage normalizes log messages and removes unique elements like names or ids.
+func (p *jsonLogParser) normalizeMessage(jsonLine jsonLine) string {
+	switch jsonLine.ErrorSeverity {
+	case "PANIC", "FATAL", "ERROR", "WARNING":
+	default:
+		return ""
+	}
+
+	message := jsonLine.Message
+	for _, re := range p.reNormalize {
+		message = strings.TrimSpace(re.ReplaceAllString(message, " ? "))
+	}
+
+	return message
+}
+
+func updateMessagesStats(sev, normalized string, c *postgresLogsCollector) {
+	// Update totals.
+	c.totals.mu.Lock()
+	c.totals.store[sev]++
+	c.totals.mu.Unlock()
+
+	if sev == "log" {
+		return
+	}
+
+	switch sev {
+	case "panic":
+		c.panics.mu.Lock()
+		c.panics.store[normalized]++
+		c.panics.mu.Unlock()
+	case "fatal":
+		c.fatals.mu.Lock()
+		c.fatals.store[normalized]++
+		c.fatals.mu.Unlock()
+	case "error":
+		c.errors.mu.Lock()
+		c.errors.store[normalized]++
+		c.errors.mu.Unlock()
+	case "warning":
+		c.warnings.mu.Lock()
+		c.warnings.store[normalized]++
+		c.warnings.mu.Unlock()
+	}
+}
diff --git a/internal/collector/postgres_logs_test.go b/internal/collector/postgres_logs_test.go
index 51b67a10..5ed95bb4 100644
--- a/internal/collector/postgres_logs_test.go
+++ b/internal/collector/postgres_logs_test.go
@@ -4,13 +4,15 @@ import (
 	"bufio"
 	"context"
 	"fmt"
-	"github.com/cherts/pgscv/internal/model"
-	"github.com/cherts/pgscv/internal/store"
-	"github.com/stretchr/testify/assert"
 	"os"
+	"strings"
 	"sync"
 	"testing"
 	"time"
+
+	"github.com/cherts/pgscv/internal/model"
+	"github.com/cherts/pgscv/internal/store"
+	"github.com/stretchr/testify/assert"
 )
 
 func Test_runTailLoop(t *testing.T) {
@@ -18,6 +20,7 @@
 	assert.NoError(t, err)
 	assert.NotNil(t, c)
 	lc := c.(*postgresLogsCollector)
+	lc.logDestination = postgresLogsDestinationStderr
 
 	fname1 := "/tmp/pgscv_postgres_logs_test_sample_1.log"
 	fstr1 := "2020-09-30 14:26:29.777 +05 797922 LOG: PID 0 in cancel request did not match any process\n"
@@ -124,6 +127,7 @@ func Test_tailCollect(t *testing.T) {
 	assert.NoError(t, err)
 	assert.NotNil(t, c)
 	lc := c.(*postgresLogsCollector)
+	lc.logDestination = postgresLogsDestinationStderr
 
 	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
 	defer cancel()
@@ -139,6 +143,27 @@
 	wg.Wait()
 }
 
+func Test_tailCollect_jsonLogParser(t *testing.T) {
+	c, err := NewPostgresLogsCollector(nil, model.CollectorSettings{})
+	assert.NoError(t, err)
+	assert.NotNil(t, c)
+	lc := c.(*postgresLogsCollector)
+	lc.logDestination = postgresLogsDestinationJSON
+
+	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
+	defer cancel()
+
+	var wg sync.WaitGroup
+
+	wg.Add(1)
+	tailCollect(ctx, "testdata/datadir/postgresql.jsonlog.golden", false, &wg, lc)
+	assert.Equal(t, float64(2), lc.totals.store["log"])
+	assert.Equal(t, float64(3), lc.totals.store["error"])
+	assert.Equal(t, float64(0), lc.totals.store["fatal"])
+
+	wg.Wait()
+}
+
 func Test_queryCurrentLogfile(t *testing.T) {
 	got, err := queryCurrentLogfile(Config{DB: store.NewTest(t)})
 	assert.NoError(t, err)
@@ -147,22 +172,26 @@
 	got, err = queryCurrentLogfile(Config{DB: nil})
 	assert.Error(t, err)
 	assert.Equal(t, got, "")
+
+	got, err = queryCurrentLogfile(Config{DB: store.NewTest(t), LogDirectory: "/custom/dir"})
+	assert.NoError(t, err)
+	assert.True(t, strings.HasPrefix(got, "/custom/dir"))
 }
 
-func Test_newLogParser(t *testing.T) {
-	p := newLogParser()
+func Test_newStderrLogParser(t *testing.T) {
+	p := newStderrLogParser()
 	assert.NotNil(t, p)
 	assert.Greater(t, len(p.reSeverity), 0)
 	assert.Greater(t, len(p.reNormalize), 0)
 }
 
-func Test_logParser_updateMessagesStats(t *testing.T) {
+func Test_stderrLogParser_updateMessagesStats(t *testing.T) {
 	c, err := NewPostgresLogsCollector(nil, model.CollectorSettings{})
 	assert.NoError(t, err)
 	assert.NotNil(t, c)
 	lc := c.(*postgresLogsCollector)
 
-	p := newLogParser()
+	p := newStderrLogParser()
 
 	f, err := os.Open("testdata/datadir/postgresql.log.golden")
 	assert.NoError(t, err)
@@ -189,7 +218,7 @@
 	lc.panics.mu.RUnlock()
 }
 
-func Test_logParser_parseMessageSeverity(t *testing.T) {
+func Test_stderrLogParser_parseMessageSeverity(t *testing.T) {
 	testcases := []struct {
 		line  string
 		want  string
@@ -209,7 +238,7 @@
 		{line: "test", want: "", found: false},
 	}
 
-	p := newLogParser()
+	p := newStderrLogParser()
 
 	for _, tc := range testcases {
 		got, ok := p.parseMessageSeverity(tc.line)
@@ -218,7 +247,7 @@
 	}
 }
 
-func Test_logParser_normalizeMessage(t *testing.T) {
+func Test_stderrLogParser_normalizeMessage(t *testing.T) {
 	testcases := []struct {
 		in   string
 		want string
@@ -249,9 +278,134 @@
 		},
 	}
 
-	parser := newLogParser()
+	parser := newStderrLogParser()
 
 	for _, tc := range testcases {
 		assert.Equal(t, tc.want, parser.normalizeMessage(tc.in))
 	}
 }
+
+func Test_jsonLogParser_normalizeMessage(t *testing.T) {
+	testcases := []struct {
+		jsonLine jsonLine
+		want     string
+	}{
+		{
+			jsonLine: jsonLine{
+				Message:       `syntax error at or near "invalid" at character 1`,
+				ErrorSeverity: "ERROR",
+			},
+			want: `syntax error at or near ? at character ?`,
+		},
+		{
+			jsonLine: jsonLine{
+				Message:       `syntax error at or near ")" at character 721`,
+				ErrorSeverity: "ERROR",
+			},
+			want: `syntax error at or near ? at character ?`,
+		},
+		{
+			jsonLine: jsonLine{
+				Message:       `duplicate key value violates unique constraint "oauth_access_token_authentication_id_uindex"`,
+				ErrorSeverity: "ERROR",
+			},
+			want: `duplicate key value violates unique constraint ?`,
+		},
+		{
+			jsonLine: jsonLine{
+				Message:       `insert or update on table "offer_offer_products" violates foreign key constraint "fkbbwwdruntj50nng01y0g98cof"`,
+				ErrorSeverity: "ERROR",
+			},
+			want: `insert or update on table ? violates foreign key constraint ?`,
+		},
+		{
+			jsonLine: jsonLine{
+				Message:       `could not serialize access due to concurrent update`,
+				ErrorSeverity: "ERROR",
+			},
+			want: `could not serialize access due to concurrent update`,
+		},
+		{
+			jsonLine: jsonLine{
+				Message:       `test`,
+				ErrorSeverity: "LOG",
+			},
+			want: "",
+		},
+	}
+
+	parser := newJSONLogParser()
+
+	for _, tc := range testcases {
+		assert.Equal(t, tc.want, parser.normalizeMessage(tc.jsonLine))
+	}
+}
+
+func Test_jsonLogParser_jsonLine(t *testing.T) {
+	tests := []struct {
+		name    string
+		line    string
+		want    jsonLine
+		wantErr bool
+	}{
+		{
+			line: `{"error_severity":"ERROR","message":"syntax error at or near \"invalid\" at character 1"}`,
+			want: jsonLine{
+				ErrorSeverity: "ERROR",
+				Message:       "syntax error at or near \"invalid\" at character 1",
+			},
+		},
+		{
+			line: `{"error_severity":"ERROR","message":"duplicate key value violates unique constraint \"oauth_access_token_authentication_id_uindex\""}`,
+			want: jsonLine{
+				ErrorSeverity: "ERROR",
+				Message:       "duplicate key value violates unique constraint \"oauth_access_token_authentication_id_uindex\"",
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			parser := newJSONLogParser()
+
+			jsonLine, err := parser.jsonLine(tt.line)
+			if tt.wantErr {
+				assert.Error(t, err)
+
+				return
+			}
+			assert.NoError(t, err)
+			assert.Equal(t, tt.want, jsonLine)
+		})
+	}
+}
+
+func Test_jsonLogParser_updateMessagesStats(t *testing.T) {
+	c, err := NewPostgresLogsCollector(nil, model.CollectorSettings{})
+	assert.NoError(t, err)
+	assert.NotNil(t, c)
+	lc := c.(*postgresLogsCollector)
+
+	p := newJSONLogParser()
+
+	f, err := os.Open("testdata/datadir/postgresql.jsonlog.golden")
+	assert.NoError(t, err)
+
+	scanner := bufio.NewScanner(f)
+	for scanner.Scan() {
+		p.updateMessagesStats(scanner.Text(), lc)
+	}
+
+	lc.totals.mu.RLock()
+	assert.Equal(t, float64(3), lc.totals.store["error"])
+	assert.Equal(t, float64(2), lc.totals.store["log"])
+	lc.totals.mu.RUnlock()
+
+	lc.errors.mu.RLock()
+	assert.Equal(t, 1, len(lc.errors.store))
+	lc.errors.mu.RUnlock()
+
+	lc.panics.mu.RLock()
+	assert.Equal(t, 0, len(lc.panics.store))
+	lc.panics.mu.RUnlock()
+}
diff --git a/internal/collector/testdata/datadir/postgresql.jsonlog.golden b/internal/collector/testdata/datadir/postgresql.jsonlog.golden
new file mode 100644
index 00000000..71d0f452
--- /dev/null
+++ b/internal/collector/testdata/datadir/postgresql.jsonlog.golden
@@ -0,0 +1,5 @@
+{"timestamp":"2025-09-19 12:00:05.751 MSK","user":"user","dbname":"dbname","pid":3985966,"remote_host":"[local]","session_id":"68cd12ce.3cd22e","line_num":20,"ps":"INSERT","session_start":"2025-09-19 11:22:38 MSK","vxid":"37/11788595","txid":6370569,"error_severity":"ERROR","state_code":"23505","message":"duplicate key value violates unique constraint \"project_name_key\""}
+{"timestamp":"2025-09-19 09:00:12.522 MSK","user":"user","dbname":"dbname","pid":3836185,"remote_host":"[local]","session_id":"68ccf16c.3a8919","line_num":4,"ps":"INSERT","session_start":"2025-09-19 09:00:12 MSK","vxid":"68/10963362","txid":6367224,"error_severity":"ERROR","state_code":"23505","message":"duplicate key value violates unique constraint \"project_name_key\""}
+{"timestamp":"2025-09-18 19:00:09.069 MSK","user":"user","dbname":"dbname","pid":2952899,"remote_host":"[local]","session_id":"68cc2c88.2d0ec3","line_num":4,"ps":"INSERT","session_start":"2025-09-18 19:00:08 MSK","vxid":"41/4336086","txid":6361970,"error_severity":"ERROR","state_code":"23505","message":"duplicate key value violates unique constraint \"project_name_key\""}
MSK","vxid":"41/4336086","txid":6361970,"error_severity":"ERROR","state_code":"23505","message":"duplicate key value violates unique constraint \"project_name_key\""} +{"timestamp":"2025-09-23 18:58:21.848 MSK","user":"user","dbname":"dbname","pid":2090646,"remote_host":"[local]","session_id":"68d2ba38.1fe696","line_num":18,"ps":"idle in transaction","session_start":"2025-09-23 18:18:16 MSK","vxid":"33/18476998","txid":0,"error_severity":"LOG","message":"statement: INSERT INTO event_logs"} +{"timestamp":"2025-09-23 18:58:21.848 MSK","user":"user","dbname":"dbname","pid":2090646,"remote_host":"[local]","session_id":"68d2ba38.1fe696","line_num":18,"ps":"idle in transaction","session_start":"2025-09-23 18:18:16 MSK","vxid":"33/18476998","txid":0,"error_severity":"LOG","message":"statement: INSERT INTO event_logs"} \ No newline at end of file diff --git a/internal/pgscv/config.go b/internal/pgscv/config.go index e1d35797..8d03205c 100644 --- a/internal/pgscv/config.go +++ b/internal/pgscv/config.go @@ -3,6 +3,14 @@ package pgscv import ( "fmt" + "io/fs" + "maps" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + sd "github.com/cherts/pgscv/discovery" "github.com/cherts/pgscv/internal/cache" "github.com/cherts/pgscv/internal/http" @@ -12,13 +20,6 @@ import ( "github.com/go-playground/validator/v10" "github.com/jackc/pgx/v5" "gopkg.in/yaml.v2" - "io/fs" - "maps" - "os" - "path/filepath" - "regexp" - "strconv" - "strings" ) const ( @@ -49,6 +50,7 @@ type Config struct { ConcurrencyLimit *int `yaml:"concurrency_limit"` CacheConfig *cache.Config `yaml:"cache" validate:"omitempty"` PoolerConfig *PoolConfig `yaml:"pooler" validate:"omitempty,pool_config"` + LogDirectory string `yaml:"log_directory" validate:"omitempty` // Log directory of postgres. Priority over the path provided by the database. } // PoolConfig defines pgxPool configuration. 
diff --git a/internal/pgscv/pgscv.go b/internal/pgscv/pgscv.go
index 5dbf6a8f..ecea9794 100644
--- a/internal/pgscv/pgscv.go
+++ b/internal/pgscv/pgscv.go
@@ -6,6 +6,10 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	net_http "net/http"
+	"strings"
+	"sync"
+
 	"github.com/cherts/pgscv/discovery"
 	sd "github.com/cherts/pgscv/internal/discovery/service"
 	"github.com/cherts/pgscv/internal/http"
@@ -14,9 +18,6 @@ import (
 	"github.com/cherts/pgscv/internal/service"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promhttp"
-	net_http "net/http"
-	"strings"
-	"sync"
 )
 
 const pgSCVSubscriber = "pgscv_subscriber"
@@ -47,6 +48,7 @@ func Start(ctx context.Context, config *Config) error {
 		ConcurrencyLimit: config.ConcurrencyLimit,
 		CacheConfig:      config.CacheConfig,
 		ConstLabels:      &constLabels,
+		LogDirectory:     config.LogDirectory,
 	}
 	if config.PoolerConfig != nil {
 		serviceConfig.PoolerConfig = &service.PoolConfig{
@@ -163,6 +165,7 @@ func subscribe(ds *discovery.Discovery, config *Config, serviceRepo *service.Rep
 		ConnTimeout:      config.ConnTimeout,
 		ConcurrencyLimit: config.ConcurrencyLimit,
 		CacheConfig:      config.CacheConfig,
+		LogDirectory:     config.LogDirectory,
 	}
 	if config.PoolerConfig != nil {
 		serviceDiscoveryConfig.PoolerConfig = &service.PoolConfig{
diff --git a/internal/service/service.go b/internal/service/service.go
index 72a9aa9a..37f7dd86 100644
--- a/internal/service/service.go
+++ b/internal/service/service.go
@@ -4,13 +4,14 @@ package service
 import (
 	"context"
 	"fmt"
-	"github.com/cherts/pgscv/internal/cache"
-	"github.com/cherts/pgscv/internal/registry"
-	"github.com/jackc/pgx/v5/pgxpool"
 	"strings"
 	"sync"
 	"time"
 
+	"github.com/cherts/pgscv/internal/cache"
+	"github.com/cherts/pgscv/internal/registry"
+	"github.com/jackc/pgx/v5/pgxpool"
+
 	"slices"
 
 	"github.com/cherts/pgscv/internal/collector"
@@ -62,6 +63,7 @@ type Config struct {
 	CacheKey     string
 	Pool         *pgxpool.Pool
 	PoolerConfig *PoolConfig
+	LogDirectory string
 }
 
 // PoolConfig defines pgxPool configuration.
@@ -322,6 +324,7 @@ func (repo *Repository) setupServices(config Config) error {
 		ConcurrencyLimit: config.ConcurrencyLimit,
 		CacheConfig:      config.CacheConfig,
 		DB:               service.DB,
+		LogDirectory:     config.LogDirectory,
 	}
 	if config.ConstLabels != nil && (*config.ConstLabels)[id] != nil {
 		collectorConfig.ConstLabels = (*config.ConstLabels)[id]