diff --git a/input/system/google_cloudsql/logs.go b/input/system/google_cloudsql/logs.go index b56e3ede5..4ae29a309 100644 --- a/input/system/google_cloudsql/logs.go +++ b/input/system/google_cloudsql/logs.go @@ -169,6 +169,9 @@ func SetupLogSubscriber(ctx context.Context, wg *sync.WaitGroup, globalCollectio return nil } +// AlloyDB adds a special [filename:lineno] prefix to all log lines (not part of log_line_prefix) +var alloyPrefix = regexp.MustCompile(`(?s)^\[[\w.-]+:\d+\] (.*)`) + func setupLogTransformer(ctx context.Context, wg *sync.WaitGroup, servers []*state.Server, in <-chan LogStreamItem, out chan state.ParsedLogStreamItem, logger *util.Logger) { wg.Add(1) go func() { @@ -203,14 +206,12 @@ func setupLogTransformer(ctx context.Context, wg *sync.WaitGroup, servers []*sta continue } - parser := server.GetLogParser() - // We ignore failures here since we want the per-backend stitching logic // that runs later on (and any other parsing errors will just be ignored). // Note that we need to restore the original trailing newlines since // AnalyzeStreamInGroups expects them and they are not present in the GCP // log stream. 
- logLine, _ := parser.ParseLine(in.Content + "\n") + logLine, _ := server.GetLogParser().ParseLine(in.Content + "\n") logLine.OccurredAt = in.OccurredAt // Ignore loglines which are outside our time window @@ -219,8 +220,7 @@ } if isAlloyDBCluster { - // AlloyDB adds a special [filename:lineno] prefix to all log lines (not part of log_line_prefix) - parts := regexp.MustCompile(`(?s)^\[[\w.-]+:\d+\] (.*)`).FindStringSubmatch(string(logLine.Content)) + parts := alloyPrefix.FindStringSubmatch(string(logLine.Content)) if len(parts) == 2 { logLine.Content = parts[1] } diff --git a/logs/querysample/querysample.go b/logs/querysample/querysample.go index 85ab33289..a3088e0f2 100644 --- a/logs/querysample/querysample.go +++ b/logs/querysample/querysample.go @@ -117,13 +117,14 @@ func TransformLogMinDurationStatementToQuerySample(logLine state.LogLine, queryT return sample, true } +// Regular expression to find all values in single quotes or NULL +// Query Parameters example: $1 = 'foo', $2 = '123', $3 = NULL, $4 = 'bo''o' +var valueRegexp = regexp.MustCompile(`(?:(NULL)|'((?:[^']|'')*)')`) + func findQueryParameters(paramText string) []null.String { // Handle Query Parameters (available from Postgres 16+) var parameters []null.String - // Regular expression to find all values in single quotes or NULL - // Query Parameters example: $1 = 'foo', $2 = '123', $3 = NULL, $4 = 'bo''o' - re := regexp.MustCompile(`(?:(NULL)|'((?:[^']|'')*)')`) - for _, part := range re.FindAllString(paramText, -1) { + for _, part := range valueRegexp.FindAllString(paramText, -1) { if part == "NULL" { parameters = append(parameters, null.NewString("", false)) } else { diff --git a/util/clean_http_error.go b/util/clean_http_error.go index 1d498a06d..29e8e014d 100644 --- a/util/clean_http_error.go +++ b/util/clean_http_error.go @@ -6,10 +6,11 @@ import ( "regexp" ) +var regex = regexp.MustCompile("(?i): (get|post|patch) ") 
+ // Removes duplicate URLs added by retryablehttp func CleanHTTPError(error error) error { message := fmt.Sprintf("%v", error) - regex := regexp.MustCompile("(?i): (get|post|patch) ") array := regex.Split(message, -1) return errors.New(array[len(array)-1]) }