From 3d42b0058e091ec06563e4cf47e32d0d063bab7d Mon Sep 17 00:00:00 2001 From: Abhishek Kumar Singh Date: Wed, 3 Dec 2025 18:43:32 +0530 Subject: [PATCH] chore: Query filter extraction API (#9617) --- ee/query-service/app/api/api.go | 2 + pkg/query-service/app/http_handler.go | 8 + pkg/query-service/app/server.go | 2 + pkg/queryparser/api.go | 49 ++++ pkg/queryparser/api_test.go | 258 ++++++++++++++++++ .../queryfilterextractor/clickhouse.go | 0 .../clickhouse_originparser.go | 0 .../clickhouse_originparser_test.go | 0 .../queryfilterextractor/clickhouse_test.go | 0 .../queryfilterextractor/promql.go | 0 .../queryfilterextractor/promql_test.go | 0 .../queryfilterextractor.go | 23 +- pkg/queryparser/queryparser.go | 14 + pkg/queryparser/queryparser_impl.go | 40 +++ pkg/signoz/signoz.go | 6 + pkg/types/parsertypes/queryfilteranalyzer.go | 49 ++++ 16 files changed, 443 insertions(+), 8 deletions(-) create mode 100644 pkg/queryparser/api.go create mode 100644 pkg/queryparser/api_test.go rename pkg/{parser => queryparser}/queryfilterextractor/clickhouse.go (100%) rename pkg/{parser => queryparser}/queryfilterextractor/clickhouse_originparser.go (100%) rename pkg/{parser => queryparser}/queryfilterextractor/clickhouse_originparser_test.go (100%) rename pkg/{parser => queryparser}/queryfilterextractor/clickhouse_test.go (100%) rename pkg/{parser => queryparser}/queryfilterextractor/promql.go (100%) rename pkg/{parser => queryparser}/queryfilterextractor/promql_test.go (100%) rename pkg/{parser => queryparser}/queryfilterextractor/queryfilterextractor.go (69%) create mode 100644 pkg/queryparser/queryparser.go create mode 100644 pkg/queryparser/queryparser_impl.go create mode 100644 pkg/types/parsertypes/queryfilteranalyzer.go diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index 4b3b382103..be351341c2 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -19,6 +19,7 @@ import ( 
"github.com/SigNoz/signoz/pkg/query-service/interfaces" basemodel "github.com/SigNoz/signoz/pkg/query-service/model" rules "github.com/SigNoz/signoz/pkg/query-service/rules" + "github.com/SigNoz/signoz/pkg/queryparser" "github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" @@ -60,6 +61,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore), Signoz: signoz, QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics), + QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser), }) if err != nil { diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 5fd5ca5db6..bee0c46214 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/modules/thirdpartyapi" + "github.com/SigNoz/signoz/pkg/queryparser" "io" "math" @@ -146,6 +147,8 @@ type APIHandler struct { QuerierAPI *querierAPI.API + QueryParserAPI *queryparser.API + Signoz *signoz.SigNoz } @@ -176,6 +179,8 @@ type APIHandlerOpts struct { QuerierAPI *querierAPI.API + QueryParserAPI *queryparser.API + Signoz *signoz.SigNoz } @@ -238,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { Signoz: opts.Signoz, FieldsAPI: opts.FieldsAPI, QuerierAPI: opts.QuerierAPI, + QueryParserAPI: opts.QueryParserAPI, } logsQueryBuilder := logsv4.PrepareLogsQuery @@ -632,6 +638,8 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost) + // Query Filter Analyzer api used to extract metric names and 
grouping columns from a query + router.HandleFunc("/api/v1/query_filter/analyze", am.ViewAccess(aH.QueryParserAPI.AnalyzeQueryFilter)).Methods(http.MethodPost) } func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.AuthZ) { diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index d79aeb3d4a..a3400da342 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -10,6 +10,7 @@ import ( "slices" "github.com/SigNoz/signoz/pkg/cache/memorycache" + "github.com/SigNoz/signoz/pkg/queryparser" "github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore" "github.com/gorilla/handlers" @@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) { FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore), Signoz: signoz, QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics), + QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser), }) if err != nil { return nil, err diff --git a/pkg/queryparser/api.go b/pkg/queryparser/api.go new file mode 100644 index 0000000000..6d9e738775 --- /dev/null +++ b/pkg/queryparser/api.go @@ -0,0 +1,49 @@ +package queryparser + +import ( + "net/http" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/http/binding" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/types/parsertypes" +) + +type API struct { + queryParser QueryParser + settings factory.ProviderSettings +} + +func NewAPI(settings factory.ProviderSettings, queryParser QueryParser) *API { + return &API{settings: settings, queryParser: queryParser} +} + +// AnalyzeQueryFilter analyzes a query and extracts metric names and grouping columns +func (a *API) AnalyzeQueryFilter(w http.ResponseWriter, r *http.Request) { + // Limit request body size to 255 KB (CH query limit is 256 KB) + r.Body = 
http.MaxBytesReader(w, r.Body, 255*1024) + + var req parsertypes.QueryFilterAnalyzeRequest + if err := binding.JSON.BindBody(r.Body, &req); err != nil { + render.Error(w, err) + return + } + + result, err := a.queryParser.AnalyzeQueryFilter(r.Context(), req.QueryType, req.Query) + if err != nil { + a.settings.Logger.ErrorContext(r.Context(), "failed to analyze query filter", "error", err) + render.Error(w, err) + return + } + + // prepare the response + var resp parsertypes.QueryFilterAnalyzeResponse + for _, group := range result.GroupByColumns { + resp.Groups = append(resp.Groups, parsertypes.ColumnInfoResponse{ + Name: group.Name, + Alias: group.Alias, + }) // add the group name and alias to the response + } + resp.MetricNames = append(resp.MetricNames, result.MetricNames...) // add the metric names to the response + render.Success(w, http.StatusOK, resp) +} diff --git a/pkg/queryparser/api_test.go b/pkg/queryparser/api_test.go new file mode 100644 index 0000000000..22f72fb5bb --- /dev/null +++ b/pkg/queryparser/api_test.go @@ -0,0 +1,258 @@ +package queryparser + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "reflect" + "sort" + "strings" + "testing" + + "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" + "github.com/SigNoz/signoz/pkg/types/parsertypes" + "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +func TestAPI_AnalyzeQueryFilter(t *testing.T) { + queryParser := New(instrumentationtest.New().ToProviderSettings()) + aH := NewAPI(instrumentationtest.New().ToProviderSettings(), queryParser) + + tests := []struct { + name string + requestBody parsertypes.QueryFilterAnalyzeRequest + expectedStatus int + expectedStatusStr string + expectedError bool + errorContains string + expectedMetrics []string + expectedGroups []parsertypes.ColumnInfoResponse + }{ + { + name: "PromQL - Nested aggregation inside subquery", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: 
`max_over_time(sum(rate(cpu_usage_total[5m]))[1h:5m])`, + QueryType: querybuildertypesv5.QueryTypePromQL, + }, + expectedStatus: http.StatusOK, + expectedStatusStr: "success", + expectedError: false, + expectedMetrics: []string{"cpu_usage_total"}, + expectedGroups: []parsertypes.ColumnInfoResponse{}, + }, + { + name: "PromQL - Subquery with multiple metrics", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `avg_over_time((foo + bar)[10m:1m])`, + QueryType: querybuildertypesv5.QueryTypePromQL, + }, + expectedStatus: http.StatusOK, + expectedStatusStr: "success", + expectedError: false, + expectedMetrics: []string{"bar", "foo"}, + expectedGroups: []parsertypes.ColumnInfoResponse{}, + }, + { + name: "PromQL - Simple meta-metric with grouping", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `sum by (pod) (up)`, + QueryType: querybuildertypesv5.QueryTypePromQL, + }, + expectedStatus: http.StatusOK, + expectedStatusStr: "success", + expectedError: false, + expectedMetrics: []string{"up"}, + expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "pod", Alias: ""}}, + }, + { + name: "ClickHouse - Simple CTE with GROUP BY", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `WITH aggregated AS ( + SELECT region as region_alias, sum(value) AS total + FROM metrics + WHERE metric_name = 'cpu_usage' + GROUP BY region + ) + SELECT * FROM aggregated`, + QueryType: querybuildertypesv5.QueryTypeClickHouseSQL, + }, + expectedStatus: http.StatusOK, + expectedStatusStr: "success", + expectedError: false, + expectedMetrics: []string{"cpu_usage"}, + expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "region", Alias: "region_alias"}}, + }, + { + name: "ClickHouse - CTE chain with last GROUP BY + Alias should be returned if exists", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `WITH step1 AS ( + SELECT service as service_alias, timestamp as ts, value + FROM metrics + WHERE metric_name = 'requests' + GROUP BY service, 
timestamp + ), + step2 AS ( + SELECT ts, avg(value) AS avg_value + FROM step1 + GROUP BY ts + ) + SELECT * FROM step2`, + QueryType: querybuildertypesv5.QueryTypeClickHouseSQL, + }, + expectedStatus: http.StatusOK, + expectedStatusStr: "success", + expectedError: false, + expectedMetrics: []string{"requests"}, + expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "ts", Alias: ""}}, + }, + { + name: "ClickHouse - Outer GROUP BY overrides CTE GROUP BY + Alias should be returned if exists", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `WITH cte AS ( + SELECT region, service, value + FROM metrics + WHERE metric_name = 'memory' + GROUP BY region, service + ) + SELECT region as region_alias, sum(value) as total + FROM cte + GROUP BY region`, + QueryType: querybuildertypesv5.QueryTypeClickHouseSQL, + }, + expectedStatus: http.StatusOK, + expectedStatusStr: "success", + expectedError: false, + expectedMetrics: []string{"memory"}, + expectedGroups: []parsertypes.ColumnInfoResponse{{Name: "region", Alias: "region_alias"}}, + }, + { + name: "ClickHouse - Invalid query should return error", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `SELECT WHERE metric_name = 'memory' GROUP BY region, service`, + QueryType: querybuildertypesv5.QueryTypeClickHouseSQL, + }, + expectedStatus: http.StatusBadRequest, + expectedStatusStr: "error", + expectedError: true, + errorContains: "failed to parse clickhouse query", + }, + { + name: "Empty query should return error", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: "", + QueryType: querybuildertypesv5.QueryTypePromQL, + }, + expectedStatus: http.StatusBadRequest, + expectedStatusStr: "error", + expectedError: true, + errorContains: "query is required and cannot be empty", + }, + { + name: "Invalid queryType should return error", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `sum(rate(cpu_usage[5m]))`, + QueryType: querybuildertypesv5.QueryTypeUnknown, + }, + 
expectedStatus: http.StatusBadRequest, + expectedStatusStr: "error", + expectedError: true, + errorContains: "unsupported queryType", + }, + { + name: "Invalid PromQL syntax should return error", + requestBody: parsertypes.QueryFilterAnalyzeRequest{ + Query: `sum by ((foo)(bar))(http_requests_total)`, + QueryType: querybuildertypesv5.QueryTypePromQL, + }, + expectedStatus: http.StatusBadRequest, + expectedStatusStr: "error", + expectedError: true, + errorContains: "failed to parse promql query", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Create request body + reqBody, err := json.Marshal(tt.requestBody) + if err != nil { + t.Fatalf("failed to marshal request body: %v", err) + } + + // Create HTTP request + req := httptest.NewRequestWithContext(context.Background(), http.MethodPost, "/api/v1/query_filter/analyze", bytes.NewBuffer(reqBody)) + req.Header.Set("Content-Type", "application/json") + + // Create response recorder + rr := httptest.NewRecorder() + + // Call handler + aH.AnalyzeQueryFilter(rr, req) + + // Check status code + if rr.Code != tt.expectedStatus { + t.Errorf("expected status %d, got %d", tt.expectedStatus, rr.Code) + } + + // Parse response + var resp map[string]interface{} + if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil { + t.Fatalf("failed to unmarshal response: %v, body: %s", err, rr.Body.String()) + } + + // Check status string + if resp["status"] != tt.expectedStatusStr { + t.Errorf("expected status '%s', got %v", tt.expectedStatusStr, resp["status"]) + } + + if tt.expectedError { + errorObj, ok := resp["error"].(map[string]interface{}) + if !ok { + t.Fatalf("expected error to be a map, got %T", resp["error"]) + } + errorMsg, ok := errorObj["message"].(string) + if !ok { + t.Fatalf("expected error message to be a string, got %T", errorObj["message"]) + } + if !strings.Contains(errorMsg, tt.errorContains) { + t.Errorf("expected error message to contain '%s', got '%s'", tt.errorContains, 
errorMsg) + } + } else { + // Validate success response + data, ok := resp["data"].(map[string]interface{}) + if !ok { + t.Fatalf("expected data to be a map, got %T", resp["data"]) + } + + // Marshal data back to JSON and unmarshal into QueryFilterAnalyzeResponse struct + dataBytes, err := json.Marshal(data) + if err != nil { + t.Fatalf("failed to marshal data: %v", err) + } + + var responseData parsertypes.QueryFilterAnalyzeResponse + if err := json.Unmarshal(dataBytes, &responseData); err != nil { + t.Fatalf("failed to unmarshal data into QueryFilterAnalyzeResponse: %v", err) + } + + // Sort the arrays for comparison + gotMetrics := make([]string, len(responseData.MetricNames)) + copy(gotMetrics, responseData.MetricNames) + sort.Strings(gotMetrics) + + gotGroups := make([]parsertypes.ColumnInfoResponse, len(responseData.Groups)) + copy(gotGroups, responseData.Groups) + + // Compare using deep equal + if !reflect.DeepEqual(gotMetrics, tt.expectedMetrics) { + t.Errorf("expected metricNames %v, got %v", tt.expectedMetrics, gotMetrics) + } + if !reflect.DeepEqual(gotGroups, tt.expectedGroups) { + t.Errorf("expected groups %v, got %v", tt.expectedGroups, gotGroups) + } + } + }) + } +} diff --git a/pkg/parser/queryfilterextractor/clickhouse.go b/pkg/queryparser/queryfilterextractor/clickhouse.go similarity index 100% rename from pkg/parser/queryfilterextractor/clickhouse.go rename to pkg/queryparser/queryfilterextractor/clickhouse.go diff --git a/pkg/parser/queryfilterextractor/clickhouse_originparser.go b/pkg/queryparser/queryfilterextractor/clickhouse_originparser.go similarity index 100% rename from pkg/parser/queryfilterextractor/clickhouse_originparser.go rename to pkg/queryparser/queryfilterextractor/clickhouse_originparser.go diff --git a/pkg/parser/queryfilterextractor/clickhouse_originparser_test.go b/pkg/queryparser/queryfilterextractor/clickhouse_originparser_test.go similarity index 100% rename from 
pkg/parser/queryfilterextractor/clickhouse_originparser_test.go rename to pkg/queryparser/queryfilterextractor/clickhouse_originparser_test.go diff --git a/pkg/parser/queryfilterextractor/clickhouse_test.go b/pkg/queryparser/queryfilterextractor/clickhouse_test.go similarity index 100% rename from pkg/parser/queryfilterextractor/clickhouse_test.go rename to pkg/queryparser/queryfilterextractor/clickhouse_test.go diff --git a/pkg/parser/queryfilterextractor/promql.go b/pkg/queryparser/queryfilterextractor/promql.go similarity index 100% rename from pkg/parser/queryfilterextractor/promql.go rename to pkg/queryparser/queryfilterextractor/promql.go diff --git a/pkg/parser/queryfilterextractor/promql_test.go b/pkg/queryparser/queryfilterextractor/promql_test.go similarity index 100% rename from pkg/parser/queryfilterextractor/promql_test.go rename to pkg/queryparser/queryfilterextractor/promql_test.go diff --git a/pkg/parser/queryfilterextractor/queryfilterextractor.go b/pkg/queryparser/queryfilterextractor/queryfilterextractor.go similarity index 69% rename from pkg/parser/queryfilterextractor/queryfilterextractor.go rename to pkg/queryparser/queryfilterextractor/queryfilterextractor.go index fdaf7bac2b..f768d9e19f 100644 --- a/pkg/parser/queryfilterextractor/queryfilterextractor.go +++ b/pkg/queryparser/queryfilterextractor/queryfilterextractor.go @@ -4,11 +4,18 @@ // This is useful for metrics discovery, and query analysis. 
// Package queryparser provides parsing and analysis of PromQL and
// ClickHouse SQL queries.
package queryparser

import (
	"context"

	"github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor"
	"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// QueryParser defines the interface for parsing and analyzing queries.
type QueryParser interface {
	// AnalyzeQueryFilter extracts metric names and group-by columns from the
	// given query string. queryType selects the dialect to parse (PromQL or
	// ClickHouse SQL); unsupported types yield an invalid-input error.
	AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error)
}
+ AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error) +} diff --git a/pkg/queryparser/queryparser_impl.go b/pkg/queryparser/queryparser_impl.go new file mode 100644 index 0000000000..b79d4533a0 --- /dev/null +++ b/pkg/queryparser/queryparser_impl.go @@ -0,0 +1,40 @@ +package queryparser + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/queryparser/queryfilterextractor" + "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +type queryParserImpl struct { + settings factory.ProviderSettings +} + +// New creates a new implementation of the QueryParser service. +func New(settings factory.ProviderSettings) QueryParser { + return &queryParserImpl{ + settings: settings, + } +} + +func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType querybuildertypesv5.QueryType, query string) (*queryfilterextractor.FilterResult, error) { + var extractorType queryfilterextractor.ExtractorType + switch queryType { + case querybuildertypesv5.QueryTypePromQL: + extractorType = queryfilterextractor.ExtractorTypePromQL + case querybuildertypesv5.QueryTypeClickHouseSQL: + extractorType = queryfilterextractor.ExtractorTypeClickHouseSQL + default: + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %s. 
Supported values are '%s' and '%s'", queryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL) + } + + // Create extractor + extractor, err := queryfilterextractor.NewExtractor(extractorType) + if err != nil { + return nil, err + } + return extractor.Extract(query) +} diff --git a/pkg/signoz/signoz.go b/pkg/signoz/signoz.go index c6c5346284..aa3a27412d 100644 --- a/pkg/signoz/signoz.go +++ b/pkg/signoz/signoz.go @@ -20,6 +20,7 @@ import ( "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/prometheus" "github.com/SigNoz/signoz/pkg/querier" + "github.com/SigNoz/signoz/pkg/queryparser" "github.com/SigNoz/signoz/pkg/sharder" "github.com/SigNoz/signoz/pkg/sqlmigration" "github.com/SigNoz/signoz/pkg/sqlmigrator" @@ -62,6 +63,7 @@ type SigNoz struct { Authz authz.AuthZ Modules Modules Handlers Handlers + QueryParser queryparser.QueryParser } func New( @@ -309,6 +311,9 @@ func New( return nil, err } + // Initialize query parser + queryParser := queryparser.New(providerSettings) + // Initialize authns store := sqlauthnstore.NewStore(sqlstore) authNs, err := authNsCallback(ctx, providerSettings, store, licensing) @@ -402,5 +407,6 @@ func New( Authz: authz, Modules: modules, Handlers: handlers, + QueryParser: queryParser, }, nil } diff --git a/pkg/types/parsertypes/queryfilteranalyzer.go b/pkg/types/parsertypes/queryfilteranalyzer.go new file mode 100644 index 0000000000..d00ed66b90 --- /dev/null +++ b/pkg/types/parsertypes/queryfilteranalyzer.go @@ -0,0 +1,49 @@ +package parsertypes + +import ( + "encoding/json" + "strings" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +// QueryFilterAnalyzeRequest represents the request body for query filter analysis +type QueryFilterAnalyzeRequest struct { + Query string `json:"query"` + QueryType querybuildertypesv5.QueryType `json:"queryType"` +} + +// UnmarshalJSON implements custom JSON 
unmarshaling with validation and normalization +func (q *QueryFilterAnalyzeRequest) UnmarshalJSON(data []byte) error { + // Use a temporary struct to avoid infinite recursion + type Alias QueryFilterAnalyzeRequest + aux := (*Alias)(q) + + if err := json.Unmarshal(data, aux); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse json: %v", err) + } + + // Trim and validate query is not empty + q.Query = strings.TrimSpace(aux.Query) + if q.Query == "" { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "query is required and cannot be empty") + } + + // Validate query type + if aux.QueryType != querybuildertypesv5.QueryTypeClickHouseSQL && aux.QueryType != querybuildertypesv5.QueryTypePromQL { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported queryType: %v. Supported values are '%s' and '%s'", aux.QueryType, querybuildertypesv5.QueryTypePromQL, querybuildertypesv5.QueryTypeClickHouseSQL) + } + return nil +} + +type ColumnInfoResponse struct { + Name string `json:"columnName"` + Alias string `json:"columnAlias"` +} + +// QueryFilterAnalyzeResponse represents the response body for query filter analysis +type QueryFilterAnalyzeResponse struct { + MetricNames []string `json:"metricNames"` + Groups []ColumnInfoResponse `json:"groups"` +}