Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-19 07:22:29 +00:00)

Compare commits: remove-v4-...nv/6703 (5 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 84a7be22f7 |  |
|  | c5923f942f |  |
|  | f91eee7e50 |  |
|  | 5c86b80682 |  |
|  | 75512a81c6 |  |
docs/api/openapi.yml (3504 changed lines): diff suppressed because it is too large
@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -106,7 +107,10 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

// v5
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.Handle("/api/v5/query_range", handler.New(
am.ViewAccess(ah.queryRangeV5),
querierAPI.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)

router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
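The registration change above is the integration point: the v5 route keeps its authz wrapper but now also carries OpenAPI metadata. A minimal sketch of the pattern, assuming from the call above that handler.New pairs the request handler with an OpenAPIDef and itself satisfies http.Handler:

```go
// Sketch only, not part of the diff: how a documented route is mounted.
// handler.New appears to bundle the request handler with the operation's
// OpenAPI definition, so one registration serves traffic and spec generation.
router.Handle("/api/v5/query_range", handler.New(
	am.ViewAccess(ah.queryRangeV5),    // request handler, behind view-access authz
	querierAPI.QueryRangeV5OpenAPIDef, // operation metadata (pkg/querier/openapi.go below)
)).Methods(http.MethodPost)
```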
@@ -5,16 +5,23 @@ import (
"encoding/json"
"fmt"
"log/slog"
"math"
"strings"
"sync"
"time"

"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"

querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
@@ -25,8 +32,6 @@ import (

querierV5 "github.com/SigNoz/signoz/pkg/querier"

"github.com/SigNoz/signoz/ee/query-service/anomaly"

anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"

qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -43,12 +48,17 @@ type AnomalyRule struct {

reader interfaces.Reader

// querierV5 is the query builder v5 querier used for all alert rule evaluation
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier

// querierV5 is used for alerts migrated after the introduction of new query builder
querierV5 querierV5.Querier

provider anomaly.Provider
providerV2 anomalyV2.Provider

logger *slog.Logger
version string
logger *slog.Logger

seasonality anomaly.Seasonality
}
@@ -92,6 +102,34 @@ func NewAnomalyRule(

logger.Info("using seasonality", "seasonality", t.seasonality.String())

querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}

t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
)
}

if t.seasonality == anomaly.SeasonalityHourly {
t.providerV2 = anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
@@ -110,7 +148,7 @@ func NewAnomalyRule(
}

t.querierV5 = querierV5
t.reader = reader
t.version = p.Version
t.logger = logger
return &t, nil
}
@@ -119,9 +157,36 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}

func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {

r.logger.InfoContext(ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)

st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()

compositeQuery := r.Condition().CompositeQuery

if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}

// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}

func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {

r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())

startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -150,6 +215,60 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}

anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
if err != nil {
return nil, err
}

var queryResult *v3.Result
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
queryResult = result
break
}
}

hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}

var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))

for _, series := range queryResult.AnomalyScores {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}

func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {

params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}

anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
Params: *params,
@@ -171,25 +290,20 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
}

var anomalyScores []*qbtypes.TimeSeries
if qbResult != nil {
for _, bucket := range qbResult.Aggregations {
anomalyScores = append(anomalyScores, bucket.AnomalyScores...)
}
}
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)

hasData := len(anomalyScores) > 0
hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}

var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(anomalyScores)
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))

// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := anomalyScores
seriesToProcess := queryResult.AnomalyScores
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -202,7 +316,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t

for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -223,7 +337,16 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {

valueFormatter := formatter.FromUnit(r.Unit())

res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
var res ruletypes.Vector
var err error

if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
}
if err != nil {
return 0, err
}
@@ -8,27 +8,28 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)

// mockAnomalyProviderV2 is a mock implementation of anomalyV2.Provider for testing.
type mockAnomalyProviderV2 struct {
responses []*anomalyV2.AnomaliesResponse
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
callCount int
}

func (m *mockAnomalyProviderV2) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomalyV2.AnomaliesRequest) (*anomalyV2.AnomaliesResponse, error) {
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomalyV2.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
@@ -81,11 +82,11 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
},
}

responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
QueryName: "A",
AnomalyScores: []*v3.Series{},
},
},
}
@@ -128,8 +129,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
)
require.NoError(t, err)

rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseNoData},
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
}

alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -189,11 +190,11 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
},
}

responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
QueryName: "A",
AnomalyScores: []*v3.Series{},
},
},
}
@@ -227,22 +228,16 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)

responseWithData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{
AnomalyScores: []*v3.Series{
{
AnomalyScores: []*qbtypes.TimeSeries{
{
Labels: []*qbtypes.Label{
{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
@@ -257,8 +252,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)

rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseWithData, responseNoData},
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
}

alertsFound1, err := rule.Eval(context.Background(), t1)
frontend/src/api/generated/services/query/index.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;

/**
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
* @summary Query range
*/
export const queryRangeV5 = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<QueryRangeV5200>({
url: `/api/v5/query_range`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};

export const getQueryRangeV5MutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['queryRangeV5'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof queryRangeV5>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};

return queryRangeV5(data);
};

return { mutationFn, ...mutationOptions };
};

export type QueryRangeV5MutationResult = NonNullable<
Awaited<ReturnType<typeof queryRangeV5>>
>;
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type QueryRangeV5MutationError = RenderErrorResponseDTO;

/**
* @summary Query range
*/
export const useQueryRangeV5 = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getQueryRangeV5MutationOptions(options);

return useMutation(mutationOptions);
};
File diff suppressed because it is too large
@@ -6,7 +6,7 @@ import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
@@ -63,8 +63,12 @@ function QuerySection({
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions
version={ENTITY_VERSION_V5}
showFunctions={
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>
@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
)

type ServeOpenAPIFunc func(openapi.OperationContext)
@@ -59,7 +60,39 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
}

// Add request structure
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
reqOpts := []openapi.ContentOption{openapi.WithContentType(handler.openAPIDef.RequestContentType)}
if len(handler.openAPIDef.RequestExamples) > 0 {
reqOpts = append(reqOpts, openapi.WithCustomize(func(cor openapi.ContentOrReference) {
rbOrRef, ok := cor.(*openapi3.RequestBodyOrRef)
if !ok || rbOrRef.RequestBody == nil {
return
}
ct := handler.openAPIDef.RequestContentType
if ct == "" {
ct = "application/json"
}
mt, exists := rbOrRef.RequestBody.Content[ct]
if !exists {
return
}
if mt.Examples == nil {
mt.Examples = make(map[string]openapi3.ExampleOrRef)
}
for _, ex := range handler.openAPIDef.RequestExamples {
val := ex.Value
oaExample := openapi3.Example{Value: &val}
if ex.Summary != "" {
oaExample.WithSummary(ex.Summary)
}
if ex.Description != "" {
oaExample.WithDescription(ex.Description)
}
mt.Examples[ex.Name] = openapi3.ExampleOrRef{Example: &oaExample}
}
rbOrRef.RequestBody.Content[ct] = mt
}))
}
opCtx.AddReqStructure(handler.openAPIDef.Request, reqOpts...)

// Add request query structure
opCtx.AddReqStructure(handler.openAPIDef.RequestQuery)
@@ -9,6 +9,14 @@ import (
"github.com/swaggest/rest/openapi"
)

// OpenAPIExample is a named example for an OpenAPI operation.
type OpenAPIExample struct {
Name string
Summary string
Description string
Value any
}

// Def is the definition of an OpenAPI operation
type OpenAPIDef struct {
ID string
@@ -18,6 +26,7 @@ type OpenAPIDef struct {
Request any
RequestQuery any
RequestContentType string
RequestExamples []OpenAPIExample
Response any
ResponseContentType string
SuccessStatusCode int
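A hedged sketch of how the new RequestExamples field slots into a definition; the Ping operation and its request/response types are hypothetical, while QueryRangeV5OpenAPIDef in pkg/querier/openapi.go below is the real consumer:

```go
// Hypothetical operation definition exercising the new field.
var pingOpenAPIDef = handler.OpenAPIDef{
	ID:                 "Ping",
	Summary:            "Ping",
	Request:            new(PingRequest), // hypothetical request type
	RequestContentType: "application/json",
	RequestExamples: []handler.OpenAPIExample{
		{Name: "basic", Summary: "Basic ping", Value: map[string]any{"message": "hello"}},
	},
	Response:            new(PingResponse), // hypothetical response type
	ResponseContentType: "application/json",
	SuccessStatusCode:   http.StatusOK,
}
```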
@@ -80,11 +80,12 @@ func (q *builderQuery[T]) Fingerprint() string {
case qbtypes.LogAggregation:
aggParts = append(aggParts, a.Expression)
case qbtypes.MetricAggregation:
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
a.MetricName,
a.Temporality.StringValue(),
a.TimeAggregation.StringValue(),
a.SpaceAggregation.StringValue(),
a.SpaceAggregationParam.StringValue(),
))
}
}
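The extra %s tightens the fingerprint: two metric aggregations that differ only in the space-aggregation parameter no longer produce identical strings. A self-contained illustration with hypothetical field values:

```go
package main

import "fmt"

func main() {
	// Hypothetical stand-ins for the qbtypes.MetricAggregation fields.
	before := fmt.Sprintf("%s:%s:%s:%s", "http.server.duration.bucket", "cumulative", "rate", "percentile")
	after := fmt.Sprintf("%s:%s:%s:%s:%s", "http.server.duration.bucket", "cumulative", "rate", "percentile", "0.99")
	fmt.Println(before == after) // false: the parameter now disambiguates the key
}
```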
pkg/querier/openapi.go (new file, 454 lines)
@@ -0,0 +1,454 @@
package querier

import (
"net/http"

"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)

// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}

var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}
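For orientation, a hedged sketch of exercising the documented endpoint with one of the example payloads above (host and API-key header are assumptions, not taken from this diff; the generated docs/api/openapi.yml is the authoritative contract):

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// The "traces_scalar" example payload from queryRangeV5Examples above.
	body := []byte(`{
		"schemaVersion": "v1",
		"start": 1640995200000,
		"end": 1640998800000,
		"requestType": "scalar",
		"compositeQuery": {"queries": [{"type": "builder_query", "spec": {
			"name": "A",
			"signal": "traces",
			"aggregations": [{"expression": "count()", "alias": "span_count"}],
			"filter": {"expression": "service.name = 'frontend'"}
		}}]}
	}`)
	req, err := http.NewRequest(http.MethodPost, "https://signoz.example.com/api/v5/query_range", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<your-api-key>") // header name assumed from the apiKey security scheme
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```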
@@ -625,7 +625,7 @@ func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) (map[string][]s

// FilterNewSeries filters out items that are too new based on metadata first_seen timestamps.
// Returns the filtered series (old ones) excluding new series that are still within the grace period.
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*qbtypes.TimeSeries) ([]*qbtypes.TimeSeries, error) {
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
// Extract metric names and groupBy keys
metricToGroupedFields, err := r.extractMetricAndGroupBys(ctx)
if err != nil {
@@ -642,7 +642,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
seriesIdxToLookupKeys := make(map[int][]telemetrytypes.MetricMetadataLookupKey) // series index -> lookup keys

for i := 0; i < len(series); i++ {
metricLabelMap := series[i].LabelsMap()
metricLabelMap := series[i].Labels

// Collect groupBy attribute-value pairs for this series
seriesKeys := make([]telemetrytypes.MetricMetadataLookupKey, 0)
@@ -689,7 +689,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
}

// Filter series based on first_seen + delay
filteredSeries := make([]*qbtypes.TimeSeries, 0, len(series))
filteredSeries := make([]*v3.Series, 0, len(series))
evalTimeMs := ts.UnixMilli()
newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()

@@ -727,7 +727,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
// Check if first_seen + delay has passed
if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
// Still within grace period, skip this series
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].LabelsMap())
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].Labels)
continue
}
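Only the series type changed here; the grace-period logic is intact. A minimal sketch of the predicate as it reads in the hunk above (function name hypothetical):

```go
// A series is skipped while the newest first_seen among its label values,
// plus the configured new-group delay, has not yet passed evaluation time.
func withinGracePeriod(maxFirstSeenMs, newGroupEvalDelayMs, evalTimeMs int64) bool {
	return maxFirstSeenMs+newGroupEvalDelayMs > evalTimeMs
}
```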
@@ -26,33 +26,33 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
)

// createTestSeries creates a *qbtypes.TimeSeries with the given labels and optional values
// createTestSeries creates a *v3.Series with the given labels and optional points
// so we don't exactly need the points in the series because the labels are used to determine if the series is new or old
// we use the labels to create a lookup key for the series and then check the first_seen timestamp for the series in the metadata table
func createTestSeries(labels map[string]string, points []*qbtypes.TimeSeriesValue) *qbtypes.TimeSeries {
func createTestSeries(labels map[string]string, points []v3.Point) *v3.Series {
if points == nil {
points = []*qbtypes.TimeSeriesValue{}
points = []v3.Point{}
}
lbls := make([]*qbtypes.Label, 0, len(labels))
for k, v := range labels {
lbls = append(lbls, &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: k}, Value: v})
}
return &qbtypes.TimeSeries{
Labels: lbls,
Values: points,
return &v3.Series{
Labels: labels,
Points: points,
}
}

// seriesEqual compares two *qbtypes.TimeSeries by their labels
// seriesEqual compares two v3.Series by their labels
// Returns true if the series have the same labels (order doesn't matter)
func seriesEqual(s1, s2 *qbtypes.TimeSeries) bool {
m1 := s1.LabelsMap()
m2 := s2.LabelsMap()
if len(m1) != len(m2) {
func seriesEqual(s1, s2 *v3.Series) bool {
if s1 == nil && s2 == nil {
return true
}
if s1 == nil || s2 == nil {
return false
}
for k, v := range m1 {
if m2[k] != v {
if len(s1.Labels) != len(s2.Labels) {
return false
}
for k, v := range s1.Labels {
if s2.Labels[k] != v {
return false
}
}
@@ -149,11 +149,11 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
type filterNewSeriesTestCase struct {
name string
compositeQuery *v3.CompositeQuery
series []*qbtypes.TimeSeries
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*qbtypes.TimeSeries // series that should be in the final filtered result (old enough)
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
}

@@ -193,7 +193,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
@@ -205,7 +205,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
}, // svc-old and svc-missing should be included; svc-new is filtered out
@@ -227,7 +227,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
},
@@ -237,7 +237,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{}, // all should be filtered out (new series)
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
{
name: "all old series - ClickHouse query",
@@ -254,7 +254,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
},
@@ -264,7 +264,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
}, // all should be included (old series)
@@ -292,13 +292,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -322,13 +322,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -358,13 +358,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
}, // series included as we can't decide if it's new or old
},
@@ -385,7 +385,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
},
@@ -393,7 +393,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
}, // both should be included - svc-old is old, svc-no-metadata can't be decided
@@ -413,7 +413,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
},
// Only provide metadata for service_name, not env
@@ -423,7 +423,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
}, // has some metadata, uses max first_seen which is old
},
@@ -453,11 +453,11 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{},
expectedFiltered: []*v3.Series{},
},
{
name: "zero delay - Builder",
@@ -485,13 +485,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // with zero delay, all series pass
},
@@ -526,7 +526,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: mergeFirstSeenMaps(
@@ -535,7 +535,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
},
@@ -565,7 +565,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
// service_name is old, env is new - should use max (new)
@@ -575,7 +575,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{}, // max first_seen is new, so should be filtered out
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
{
name: "Logs query - should skip filtering and return empty skip indexes",
@@ -600,14 +600,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Logs queries should return early, no filtering - all included
@@ -635,14 +635,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Traces queries should return early, no filtering - all included
@@ -724,14 +724,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// Build a map to count occurrences of each unique label combination in expected series
expectedCounts := make(map[string]int)
for _, expected := range tt.expectedFiltered {
key := labelsKey(expected.LabelsMap())
key := labelsKey(expected.Labels)
expectedCounts[key]++
}

// Build a map to count occurrences of each unique label combination in filtered series
actualCounts := make(map[string]int)
for _, filtered := range filteredSeries {
key := labelsKey(filtered.LabelsMap())
key := labelsKey(filtered.Labels)
actualCounts[key]++
}
@@ -12,18 +12,19 @@ import (
    "github.com/SigNoz/signoz/pkg/query-service/formatter"
    "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
    "github.com/SigNoz/signoz/pkg/query-service/utils/times"
    "github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/prometheus/prometheus/promql"
)

type PromRule struct {
    *BaseRule
    version string
    prometheus prometheus.Prometheus
}

@@ -47,6 +48,7 @@ func NewPromRule(

    p := PromRule{
        BaseRule: baseRule,
        version: postableRule.Version,
        prometheus: prometheus,
    }
    p.logger = logger
@@ -81,30 +83,48 @@ func (r *PromRule) GetSelectedQuery() string {
}

func (r *PromRule) getPqlQuery() (string, error) {
    if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
        selectedQuery := r.GetSelectedQuery()
        for _, item := range r.ruleCondition.CompositeQuery.Queries {
            switch item.Type {
            case qbtypes.QueryTypePromQL:
                promQuery, ok := item.Spec.(qbtypes.PromQuery)
                if !ok {
                    return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
                }
                if promQuery.Name == selectedQuery {
                    return promQuery.Query, nil
    if r.version == "v5" {
        if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
            selectedQuery := r.GetSelectedQuery()
            for _, item := range r.ruleCondition.CompositeQuery.Queries {
                switch item.Type {
                case qbtypes.QueryTypePromQL:
                    promQuery, ok := item.Spec.(qbtypes.PromQuery)
                    if !ok {
                        return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
                    }
                    if promQuery.Name == selectedQuery {
                        return promQuery.Query, nil
                    }
                }
            }
        }
        return "", fmt.Errorf("invalid promql rule setup")
    }
    return "", fmt.Errorf("invalid promql rule setup")

    if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
        if len(r.ruleCondition.CompositeQuery.PromQueries) > 0 {
            selectedQuery := r.GetSelectedQuery()
            if promQuery, ok := r.ruleCondition.CompositeQuery.PromQueries[selectedQuery]; ok {
                query := promQuery.Query
                if query == "" {
                    return query, fmt.Errorf("a promquery needs to be set for this rule to function")
                }
                return query, nil
            }
        }
    }

    return "", fmt.Errorf("invalid promql rule query")
}

func matrixToTimeSeries(res promql.Matrix) []*qbtypes.TimeSeries {
    result := make([]*qbtypes.TimeSeries, 0, len(res))
func (r *PromRule) matrixToV3Series(res promql.Matrix) []*v3.Series {
    v3Series := make([]*v3.Series, 0, len(res))
    for _, series := range res {
        result = append(result, promSeriesToTimeSeries(series))
        commonSeries := toCommonSeries(series)
        v3Series = append(v3Series, &commonSeries)
    }
    return result
    return v3Series
}

func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
@@ -123,31 +143,31 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
        return nil, err
    }

    seriesToProcess := matrixToTimeSeries(res)
    matrixToProcess := r.matrixToV3Series(res)

    hasData := len(seriesToProcess) > 0
    hasData := len(matrixToProcess) > 0
    if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
        return ruletypes.Vector{*missingDataAlert}, nil
    }

    // Filter out new series if newGroupEvalDelay is configured
    if r.ShouldSkipNewGroups() {
        filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
        filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
        // In case of error we log the error and continue with the original series
        if filterErr != nil {
            r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
        } else {
            seriesToProcess = filteredSeries
            matrixToProcess = filteredSeries
        }
    }

    var resultVector ruletypes.Vector

    for _, series := range seriesToProcess {
    for _, series := range matrixToProcess {
        if !r.Condition().ShouldEval(series) {
            r.logger.InfoContext(
                ctx, "not enough data points to evaluate series, skipping",
                "rule_id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
                "rule_id", r.ID(), "num_points", len(series.Points), "required_points", r.Condition().RequiredNumPoints,
            )
            continue
        }
@@ -434,25 +454,26 @@ func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time
    }
}

func promSeriesToTimeSeries(series promql.Series) *qbtypes.TimeSeries {
    ts := &qbtypes.TimeSeries{
        Labels: make([]*qbtypes.Label, 0, len(series.Metric)),
        Values: make([]*qbtypes.TimeSeriesValue, 0, len(series.Floats)),
func toCommonSeries(series promql.Series) v3.Series {
    commonSeries := v3.Series{
        Labels: make(map[string]string),
        LabelsArray: make([]map[string]string, 0),
        Points: make([]v3.Point, 0),
    }

    for _, lbl := range series.Metric {
        ts.Labels = append(ts.Labels, &qbtypes.Label{
            Key: telemetrytypes.TelemetryFieldKey{Name: lbl.Name},
            Value: lbl.Value,
        commonSeries.Labels[lbl.Name] = lbl.Value
        commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
            lbl.Name: lbl.Value,
        })
    }

    for _, f := range series.Floats {
        ts.Values = append(ts.Values, &qbtypes.TimeSeriesValue{
        commonSeries.Points = append(commonSeries.Points, v3.Point{
            Timestamp: f.T,
            Value: f.F,
        })
    }

    return ts
    return commonSeries
}

@@ -20,7 +20,6 @@ import (
    qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)
@@ -48,13 +47,9 @@ func TestPromRuleEval(t *testing.T) {
        RuleCondition: &ruletypes.RuleCondition{
            CompositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypePromQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name: "A",
                            Query: "dummy_query", // This is not used in the test
                        },
                PromQueries: map[string]*v3.PromQuery{
                    "A": {
                        Query: "dummy_query", // This is not used in the test
                    },
                },
            },
@@ -67,7 +62,7 @@ func TestPromRuleEval(t *testing.T) {
        compareOp string
        matchType string
        target float64
        expectedAlertSample float64
        expectedAlertSample v3.Point
        expectedVectorValues []float64 // Expected values in result vector
    }{
        // Test cases for Equals Always
@@ -85,7 +80,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "3", // Equals
            matchType: "2", // Always
            target: 0.0,
            expectedAlertSample: 0.0,
            expectedAlertSample: v3.Point{Value: 0.0},
            expectedVectorValues: []float64{0.0},
        },
        {
@@ -150,7 +145,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "3", // Equals
            matchType: "1", // Once
            target: 0.0,
            expectedAlertSample: 0.0,
            expectedAlertSample: v3.Point{Value: 0.0},
            expectedVectorValues: []float64{0.0},
        },
        {
@@ -167,7 +162,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "3", // Equals
            matchType: "1", // Once
            target: 0.0,
            expectedAlertSample: 0.0,
            expectedAlertSample: v3.Point{Value: 0.0},
        },
        {
            values: pql.Series{
@@ -183,7 +178,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "3", // Equals
            matchType: "1", // Once
            target: 0.0,
            expectedAlertSample: 0.0,
            expectedAlertSample: v3.Point{Value: 0.0},
        },
        {
            values: pql.Series{
@@ -216,7 +211,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "1", // Greater Than
            matchType: "2", // Always
            target: 1.5,
            expectedAlertSample: 2.0,
            expectedAlertSample: v3.Point{Value: 2.0},
            expectedVectorValues: []float64{2.0},
        },
        {
@@ -233,7 +228,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "1", // Above
            matchType: "2", // Always
            target: 2.0,
            expectedAlertSample: 3.0,
            expectedAlertSample: v3.Point{Value: 3.0},
        },
        {
            values: pql.Series{
@@ -249,7 +244,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "2", // Below
            matchType: "2", // Always
            target: 13.0,
            expectedAlertSample: 12.0,
            expectedAlertSample: v3.Point{Value: 12.0},
        },
        {
            values: pql.Series{
@@ -281,7 +276,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "1", // Greater Than
            matchType: "1", // Once
            target: 4.5,
            expectedAlertSample: 10.0,
            expectedAlertSample: v3.Point{Value: 10.0},
            expectedVectorValues: []float64{10.0},
        },
        {
@@ -344,7 +339,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "4", // Not Equals
            matchType: "2", // Always
            target: 0.0,
            expectedAlertSample: 1.0,
            expectedAlertSample: v3.Point{Value: 1.0},
        },
        {
            values: pql.Series{
@@ -376,7 +371,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "4", // Not Equals
            matchType: "1", // Once
            target: 0.0,
            expectedAlertSample: 1.0,
            expectedAlertSample: v3.Point{Value: 1.0},
        },
        {
            values: pql.Series{
@@ -407,7 +402,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "4", // Not Equals
            matchType: "1", // Once
            target: 0.0,
            expectedAlertSample: 1.0,
            expectedAlertSample: v3.Point{Value: 1.0},
        },
        {
            values: pql.Series{
@@ -423,7 +418,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "4", // Not Equals
            matchType: "1", // Once
            target: 0.0,
            expectedAlertSample: 1.0,
            expectedAlertSample: v3.Point{Value: 1.0},
        },
        // Test cases for Less Than Always
        {
@@ -440,7 +435,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "2", // Less Than
            matchType: "2", // Always
            target: 4,
            expectedAlertSample: 1.5,
            expectedAlertSample: v3.Point{Value: 1.5},
        },
        {
            values: pql.Series{
@@ -472,7 +467,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "2", // Less Than
            matchType: "1", // Once
            target: 4,
            expectedAlertSample: 2.5,
            expectedAlertSample: v3.Point{Value: 2.5},
        },
        {
            values: pql.Series{
@@ -504,7 +499,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "3", // Equals
            matchType: "3", // OnAverage
            target: 6.0,
            expectedAlertSample: 6.0,
            expectedAlertSample: v3.Point{Value: 6.0},
        },
        {
            values: pql.Series{
@@ -535,7 +530,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "4", // Not Equals
            matchType: "3", // OnAverage
            target: 4.5,
            expectedAlertSample: 6.0,
            expectedAlertSample: v3.Point{Value: 6.0},
        },
        {
            values: pql.Series{
@@ -566,7 +561,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "1", // Greater Than
            matchType: "3", // OnAverage
            target: 4.5,
            expectedAlertSample: 6.0,
            expectedAlertSample: v3.Point{Value: 6.0},
        },
        {
            values: pql.Series{
@@ -582,7 +577,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "2", // Less Than
            matchType: "3", // OnAverage
            target: 12.0,
            expectedAlertSample: 6.0,
            expectedAlertSample: v3.Point{Value: 6.0},
        },
        // Test cases for InTotal
        {
@@ -599,7 +594,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "3", // Equals
            matchType: "4", // InTotal
            target: 30.0,
            expectedAlertSample: 30.0,
            expectedAlertSample: v3.Point{Value: 30.0},
        },
        {
            values: pql.Series{
@@ -626,7 +621,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "4", // Not Equals
            matchType: "4", // InTotal
            target: 9.0,
            expectedAlertSample: 10.0,
            expectedAlertSample: v3.Point{Value: 10.0},
        },
        {
            values: pql.Series{
@@ -650,7 +645,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "1", // Greater Than
            matchType: "4", // InTotal
            target: 10.0,
            expectedAlertSample: 20.0,
            expectedAlertSample: v3.Point{Value: 20.0},
        },
        {
            values: pql.Series{
@@ -675,7 +670,7 @@ func TestPromRuleEval(t *testing.T) {
            compareOp: "2", // Less Than
            matchType: "4", // InTotal
            target: 30.0,
            expectedAlertSample: 20.0,
            expectedAlertSample: v3.Point{Value: 20.0},
        },
        {
            values: pql.Series{
@@ -713,7 +708,7 @@ func TestPromRuleEval(t *testing.T) {
            assert.NoError(t, err)
        }

        resultVectors, err := rule.Threshold.Eval(*promSeriesToTimeSeries(c.values), rule.Unit(), ruletypes.EvalData{})
        resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
        assert.NoError(t, err)

        // Compare full result vector with expected vector
@@ -729,12 +724,12 @@ func TestPromRuleEval(t *testing.T) {
        if len(resultVectors) > 0 {
            found := false
            for _, sample := range resultVectors {
                if sample.V == c.expectedAlertSample {
                if sample.V == c.expectedAlertSample.Value {
                    found = true
                    break
                }
            }
            assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample, idx, actualValues)
            assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues)
        }
        } else {
            assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
@@ -759,13 +754,9 @@ func TestPromRuleUnitCombinations(t *testing.T) {
        RuleCondition: &ruletypes.RuleCondition{
            CompositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypePromQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name: "A",
                            Query: "test_metric",
                        },
                PromQueries: map[string]*v3.PromQuery{
                    "A": {
                        Query: "test_metric",
                    },
                },
            },
@@ -1022,13 +1013,9 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
        RuleCondition: &ruletypes.RuleCondition{
            CompositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypePromQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name: "A",
                            Query: "test_metric",
                        },
                PromQueries: map[string]*v3.PromQuery{
                    "A": {
                        Query: "test_metric",
                    },
                },
            },
@@ -1137,13 +1124,9 @@ func TestMultipleThresholdPromRule(t *testing.T) {
        RuleCondition: &ruletypes.RuleCondition{
            CompositeQuery: &v3.CompositeQuery{
                QueryType: v3.QueryTypePromQL,
                Queries: []qbtypes.QueryEnvelope{
                    {
                        Type: qbtypes.QueryTypePromQL,
                        Spec: qbtypes.PromQuery{
                            Name: "A",
                            Query: "test_metric",
                        },
                PromQueries: map[string]*v3.PromQuery{
                    "A": {
                        Query: "test_metric",
                    },
                },
            },
@@ -1378,11 +1361,8 @@ func TestPromRule_NoData(t *testing.T) {
        MatchType: ruletypes.AtleastOnce,
        CompositeQuery: &v3.CompositeQuery{
            QueryType: v3.QueryTypePromQL,
            Queries: []qbtypes.QueryEnvelope{
                {
                    Type: qbtypes.QueryTypePromQL,
                    Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
                },
            PromQueries: map[string]*v3.PromQuery{
                "A": {Query: "test_metric"},
            },
        },
        Thresholds: &ruletypes.RuleThresholdData{
@@ -1506,11 +1486,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
        Target: &target,
        CompositeQuery: &v3.CompositeQuery{
            QueryType: v3.QueryTypePromQL,
            Queries: []qbtypes.QueryEnvelope{
                {
                    Type: qbtypes.QueryTypePromQL,
                    Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
                },
            PromQueries: map[string]*v3.PromQuery{
                "A": {Query: "test_metric"},
            },
        },
        Thresholds: &ruletypes.RuleThresholdData{
@@ -1658,11 +1635,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
        MatchType: ruletypes.AtleastOnce,
        CompositeQuery: &v3.CompositeQuery{
            QueryType: v3.QueryTypePromQL,
            Queries: []qbtypes.QueryEnvelope{
                {
                    Type: qbtypes.QueryTypePromQL,
                    Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
                },
            PromQueries: map[string]*v3.PromQuery{
                "A": {Query: "test_metric"},
            },
        },
    },

@@ -1,24 +1,38 @@
package rules

import (
    "bytes"
    "context"
    "encoding/json"
    "fmt"
    "log/slog"
    "math"
    "net/url"
    "reflect"
    "text/template"
    "time"

    "github.com/SigNoz/signoz/pkg/contextlinks"
    "github.com/SigNoz/signoz/pkg/query-service/common"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/query-service/postprocess"
    "github.com/SigNoz/signoz/pkg/transition"
    "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/SigNoz/signoz/pkg/valuer"

    "github.com/SigNoz/signoz/pkg/query-service/app/querier"
    querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
    "github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
    "github.com/SigNoz/signoz/pkg/query-service/interfaces"
    v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
    "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
    querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
    "github.com/SigNoz/signoz/pkg/query-service/utils/times"
    "github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"

    logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
    tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
    "github.com/SigNoz/signoz/pkg/query-service/formatter"

    querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -28,9 +42,23 @@ import (

type ThresholdRule struct {
    *BaseRule
    // Ever since we introduced the new metrics query builder, the version is "v4"
    // for all the rules
    // if the version is "v3", then we use the old querier
    // if the version is "v4", then we use the new querierV2
    version string

    // querierV5 is the query builder v5 querier used for all alert rule evaluation
    // querier is used for alerts created before the introduction of new metrics query builder
    querier interfaces.Querier
    // querierV2 is used for alerts created after the introduction of new metrics query builder
    querierV2 interfaces.Querier

    // querierV5 is used for alerts migrated after the introduction of new query builder
    querierV5 querierV5.Querier

    // used for attribute metadata enrichment for logs and traces
    logsKeys map[string]v3.AttributeKey
    spansKeys map[string]v3.AttributeKey
}

var _ Rule = (*ThresholdRule)(nil)
@@ -54,10 +82,25 @@ func NewThresholdRule(
    }

    t := ThresholdRule{
        BaseRule: baseRule,
        querierV5: querierV5,
        BaseRule: baseRule,
        version: p.Version,
    }

    querierOption := querier.QuerierOptions{
        Reader: reader,
        Cache: nil,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
    }

    querierOptsV2 := querierV2.QuerierOptions{
        Reader: reader,
        Cache: nil,
        KeyGenerator: queryBuilder.NewKeyGenerator(),
    }

    t.querier = querier.NewQuerier(querierOption)
    t.querierV2 = querierV2.NewQuerier(querierOptsV2)
    t.querierV5 = querierV5
    t.reader = reader
    return &t, nil
}
@@ -77,9 +120,169 @@ func (r *ThresholdRule) Type() ruletypes.RuleType {
    return ruletypes.RuleTypeThreshold
}

func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
    r.logger.InfoContext(
        ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
        ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
    )

    startTs, endTs := r.Timestamps(ts)
    start, end := startTs.UnixMilli(), endTs.UnixMilli()

    if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
        params := &v3.QueryRangeParamsV3{
            Start: start,
            End: end,
            Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
            CompositeQuery: &v3.CompositeQuery{
                QueryType: r.ruleCondition.CompositeQuery.QueryType,
                PanelType: r.ruleCondition.CompositeQuery.PanelType,
                BuilderQueries: make(map[string]*v3.BuilderQuery),
                ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
                PromQueries: make(map[string]*v3.PromQuery),
                Unit: r.ruleCondition.CompositeQuery.Unit,
            },
            Variables: make(map[string]interface{}),
            NoCache: true,
        }
        querytemplate.AssignReservedVarsV3(params)
        for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
            if chQuery.Disabled {
                continue
            }
            tmpl := template.New("clickhouse-query")
            tmpl, err := tmpl.Parse(chQuery.Query)
            if err != nil {
                return nil, err
            }
            var query bytes.Buffer
            err = tmpl.Execute(&query, params.Variables)
            if err != nil {
                return nil, err
            }
            params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
                Query: query.String(),
                Disabled: chQuery.Disabled,
                Legend: chQuery.Legend,
            }
        }
        return params, nil
    }

    if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
        for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
            // If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
            if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
                q.StepInterval = minStep
            }

            q.SetShiftByFromFunc()

            if q.DataSource == v3.DataSourceMetrics {
                // if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes
                // if the time range is greater than 1 week, set the step interval to be multiple of 30 mins
                if end-start >= 24*time.Hour.Milliseconds() && end-start < 7*24*time.Hour.Milliseconds() {
                    q.StepInterval = int64(math.Round(float64(q.StepInterval)/300)) * 300
                } else if end-start >= 7*24*time.Hour.Milliseconds() {
                    q.StepInterval = int64(math.Round(float64(q.StepInterval)/1800)) * 1800
                }
            }
        }
    }

    if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
        r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
    }

    // default mode
    return &v3.QueryRangeParamsV3{
        Start: start,
        End: end,
        Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
        CompositeQuery: r.ruleCondition.CompositeQuery,
        Variables: make(map[string]interface{}),
        NoCache: true,
    }, nil
}

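A standalone sketch of the metric step snapping performed in prepareQueryRange above; snapStep and its arguments are illustrative names, not part of the codebase:

package main

import (
    "fmt"
    "math"
    "time"
)

// snapStep mirrors the branch above: for query ranges between one day and one
// week the step (in seconds) is rounded to a multiple of 5 minutes, beyond one
// week to a multiple of 30 minutes; shorter ranges keep the step unchanged.
func snapStep(stepSeconds, startMs, endMs int64) int64 {
    day := 24 * time.Hour.Milliseconds()
    week := 7 * day
    switch {
    case endMs-startMs >= day && endMs-startMs < week:
        return int64(math.Round(float64(stepSeconds)/300)) * 300
    case endMs-startMs >= week:
        return int64(math.Round(float64(stepSeconds)/1800)) * 1800
    default:
        return stepSeconds
    }
}

func main() {
    threeDays := 3 * 24 * time.Hour.Milliseconds()
    tenDays := 10 * 24 * time.Hour.Milliseconds()
    fmt.Println(snapStep(240, 0, threeDays)) // 300
    fmt.Println(snapStep(1500, 0, tenDays))  // 1800
}
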
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
    if r.version == "v5" {
        return r.prepareLinksToLogsV5(ctx, ts, lbls)
    }

    selectedQuery := r.GetSelectedQuery()

    qr, err := r.prepareQueryRange(ctx, ts)
    if err != nil {
        return ""
    }
    start := time.UnixMilli(qr.Start)
    end := time.UnixMilli(qr.End)

    // TODO(srikanthccv): handle formula queries
    if selectedQuery < "A" || selectedQuery > "Z" {
        return ""
    }

    q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
    if q == nil {
        return ""
    }

    if q.DataSource != v3.DataSourceLogs {
        return ""
    }

    queryFilter := []v3.FilterItem{}
    if q.Filters != nil {
        queryFilter = q.Filters.Items
    }

    filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)

    return contextlinks.PrepareLinksToLogs(start, end, filterItems)
}

func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
    if r.version == "v5" {
        return r.prepareLinksToTracesV5(ctx, ts, lbls)
    }

    selectedQuery := r.GetSelectedQuery()

    qr, err := r.prepareQueryRange(ctx, ts)
    if err != nil {
        return ""
    }
    start := time.UnixMilli(qr.Start)
    end := time.UnixMilli(qr.End)

    // TODO(srikanthccv): handle formula queries
    if selectedQuery < "A" || selectedQuery > "Z" {
        return ""
    }

    q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
    if q == nil {
        return ""
    }

    if q.DataSource != v3.DataSourceTraces {
        return ""
    }

    queryFilter := []v3.FilterItem{}
    if q.Filters != nil {
        queryFilter = q.Filters.Items
    }

    filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)

    return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}

func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
    r.logger.InfoContext(
        ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
    )

    startTs, endTs := r.Timestamps(ts)
@@ -99,10 +302,10 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*q
    return req, nil
}

func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
    selectedQuery := r.GetSelectedQuery()

    qr, err := r.prepareQueryRange(ctx, ts)
    qr, err := r.prepareQueryRangeV5(ctx, ts)
    if err != nil {
        return ""
    }
@@ -139,10 +342,10 @@ func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lb
    return contextlinks.PrepareLinksToLogsV5(start, end, whereClause)
}

func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
    selectedQuery := r.GetSelectedQuery()

    qr, err := r.prepareQueryRange(ctx, ts)
    qr, err := r.prepareQueryRangeV5(ctx, ts)
    if err != nil {
        return ""
    }
@@ -188,6 +391,115 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
    if err != nil {
        return nil, err
    }
    err = r.PopulateTemporality(ctx, orgID, params)
    if err != nil {
        return nil, fmt.Errorf("internal error while setting temporality")
    }

    if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
        hasLogsQuery := false
        hasTracesQuery := false
        for _, query := range params.CompositeQuery.BuilderQueries {
            if query.DataSource == v3.DataSourceLogs {
                hasLogsQuery = true
            }
            if query.DataSource == v3.DataSourceTraces {
                hasTracesQuery = true
            }
        }

        if hasLogsQuery {
            // check if any enrichment is required for logs if yes then enrich them
            if logsv3.EnrichmentRequired(params) {
                logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
                if apiErr != nil {
                    return nil, apiErr.ToError()
                }
                logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
                r.logsKeys = logsKeys
                logsv3.Enrich(params, logsKeys)
            }
        }

        if hasTracesQuery {
            spanKeys, err := r.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
            if err != nil {
                return nil, err
            }
            r.spansKeys = spanKeys
            tracesV4.Enrich(params, spanKeys)
        }
    }

    var results []*v3.Result
    var queryErrors map[string]error

    if r.version == "v4" {
        results, queryErrors, err = r.querierV2.QueryRange(ctx, orgID, params)
    } else {
        results, queryErrors, err = r.querier.QueryRange(ctx, orgID, params)
    }

    if err != nil {
        r.logger.ErrorContext(ctx, "failed to get alert query range result", "rule_name", r.Name(), "error", err, "query_errors", queryErrors)
        return nil, fmt.Errorf("internal error while querying")
    }

    if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
        results, err = postprocess.PostProcessResult(results, params)
        if err != nil {
            r.logger.ErrorContext(ctx, "failed to post process result", "rule_name", r.Name(), "error", err)
            return nil, fmt.Errorf("internal error while post processing")
        }
    }

    selectedQuery := r.GetSelectedQuery()

    var queryResult *v3.Result
    for _, res := range results {
        if res.QueryName == selectedQuery {
            queryResult = res
            break
        }
    }

    hasData := queryResult != nil && len(queryResult.Series) > 0
    if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
        return ruletypes.Vector{*missingDataAlert}, nil
    }

    var resultVector ruletypes.Vector

    if queryResult == nil {
        r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
        return resultVector, nil
    }

    for _, series := range queryResult.Series {
        if !r.Condition().ShouldEval(series) {
            r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
            continue
        }
        resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
            ActiveAlerts: r.ActiveAlertsLabelFP(),
            SendUnmatched: r.ShouldSendUnmatched(),
        })
        if err != nil {
            return nil, err
        }
        resultVector = append(resultVector, resultSeries...)
    }

    return resultVector, nil
}

func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
    params, err := r.prepareQueryRangeV5(ctx, ts)
    if err != nil {
        return nil, err
    }

    var results []*v3.Result

    v5Result, err := r.querierV5.QueryRange(ctx, orgID, params)
    if err != nil {
@@ -195,24 +507,26 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
        return nil, fmt.Errorf("internal error while querying")
    }

    for _, item := range v5Result.Data.Results {
        if tsData, ok := item.(*qbtypes.TimeSeriesData); ok {
            results = append(results, transition.ConvertV5TimeSeriesDataToV4Result(tsData))
        } else {
            // NOTE: should not happen but just to ensure we don't miss it if it happens for some reason
            r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got", "item_type", reflect.TypeOf(item))
        }
    }

    selectedQuery := r.GetSelectedQuery()

    var queryResult *qbtypes.TimeSeriesData
    for _, item := range v5Result.Data.Results {
        if tsData, ok := item.(*qbtypes.TimeSeriesData); ok && tsData.QueryName == selectedQuery {
            queryResult = tsData
    var queryResult *v3.Result
    for _, res := range results {
        if res.QueryName == selectedQuery {
            queryResult = res
            break
        }
    }

    var allSeries []*qbtypes.TimeSeries
    if queryResult != nil {
        for _, bucket := range queryResult.Aggregations {
            allSeries = append(allSeries, bucket.Series...)
        }
    }

    hasData := len(allSeries) > 0
    hasData := queryResult != nil && len(queryResult.Series) > 0
    if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
        return ruletypes.Vector{*missingDataAlert}, nil
    }
@@ -225,7 +539,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
    }

    // Filter out new series if newGroupEvalDelay is configured
    seriesToProcess := allSeries
    seriesToProcess := queryResult.Series
    if r.ShouldSkipNewGroups() {
        filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
        // In case of error we log the error and continue with the original series
@@ -238,7 +552,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,

    for _, series := range seriesToProcess {
        if !r.Condition().ShouldEval(series) {
            r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
            r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
            continue
        }
        resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -259,7 +573,16 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {

    valueFormatter := formatter.FromUnit(r.Unit())

    res, err := r.buildAndRunQuery(ctx, r.orgID, ts)
    var res ruletypes.Vector
    var err error

    if r.version == "v5" {
        r.logger.InfoContext(ctx, "running v5 query")
        res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
    } else {
        r.logger.InfoContext(ctx, "running v4 query")
        res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
    }

    if err != nil {
        return 0, err

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,7 +1,10 @@
package signoz

import (
    "bytes"
    "context"
    "encoding/json"
    "net/http"
    "os"
    "reflect"

@@ -22,10 +25,13 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/promote"
    "github.com/SigNoz/signoz/pkg/modules/session"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/querier"
    "github.com/SigNoz/signoz/pkg/types/ctxtypes"
    "github.com/gorilla/mux"
    "github.com/swaggest/jsonschema-go"
    "github.com/swaggest/openapi-go"
    "github.com/swaggest/openapi-go/openapi3"
    "gopkg.in/yaml.v2"
)

type OpenAPI struct {
@@ -57,6 +63,10 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
        return nil, err
    }

    // Register routes that live outside the APIServer modules
    // so they are discovered by the OpenAPI walker.
    registerQueryRoutes(apiserver.Router())

    reflector := openapi3.NewReflector()
    reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
        if defaultDefName == "RenderSuccessResponse" {
@@ -90,10 +100,67 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
        return err
    }

    spec, err := openapi.reflector.Spec.MarshalYAML()
    // The library's MarshalYAML does a JSON round-trip that converts all numbers
    // to float64, causing large integers (e.g. epoch millisecond timestamps) to
    // render in scientific notation (1.6409952e+12).
    jsonData, err := openapi.reflector.Spec.MarshalJSON()
    if err != nil {
        return err
    }

    dec := json.NewDecoder(bytes.NewReader(jsonData))
    dec.UseNumber()

    var v any
    if err := dec.Decode(&v); err != nil {
        return err
    }

    convertJSONNumbers(v)

    spec, err := yaml.Marshal(v)
    if err != nil {
        return err
    }

    return os.WriteFile(path, spec, 0o600)
}

// convertJSONNumbers recursively walks a decoded JSON structure and converts
// json.Number values to int64 (preferred) or float64 so that YAML marshaling
// renders them as plain numbers instead of quoted strings.
func convertJSONNumbers(v interface{}) {
    switch val := v.(type) {
    case map[string]interface{}:
        for k, elem := range val {
            if n, ok := elem.(json.Number); ok {
                if i, err := n.Int64(); err == nil {
                    val[k] = i
                } else if f, err := n.Float64(); err == nil {
                    val[k] = f
                }
            } else {
                convertJSONNumbers(elem)
            }
        }
    case []interface{}:
        for i, elem := range val {
            if n, ok := elem.(json.Number); ok {
                if i64, err := n.Int64(); err == nil {
                    val[i] = i64
                } else if f, err := n.Float64(); err == nil {
                    val[i] = f
                }
            } else {
                convertJSONNumbers(elem)
            }
        }
    }
}

func registerQueryRoutes(router *mux.Router) {
    router.Handle("/api/v5/query_range", handler.New(
        func(http.ResponseWriter, *http.Request) {},
        querier.QueryRangeV5OpenAPIDef,
    )).Methods(http.MethodPost)
}

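A minimal sketch of the UseNumber round-trip that CreateAndWrite relies on, assuming gopkg.in/yaml.v2 as imported above; the input document here is made up for illustration:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"

    "gopkg.in/yaml.v2"
)

func main() {
    src := []byte(`{"start": 1640995200000}`) // epoch millisecond timestamp

    // Plain json.Unmarshal decodes every number as float64, so the YAML
    // output degrades to scientific notation.
    var lossy any
    _ = json.Unmarshal(src, &lossy)
    out, _ := yaml.Marshal(lossy)
    fmt.Print(string(out)) // start: 1.6409952e+12

    // Decoding with UseNumber keeps the literal; converting json.Number to
    // int64 (as convertJSONNumbers does) yields a plain integer in YAML.
    dec := json.NewDecoder(bytes.NewReader(src))
    dec.UseNumber()
    var exact map[string]any
    _ = dec.Decode(&exact)
    if n, ok := exact["start"].(json.Number); ok {
        if i, err := n.Int64(); err == nil {
            exact["start"] = i
        }
    }
    out, _ = yaml.Marshal(exact)
    fmt.Print(string(out)) // start: 1640995200000
}
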
@@ -169,7 +169,6 @@ func NewSQLMigrationProviderFactories(
        sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
        sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
        sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
        sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
    )
}

@@ -1,194 +0,0 @@
package sqlmigration

import (
    "context"
    "database/sql"
    "encoding/json"
    "log/slog"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/transition"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type migrateRulesV4ToV5 struct {
    store sqlstore.SQLStore
    telemetryStore telemetrystore.TelemetryStore
    logger *slog.Logger
}

func NewMigrateRulesV4ToV5Factory(
    store sqlstore.SQLStore,
    telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(
        factory.MustNewName("migrate_rules_v4_to_v5"),
        func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
            return &migrateRulesV4ToV5{
                store: store,
                telemetryStore: telemetryStore,
                logger: ps.Logger,
            }, nil
        })
}

func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }
    return nil
}

func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
    query := `
        SELECT name
        FROM (
            SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
            INTERSECT
            SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
        )
        ORDER BY name
    `

    rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
    if err != nil {
        migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
        return nil, nil
    }
    defer rows.Close()

    var keys []string
    for rows.Next() {
        var key string
        if err := rows.Scan(&key); err != nil {
            migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
            continue
        }
        keys = append(keys, key)
    }

    return keys, nil
}

func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
    query := `
        SELECT tagKey
        FROM signoz_traces.distributed_span_attributes_keys
        WHERE tagType IN ('tag', 'resource')
        GROUP BY tagKey
        HAVING COUNT(DISTINCT tagType) > 1
        ORDER BY tagKey
    `

    rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
    if err != nil {
        migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
        return nil, nil
    }
    defer rows.Close()

    var keys []string
    for rows.Next() {
        var key string
        if err := rows.Scan(&key); err != nil {
            migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
            continue
        }
        keys = append(keys, key)
    }

    return keys, nil
}

func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
    logsKeys, err := migration.getLogDuplicateKeys(ctx)
    if err != nil {
        return err
    }

    tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
    if err != nil {
        return err
    }

    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    var rules []struct {
        ID string `bun:"id"`
        Data map[string]any `bun:"data"`
    }

    err = tx.NewSelect().
        Table("rule").
        Column("id", "data").
        Scan(ctx, &rules)
    if err != nil {
        if err == sql.ErrNoRows {
            return nil
        }
        return err
    }

    alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)

    for _, rule := range rules {
        version, _ := rule.Data["version"].(string)
        if version == "v5" {
            continue
        }

        migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)

        // Check if the queries envelope already exists and is non-empty
        hasQueriesEnvelope := false
        if condition, ok := rule.Data["condition"].(map[string]any); ok {
            if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
                if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
                    hasQueriesEnvelope = true
                }
            }
        }

        if hasQueriesEnvelope {
            // Case 2: Already has queries envelope, just bump version
            migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
            rule.Data["version"] = "v5"
        } else {
            // Case 1: Old format, run full migration
            migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
            alertsMigrator.Migrate(ctx, rule.Data)
            // Force version to v5 regardless of Migrate return value
            rule.Data["version"] = "v5"
        }

        dataJSON, err := json.Marshal(rule.Data)
        if err != nil {
            return err
        }

        _, err = tx.NewUpdate().
            Table("rule").
            Set("data = ?", string(dataJSON)).
            Where("id = ?", rule.ID).
            Exec(ctx)
        if err != nil {
            return err
        }
    }

    return tx.Commit()
}

func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
    return nil
}
@@ -61,13 +61,11 @@ var (
    }
)

type fieldMapper struct {
}
type fieldMapper struct {}

func NewFieldMapper() qbtypes.FieldMapper {
    return &fieldMapper{}
}

func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
    switch key.FieldContext {
    case telemetrytypes.FieldContextResource:
@@ -254,12 +252,27 @@ func (m *fieldMapper) buildFieldForJSON(key *telemetrytypes.TelemetryFieldKey) (
            "plan length is less than 2 for promoted path: %s", key.Name)
    }

    // promoted column first then body_json column
    // TODO(Piyush): Change this in future for better performance
    expr = fmt.Sprintf("coalesce(%s, %s)",
        fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
        expr,
    node := plan[1]
    promotedExpr := fmt.Sprintf(
        "dynamicElement(%s, '%s')",
        node.FieldPath(),
        node.TerminalConfig.ElemType.StringValue(),
    )

    // dynamicElement returns NULL for scalar types or an empty array for array types.
    if node.TerminalConfig.ElemType.IsArray {
        expr = fmt.Sprintf(
            "if(length(%s) > 0, %s, %s)",
            promotedExpr,
            promotedExpr,
            expr,
        )
    } else {
        // promoted column first then body_json column
        // TODO(Piyush): Change this in future for better performance
        expr = fmt.Sprintf("coalesce(%s, %s)", promotedExpr, expr)
    }

    }

    return expr, nil
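The if/length branch above exists because, per the comment in the hunk, dynamicElement yields an empty array rather than NULL for array element types, so coalesce alone would never fall through to the body_json column. A sketch of the two generated expression shapes (column and path names illustrative only):

package main

import "fmt"

func main() {
    promoted := "dynamicElement(body_json_promoted.`education`.`parameters`, 'Array(Nullable(Float64))')"
    fallback := "dynamicElement(body_json.`education`.`parameters`, 'Array(Nullable(Float64))')"

    // Scalar element types: absence surfaces as NULL, so coalesce works.
    scalar := fmt.Sprintf("coalesce(%s, %s)", promoted, fallback)

    // Array element types: absence surfaces as an empty array, so the
    // promoted column must be length-checked before falling back.
    array := fmt.Sprintf("if(length(%s) > 0, %s, %s)", promoted, promoted, fallback)

    fmt.Println(scalar)
    fmt.Println(array)
}
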
@@ -281,8 +294,7 @@ func (m *fieldMapper) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (stri
    }

    // Build arrayMap expressions for ALL available branches at the root level.
    // Iterate branches in deterministic order (JSON then Dynamic) so generated SQL
    // is stable across environments; map iteration order is random in Go.
    // Iterate branches in deterministic order (JSON then Dynamic)
    var arrayMapExpressions []string
    for _, node := range plan {
        for _, branchType := range node.BranchesInOrder() {

@@ -32,7 +32,6 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te

// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {

    conditions := []string{}
    for _, node := range c.key.JSONPlan {
        condition, err := c.emitPlannedCondition(node, operator, value, sb)
@@ -73,9 +72,9 @@ func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONA

        // switch operator for array membership checks
        switch operator {
        case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
        case qbtypes.FilterOperatorContains:
            operator = qbtypes.FilterOperatorEqual
        case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
        case qbtypes.FilterOperatorNotContains:
            operator = qbtypes.FilterOperatorNotEqual
        }
    }
@@ -191,13 +190,14 @@ func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytype
        arrayExpr = typedArrayExpr()
    }

    fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, "x", operator)
    key := "x"
    fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
    op, err := c.applyOperator(sb, fieldExpr, operator, value)
    if err != nil {
        return "", err
    }

    return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
    return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
}

// recurseArrayHops recursively builds array traversal conditions
@@ -279,27 +279,31 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
    case qbtypes.FilterOperatorNotContains:
        return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
    case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
        // emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
        values, ok := value.([]any)
        if !ok {
            values = []any{value}
        }
        conds := []string{}
        for _, v := range values {
            if operator == qbtypes.FilterOperatorIn {
                conds = append(conds, sb.E(fieldExpr, v))
            } else {
                conds = append(conds, sb.NE(fieldExpr, v))
            }
        }
        if operator == qbtypes.FilterOperatorIn {
            return sb.Or(conds...), nil
            return sb.In(fieldExpr, values...), nil
        }
        return sb.And(conds...), nil
        return sb.NotIn(fieldExpr, values...), nil
    case qbtypes.FilterOperatorExists:
        return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
    case qbtypes.FilterOperatorNotExists:
        return fmt.Sprintf("%s IS NULL", fieldExpr), nil
    // between and not between
    case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
        values, ok := value.([]any)
        if !ok {
            return "", qbtypes.ErrBetweenValues
        }
        if len(values) != 2 {
            return "", qbtypes.ErrBetweenValues
        }
        if operator == qbtypes.FilterOperatorBetween {
            return sb.Between(fieldExpr, values[0], values[1]), nil
        }
        return sb.NotBetween(fieldExpr, values[0], values[1]), nil
    default:
        return "", qbtypes.ErrUnsupportedOperator
    }

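For reference, a runnable sketch of the single-placeholder IN rewrite above, assuming the sqlbuilder package in use is huandu/go-sqlbuilder (which provides In/NotIn with bound arguments); table and column names are illustrative:

package main

import (
    "fmt"

    "github.com/huandu/go-sqlbuilder"
)

func main() {
    sb := sqlbuilder.NewSelectBuilder()
    sb.Select("id").From("logs")

    // One IN condition with bound arguments replaces the previous OR chain
    // of per-value equality conditions.
    sb.Where(sb.In("severity_text", "ERROR", "FATAL"))

    sql, args := sb.Build()
    fmt.Println(sql)  // SELECT id FROM logs WHERE severity_text IN (?, ?)
    fmt.Println(args) // [ERROR FATAL]
}
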
@@ -316,7 +316,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -345,7 +345,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -360,12 +360,55 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
{
name: "Dynamic array IN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body.education[].parameters IN [1.65, 1.99]"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> toFloat64(x) IN (?, ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> x IN (?, ?), arrayMap(x->dynamicElement(x, 'Array(Nullable(Float64))'), arrayFilter(x->(dynamicType(x) = 'Array(Nullable(Float64))'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), 1.65, 1.99, 1.65, 1.99, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
{
name: "Integer BETWEEN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester BETWEEN 2 AND 4"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Integer IN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester IN [2, 4]"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Equals to 'sports' inside array of awards",
requestType: qbtypes.RequestTypeRaw,
@@ -389,7 +432,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(toFloat64OrNull(x) -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? 
AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "%4%", float64(4), "%4%", float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -404,7 +447,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%Good%", "Good", "%Good%", "Good", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
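The cases above exercise the ambiguous-key path: `education[].parameters` and the `ratings` key have each been ingested with two different data types, so the builder emits one typed predicate per variant, ORs them together, and surfaces a warning rather than failing the query. A minimal sketch of that combination step (all names here are hypothetical, not part of this diff):

package main

import "fmt"

// typedVariant is one (type, predicate) candidate for an ambiguous key.
type typedVariant struct {
	jsonDataType string // e.g. "Array(Nullable(Float64))" or "Array(Dynamic)"
	predicate    string // the arrayExists(...) fragment built for that type
}

// combineVariants ORs the per-type predicates and records a warning when the
// key resolved to more than one type, mirroring the Warnings in the tests above.
func combineVariants(key string, variants []typedVariant) (string, []string) {
	expr := ""
	var warnings []string
	for i, v := range variants {
		if i > 0 {
			expr += " OR "
		}
		expr += "(" + v.predicate + ")"
	}
	if len(variants) > 1 {
		warnings = append(warnings, fmt.Sprintf("Key `%s` is ambiguous, found %d different combinations of field context / data type", key, len(variants)))
	}
	return expr, warnings
}

func main() {
	expr, warns := combineVariants("education[].parameters", []typedVariant{
		{jsonDataType: "Array(Nullable(Float64))", predicate: "arrayExists(...float64 branch...)"},
		{jsonDataType: "Array(Dynamic)", predicate: "arrayExists(...dynamic branch...)"},
	})
	fmt.Println(expr)
	fmt.Println(warns)
}

The expected Warnings strings in these cases are exactly this kind of message, listing each context/datatype combination that was found.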
@@ -492,7 +535,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
disableBodyJSONQuery(t)
}()

statementBuilder := buildJSONTestStatementBuilder(t, "education")
statementBuilder := buildJSONTestStatementBuilder(t, "education", "tags")
cases := []struct {
name string
requestType qbtypes.RequestType
@@ -500,6 +543,20 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected qbtypes.Statement
expectedErr error
}{
{
name: "Has Array promoted uses body fallback",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "has(body.tags, 'production')"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(if(length(dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))')) > 0, dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))'), dynamicElement(body_json.`tags`, 'Array(Nullable(String))')), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "production", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Key inside Array(JSON) exists",
requestType: qbtypes.RequestTypeRaw,
@@ -551,7 +608,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -580,7 +637,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -595,7 +652,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},

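The promoted tests above all lean on a fallback between the materialized body_json_promoted column and the raw body_json column, visible in the expected queries as if(length(...) > 0, promoted, raw). A minimal sketch of how such a fallback expression could be assembled (hypothetical helper; the real builder lives in the telemetrylogs package):

package main

import "fmt"

// promotedWithFallback reads a JSON path from body_json_promoted when it is
// populated and falls back to the raw body_json column otherwise, mirroring
// the if(length(...) > 0, ...) shape in the expected queries above.
func promotedWithFallback(path, chType string) string {
	promoted := fmt.Sprintf("dynamicElement(body_json_promoted.`%s`, '%s')", path, chType)
	raw := fmt.Sprintf("dynamicElement(body_json.`%s`, '%s')", path, chType)
	return fmt.Sprintf("if(length(%s) > 0, %s, %s)", promoted, promoted, raw)
}

func main() {
	fmt.Println(promotedWithFallback("tags", "Array(Nullable(String))"))
}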
@@ -3,12 +3,12 @@ package telemetrylogs
import (
"context"
"fmt"
"reflect"
"strconv"
"strings"

qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
@@ -41,31 +41,55 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
}

func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
// check if the value is a int, float, string, bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
if len(v) > 0 {
valueType, _ = InferDataType(v[0], operator, key)
if operator.IsArrayOperator() && reflect.ValueOf(value).Kind() != reflect.Slice {
value = []any{value}
}

// closure to calculate the data type of the value
var closure func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any)
closure = func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
// check if the value is a int, float, string, bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
var scalerType telemetrytypes.FieldDataType
if len(v) > 0 {
// Note: [[...]] Slices inside Slices are not handled yet
if reflect.ValueOf(v[0]).Kind() == reflect.Slice {
return telemetrytypes.FieldDataTypeUnspecified, value
}

scalerType, _ = closure(v[0], key)
}

arrayType := telemetrytypes.ScalerFieldTypeToArrayFieldType[scalerType]
switch {
// decide on the field data type based on the key
case key.FieldDataType.IsArray():
return arrayType, v
default:
// TODO(Piyush): backward compatibility for the old String based JSON QB queries
if strings.HasSuffix(key.Name, telemetrytypes.ArrayAnyIndexSuffix) {
return arrayType, v
}
return scalerType, v
}
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
}
return valueType, v
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool

return valueType, value
}

// check if it is array
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}

return valueType, value
// calculate the data type of the value
return closure(value, key)
}

func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {

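For context on the refactor above: the new closure infers a scalar type from the first array element, bails out on nested slices, and widens to the array type when the key itself is array-typed or carries the legacy `[*]`/`[]` suffix. A simplified, self-contained sketch of that behaviour (assumptions noted in comments; it mirrors, not reuses, the real InferDataType):

package main

import (
	"fmt"
	"reflect"
)

// inferScalar mirrors the scalar cases of the closure above (simplified).
func inferScalar(v any) string {
	switch v.(type) {
	case int, int8, int16, int32, int64, uint8, uint16, uint32, uint64:
		return "int64"
	case float32, float64:
		return "float64"
	case bool:
		return "bool"
	case string:
		return "string"
	}
	return "unspecified"
}

// inferValue mirrors the []any branch: the first element decides the scalar
// type, and nested slices ([[...]]) stay unspecified, as the Note in the diff says.
func inferValue(v any) string {
	if arr, ok := v.([]any); ok && len(arr) > 0 {
		if reflect.ValueOf(arr[0]).Kind() == reflect.Slice {
			return "unspecified" // [[...]] not handled yet
		}
		return "[]" + inferScalar(arr[0])
	}
	return inferScalar(v)
}

func main() {
	fmt.Println(inferValue([]any{1.65, 1.99})) // []float64
	fmt.Println(inferValue([]any{[]any{1}}))   // unspecified
}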
@@ -421,6 +421,38 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
},
expectedErr: nil,
},
{
name: "IN operator with json search",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "body.user_names[*] IN 'john_doe'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "has with json search",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "has(body.user_names[*], 'john_doe')",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
}

mockMetadataStore := telemetrytypestest.NewMockMetadataStore()

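The two new cases above cover the legacy string-body path: when the JSON body column is not in play, `body.user_names[*]` filters compile to ClickHouse string-JSON functions over the raw body. A small sketch of the assumed expression shape (hypothetical helper, grounded in the expected queries above):

package main

import "fmt"

// legacyArrayPath builds the JSONExtract(JSON_QUERY(...)) expression that the
// expected queries above use for an any-index array path on the string body.
func legacyArrayPath(field string) string {
	return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.\"%s\"[*]'), 'Array(String)')", field)
}

func main() {
	// has(...) membership test against the extracted string array
	fmt.Printf("has(%s, ?)\n", legacyArrayPath("user_names"))
}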
@@ -112,7 +112,8 @@ func (t *telemetryMetaStore) buildBodyJSONPaths(ctx context.Context,
}

for _, fieldKey := range fieldKeys {
fieldKey.Materialized = promoted.Contains(fieldKey.Name)
promotedKey := strings.Split(fieldKey.Name, telemetrytypes.ArraySep)[0]
fieldKey.Materialized = promoted.Contains(promotedKey)
fieldKey.Indexes = indexes[fieldKey.Name]
}

@@ -501,7 +502,8 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
pathConditions := []string{}
for _, path := range paths {
pathConditions = append(pathConditions, sb.Equal("path", path))
split := strings.Split(path, telemetrytypes.ArraySep)
pathConditions = append(pathConditions, sb.Equal("path", split[0]))
}
sb.Where(sb.Or(pathConditions...))

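Both hunks above normalize an array path to its root segment before consulting the promoted-paths table, so a promoted root also marks its nested array fields as materialized. A sketch of the lookup-key derivation (assuming telemetrytypes.ArraySep is the "[]." separator; that constant's value is not shown in this diff):

package main

import (
	"fmt"
	"strings"
)

const arraySep = "[]." // assumption mirroring telemetrytypes.ArraySep

// promotedLookupKey keeps only the root segment of an array path, matching
// strings.Split(path, telemetrytypes.ArraySep)[0] in the hunks above.
func promotedLookupKey(path string) string {
	return strings.Split(path, arraySep)[0]
}

func main() {
	fmt.Println(promotedLookupKey("education[].parameters")) // education
	fmt.Println(promotedLookupKey("tags"))                   // tags
}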
@@ -123,7 +123,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
origTimeAgg := query.Aggregations[0].TimeAggregation
origGroupBy := slices.Clone(query.GroupBy)

if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
if (query.Aggregations[0].SpaceAggregation.IsPercentile() || query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount) &&
query.Aggregations[0].Type != metrictypes.ExpHistogramType {
// add le in the group by if doesn't exist
leExists := false
@@ -154,7 +154,11 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}

// make the time aggregation rate and space aggregation sum
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
if query.Aggregations[0].SpaceAggregation.IsPercentile() {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
} else {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
}
query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
}

@@ -524,7 +528,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
|
||||
return "", nil, errors.Newf(
|
||||
errors.TypeInvalidInput,
|
||||
errors.CodeInvalidInput,
|
||||
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
|
||||
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
|
||||
)
|
||||
}
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
@@ -577,6 +581,34 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
|
||||
sb.From("__spatial_aggregation_cte")
|
||||
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
sb.GroupBy("ts")
|
||||
if query.Having != nil && query.Having.Expression != "" {
|
||||
rewriter := querybuilder.NewHavingExpressionRewriter()
|
||||
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
|
||||
sb.Having(rewrittenExpr)
|
||||
}
|
||||
} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
|
||||
sb.Select("ts")
|
||||
|
||||
for _, g := range query.GroupBy {
|
||||
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
|
||||
}
|
||||
|
||||
switch query.Aggregations[0].SpaceAggregationParam.(type) {
|
||||
case metrictypes.ComparisonSpaceAggregationParam:
|
||||
aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].SpaceAggregationParam.(metrictypes.ComparisonSpaceAggregationParam))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sb.SelectMore(aggQuery)
|
||||
default:
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
|
||||
}
|
||||
|
||||
sb.From("__spatial_aggregation_cte")
|
||||
|
||||
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||
sb.GroupBy("ts")
|
||||
|
||||
if query.Having != nil && query.Having.Expression != "" {
|
||||
rewriter := querybuilder.NewHavingExpressionRewriter()
|
||||
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
|
||||
|
||||
@@ -1,6 +1,7 @@
package telemetrymetrics

import (
"fmt"
"time"

"github.com/SigNoz/signoz/pkg/errors"
@@ -308,3 +309,17 @@ func AggregationColumnForSamplesTable(
}
return aggregationColumn, nil
}

func AggregationQueryForHistogramCount(params metrictypes.ComparisonSpaceAggregationParam) (string, error) {
histogramCountLimit := params.Limit

switch params.Operater {
case "<=":
return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit), nil
case ">":
return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) < %f) + (argMinIf(value, toFloat64(le), toFloat64(le) >= %f) - argMaxIf(value, toFloat64(le), toFloat64(le) < %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) < %f)) / (minIf(toFloat64(le), toFloat64(le) >= %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit), nil
default:
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
}

}

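For illustration (not part of the diff): the `<=` branch above linearly interpolates a cumulative bucket histogram at the limit, between the largest `le` bound at or below it and the smallest bound above it; the `>` branch subtracts the same interpolated count from the total (argMax over all bounds). A toy restatement of that arithmetic, assuming cumulative counts per bucket bound:

package main

import "fmt"

// countAtMost interpolates a cumulative histogram at x between the greatest
// bound <= x and the smallest bound > x (no handling for x outside the
// bucket range; this is a sketch, not the generated ClickHouse SQL).
func countAtMost(bounds, cumulative []float64, x float64) float64 {
	var loLe, loV, hiLe, hiV float64
	for i, le := range bounds {
		if le <= x {
			loLe, loV = le, cumulative[i]
		} else {
			hiLe, hiV = le, cumulative[i]
			break
		}
	}
	return loV + (hiV-loV)*(x-loLe)/(hiLe-loLe)
}

func main() {
	bounds := []float64{0.1, 0.5, 1.0}
	cumulative := []float64{10, 40, 100}
	fmt.Println(countAtMost(bounds, cumulative, 0.75)) // 40 + 60*0.25/0.5 = 70
}
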
102
pkg/transition/v5_to_v4.go
Normal file
@@ -0,0 +1,102 @@
package transition

import (
"fmt"

v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// ConvertV5TimeSeriesDataToV4Result converts v5 TimeSeriesData to v4 Result
func ConvertV5TimeSeriesDataToV4Result(v5Data *qbtypes.TimeSeriesData) *v3.Result {
if v5Data == nil {
return nil
}

result := &v3.Result{
QueryName: v5Data.QueryName,
Series: make([]*v3.Series, 0),
}

toV4Series := func(ts *qbtypes.TimeSeries) *v3.Series {
series := &v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0, len(ts.Values)),
}

for _, label := range ts.Labels {
valueStr := fmt.Sprintf("%v", label.Value)
series.Labels[label.Key.Name] = valueStr
}

if len(series.Labels) > 0 {
series.LabelsArray = append(series.LabelsArray, series.Labels)
}

for _, tsValue := range ts.Values {
if tsValue.Partial {
continue
}

point := v3.Point{
Timestamp: tsValue.Timestamp,
Value: tsValue.Value,
}
series.Points = append(series.Points, point)
}
return series
}

for _, aggBucket := range v5Data.Aggregations {
for _, ts := range aggBucket.Series {
result.Series = append(result.Series, toV4Series(ts))
}

if len(aggBucket.AnomalyScores) != 0 {
result.AnomalyScores = make([]*v3.Series, 0)
for _, ts := range aggBucket.AnomalyScores {
result.AnomalyScores = append(result.AnomalyScores, toV4Series(ts))
}
}

if len(aggBucket.PredictedSeries) != 0 {
result.PredictedSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.PredictedSeries {
result.PredictedSeries = append(result.PredictedSeries, toV4Series(ts))
}
}

if len(aggBucket.LowerBoundSeries) != 0 {
result.LowerBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.LowerBoundSeries {
result.LowerBoundSeries = append(result.LowerBoundSeries, toV4Series(ts))
}
}

if len(aggBucket.UpperBoundSeries) != 0 {
result.UpperBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.UpperBoundSeries {
result.UpperBoundSeries = append(result.UpperBoundSeries, toV4Series(ts))
}
}
}

return result
}

// ConvertV5TimeSeriesDataSliceToV4Results converts a slice of v5 TimeSeriesData to v4 QueryRangeResponse
func ConvertV5TimeSeriesDataSliceToV4Results(v5DataSlice []*qbtypes.TimeSeriesData) *v3.QueryRangeResponse {
response := &v3.QueryRangeResponse{
ResultType: "matrix", // Time series data is typically "matrix" type
Result: make([]*v3.Result, 0, len(v5DataSlice)),
}

for _, v5Data := range v5DataSlice {
if result := ConvertV5TimeSeriesDataToV4Result(v5Data); result != nil {
response.Result = append(response.Result, result)
}
}

return response
}
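Usage sketch for the new converters (illustrative values; note partial points are dropped, as the loop above shows):

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/transition"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	// An empty v5 result still converts to a well-formed v4 result.
	v5 := &qbtypes.TimeSeriesData{QueryName: "A"}
	result := transition.ConvertV5TimeSeriesDataToV4Result(v5)
	fmt.Println(result.QueryName, len(result.Series)) // A 0

	resp := transition.ConvertV5TimeSeriesDataSliceToV4Results([]*qbtypes.TimeSeriesData{v5})
	fmt.Println(resp.ResultType, len(resp.Result)) // matrix 1
}
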
@@ -2,6 +2,7 @@ package metrictypes

import (
"database/sql/driver"
"fmt"
"strings"

"github.com/SigNoz/signoz/pkg/errors"
@@ -172,7 +173,6 @@ var (

func (TimeAggregation) Enum() []any {
return []any{
TimeAggregationUnspecified,
TimeAggregationLatest,
TimeAggregationSum,
TimeAggregationAvg,
@@ -190,22 +190,22 @@ type SpaceAggregation struct {
}

var (
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
)

func (SpaceAggregation) Enum() []any {
return []any{
SpaceAggregationUnspecified,
SpaceAggregationSum,
SpaceAggregationAvg,
SpaceAggregationMin,
@@ -216,6 +216,7 @@ func (SpaceAggregation) Enum() []any {
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
SpaceAggregationHistogramCount,
}
}

@@ -258,3 +259,22 @@ type MetricTableHints struct {
type MetricValueFilter struct {
Value float64
}

type SpaceAggregationParam interface {
StringValue() string
}

type NoSpaceAggregationParam struct{}

func (_ NoSpaceAggregationParam) StringValue() string {
return "{}"
}

type ComparisonSpaceAggregationParam struct {
Operater string `json:"operator"`
Limit float64 `json:"limit"`
}

func (cso ComparisonSpaceAggregationParam) StringValue() string {
return fmt.Sprintf("{\"operator\": \"%s\", \"limit\": \"%f\"}", cso.Operater, cso.Limit)
}

@@ -10,10 +10,35 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)

type Step struct{ time.Duration }

var _ jsonschema.Exposer = Step{}

// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")

strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")

numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")

s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}

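Both wire forms the schema above documents, as a sketch (assumes Step's UnmarshalJSON, continued below, treats a bare number as seconds, matching the schema description):

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	var fromString, fromNumber qbtypes.Step
	_ = json.Unmarshal([]byte(`"5m"`), &fromString) // duration string
	_ = json.Unmarshal([]byte(`60`), &fromNumber)   // seconds as a number
	fmt.Println(fromString.Duration, fromNumber.Duration) // 5m0s 1m0s
}
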
func (s *Step) UnmarshalJSON(b []byte) error {
if len(b) == 0 {
return nil
@@ -161,6 +186,17 @@ func (f FilterOperator) IsStringSearchOperator() bool {
}
}

// IsArrayOperator returns true if the operator works with array values only
func (f FilterOperator) IsArrayOperator() bool {
switch f {
case FilterOperatorIn, FilterOperatorNotIn,
FilterOperatorBetween, FilterOperatorNotBetween:
return true
default:
return false
}
}

type OrderDirection struct {
valuer.String
}
@@ -170,6 +206,14 @@ var (
OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
)

// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}

var (
OrderDirectionMap = map[string]OrderDirection{
"asc": OrderDirectionAsc,
@@ -192,6 +236,19 @@ var (
ReduceToMedian = ReduceTo{valuer.NewString("median")}
)

// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}

// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
// reduceTo can be one of: last, sum, avg, min, max, count, median
// if reduceTo is not recognized, the function returns the original series
@@ -389,6 +446,8 @@ type MetricAggregation struct {
TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
// space aggregation to apply to the query
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
// param for space aggregation if needed
SpaceAggregationParam metrictypes.SpaceAggregationParam `json:"spaceAggregationParam"`
// table hints to use for the query
TableHints *metrictypes.MetricTableHints `json:"-"`
// value filter to apply to the query
@@ -397,6 +456,40 @@ type MetricAggregation struct {
ReduceTo ReduceTo `json:"reduceTo,omitempty"`
}

func (m *MetricAggregation) UnmarshalJSON(data []byte) error {
type Alias MetricAggregation
aux := &struct {
SpaceAggregationParam json.RawMessage `json:"spaceAggregationParam"`
*Alias
}{
Alias: (*Alias)(m),
}

if err := json.Unmarshal(data, &aux); err != nil {
return err
}

// If no param provided
if len(aux.SpaceAggregationParam) == 0 || string(aux.SpaceAggregationParam) == "null" {
m.SpaceAggregationParam = metrictypes.NoSpaceAggregationParam{}
return nil
}

switch m.SpaceAggregation {
case metrictypes.SpaceAggregationHistogramCount:
var p metrictypes.ComparisonSpaceAggregationParam
if err := json.Unmarshal(aux.SpaceAggregationParam, &p); err != nil {
return err
}
m.SpaceAggregationParam = p

default:
m.SpaceAggregationParam = metrictypes.NoSpaceAggregationParam{}
}

return nil
}

// Copy creates a deep copy of MetricAggregation
func (m MetricAggregation) Copy() MetricAggregation {
c := m

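Wire shape the custom unmarshaler above targets, as a sketch (field names follow the struct tags in the diff; histogram_count decodes into ComparisonSpaceAggregationParam, every other space aggregation falls back to NoSpaceAggregationParam):

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	payload := []byte(`{
		"spaceAggregation": "histogram_count",
		"spaceAggregationParam": {"operator": "<=", "limit": 0.5}
	}`)
	var agg qbtypes.MetricAggregation
	if err := json.Unmarshal(payload, &agg); err != nil {
		fmt.Println("unmarshal error:", err)
		return
	}
	// Prints the param back via its StringValue() representation.
	fmt.Println(agg.SpaceAggregationParam.StringValue())
}
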
@@ -36,6 +36,30 @@ var (
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)

// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}

// Validate checks if the FunctionName is valid and one of the known types
func (fn FunctionName) Validate() error {
validFunctions := []FunctionName{

@@ -16,6 +16,17 @@ var (
JoinTypeCross = JoinType{valuer.NewString("cross")}
)

// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}

type QueryRef struct {
Name string `json:"name"`
}

@@ -2,6 +2,8 @@ package querybuildertypesv5

import (
"context"

"github.com/swaggest/jsonschema-go"
)

type Query interface {
@@ -29,4 +31,12 @@ type ExecStats struct {
StepIntervals map[string]uint64 `json:"stepIntervals,omitempty"`
}

var _ jsonschema.Preparer = &ExecStats{}

// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}

type TimeRange struct{ From, To uint64 } // ms since epoch

@@ -16,3 +16,17 @@ var (
QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
QueryTypePromQL = QueryType{valuer.NewString("promql")}
)

// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)

type QueryEnvelope struct {
@@ -18,6 +19,71 @@ type QueryEnvelope struct {
Spec any `json:"spec"`
}

// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}

// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}

// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}

// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}

// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }

// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}

// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}

// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}

var _ jsonschema.OneOfExposer = QueryEnvelope{}

// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}

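An envelope matching the oneOf variants above, as a sketch (assumes "builder_query" is QueryTypeBuilder's wire value, consistent with the "clickhouse_sql" and "promql" values shown earlier, and that QueryEnvelope keeps its Type field alongside Spec):

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	payload := []byte(`{"type": "builder_query", "spec": {"name": "A", "signal": "logs"}}`)
	var env qbtypes.QueryEnvelope
	if err := json.Unmarshal(payload, &env); err != nil {
		fmt.Println("unmarshal error:", err)
		return
	}
	fmt.Println(env.Type) // builder_query
}
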
// implement custom json unmarshaler for the QueryEnvelope
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
var shadow struct {
@@ -130,6 +196,12 @@ type CompositeQuery struct {
Queries []QueryEnvelope `json:"queries"`
}

// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}

// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
type Alias CompositeQuery
@@ -192,6 +264,16 @@ var (
TextBoxVariableType = VariableType{valuer.NewString("text")}
)

// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}

type VariableItem struct {
Type VariableType `json:"type"`
Value any `json:"value"`
@@ -217,6 +299,12 @@ type QueryRangeRequest struct {
FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
}

// PrepareJSONSchema adds description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
return nil
}

func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
stepsMap := make(map[string]int64)
for _, query := range r.CompositeQuery.Queries {

@@ -30,3 +30,15 @@ var (
func (r RequestType) IsAggregation() bool {
return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}

// Enum implements jsonschema.Enum; returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}

@@ -12,6 +12,7 @@ import (

"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)

type QBEvent struct {
@@ -42,6 +43,17 @@ type QueryData struct {
Results []any `json:"results"`
}

var _ jsonschema.OneOfExposer = QueryData{}

// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}

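Consumers pick the concrete result shape by switching on the element type, roughly as follows (sketch; whether Results holds values or pointers is an assumption here):

package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// describe prints one line per result, dispatching on the concrete type.
func describe(data qbtypes.QueryData) {
	for _, r := range data.Results {
		switch v := r.(type) {
		case *qbtypes.TimeSeriesData:
			fmt.Println("time series:", v.QueryName)
		case *qbtypes.ScalarData:
			fmt.Println("scalar")
		case *qbtypes.RawData:
			fmt.Println("raw rows")
		default:
			fmt.Printf("unhandled result type %T\n", r)
		}
	}
}

func main() {
	describe(qbtypes.QueryData{Results: []any{&qbtypes.TimeSeriesData{QueryName: "A"}}})
}
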
type QueryRangeResponse struct {
Type RequestType `json:"type"`
Data QueryData `json:"data"`
@@ -52,6 +64,14 @@ type QueryRangeResponse struct {
QBEvent *QBEvent `json:"-"`
}

var _ jsonschema.Preparer = &QueryRangeResponse{}

// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}

type TimeSeriesData struct {
QueryName string `json:"queryName"`
Aggregations []*AggregationBucket `json:"aggregations"`
@@ -76,33 +96,6 @@ type TimeSeries struct {
Values []*TimeSeriesValue `json:"values"`
}

// LabelsMap converts the label slice to a map[string]string for use in
// alert evaluation helpers that operate on flat label maps.
func (ts *TimeSeries) LabelsMap() map[string]string {
if ts == nil {
return nil
}
m := make(map[string]string, len(ts.Labels))
for _, l := range ts.Labels {
m[l.Key.Name] = fmt.Sprintf("%v", l.Value)
}
return m
}

// NonPartialValues returns only the values where Partial is false.
func (ts *TimeSeries) NonPartialValues() []*TimeSeriesValue {
if ts == nil {
return nil
}
result := make([]*TimeSeriesValue, 0, len(ts.Values))
for _, v := range ts.Values {
if !v.Partial {
result = append(result, v)
}
}
return result
}

type Label struct {
Key telemetrytypes.TelemetryFieldKey `json:"key"`
Value any `json:"value"`
@@ -186,6 +179,14 @@ var (
ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
)

// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}

type ColumnDescriptor struct {
telemetrytypes.TelemetryFieldKey
QueryName string `json:"queryName"`

@@ -203,11 +203,11 @@ func (rc *RuleCondition) IsValid() bool {
}

// ShouldEval checks if the further series should be evaluated at all for alerts.
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
func (rc *RuleCondition) ShouldEval(series *v3.Series) bool {
if rc == nil {
return true
}
return !rc.RequireMinPoints || len(series.NonPartialValues()) >= rc.RequiredNumPoints
return !rc.RequireMinPoints || len(series.Points) >= rc.RequiredNumPoints
}

// QueryType is a shorthand method to get query type

@@ -355,14 +355,6 @@ func (r *PostableRule) validate() error {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
}

if r.Version != "" && r.Version != "v5" {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
}

if r.Version == "v5" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) == 0 {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "queries envelope is required in compositeQuery"))
}

if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
}

@@ -8,8 +8,6 @@ import (
"github.com/stretchr/testify/assert"

v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func TestIsAllQueriesDisabled(t *testing.T) {
@@ -623,9 +621,9 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
}

// Test that threshold can evaluate properly
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"}},
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: map[string]string{"test": "label"},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -700,9 +698,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}

// Test with a value that should trigger both WARNING and CRITICAL thresholds
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: map[string]string{"service": "test"},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -710,9 +708,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {

assert.Equal(t, 2, len(vector))

vector, err = threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
vector, err = threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: map[string]string{"service": "test"},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -725,7 +723,7 @@ func TestAnomalyNegationEval(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series qbtypes.TimeSeries
series v3.Series
shouldAlert bool
expectedValue float64
}{
@@ -753,9 +751,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
@@ -787,9 +785,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
@@ -820,9 +818,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
@@ -854,9 +852,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
@@ -887,9 +885,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below and all the times
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
@@ -922,9 +920,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
@@ -956,10 +954,10 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
@@ -990,9 +988,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
@@ -1024,9 +1022,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 60.0}, // below, should alert
{Timestamp: 2000, Value: 90.0},
},

@@ -8,8 +8,8 @@ import (

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)

@@ -83,7 +83,7 @@ func (eval EvalData) HasActiveAlert(sampleLabelFp uint64) bool {
type RuleThreshold interface {
// Eval runs the given series through the threshold rules
// using the given EvalData and returns the matching series
Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error)
Eval(series v3.Series, unit string, evalData EvalData) (Vector, error)
GetRuleReceivers() []RuleReceivers
}

@@ -122,11 +122,10 @@ func (r BasicRuleThresholds) Validate() error {
return errors.Join(errs...)
}

func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalData) (Vector, error) {
var resultVector Vector
thresholds := []BasicRuleThreshold(r)
sortThresholds(thresholds)
seriesLabels := series.LabelsMap()
for _, threshold := range thresholds {
smpl, shouldAlert := threshold.shouldAlert(series, unit)
if shouldAlert {
@@ -138,15 +137,15 @@ func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalDa
resultVector = append(resultVector, smpl)
continue
} else if evalData.SendUnmatched {
// Sanitise the series values to remove any NaN, Inf, or partial values
values := filterValidValues(series.Values)
if len(values) == 0 {
// Sanitise the series points to remove any NaN or Inf values
series.Points = removeGroupinSetPoints(series)
if len(series.Points) == 0 {
continue
}
// prepare the sample with the first value of the series
// prepare the sample with the first point of the series
smpl := Sample{
Point: Point{T: values[0].Timestamp, V: values[0].Value},
Metric: PrepareSampleLabelsForRule(seriesLabels, threshold.Name),
Point: Point{T: series.Points[0].Timestamp, V: series.Points[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
}
@@ -161,7 +160,7 @@ func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalDa
if threshold.RecoveryTarget == nil {
continue
}
sampleLabels := PrepareSampleLabelsForRule(seriesLabels, threshold.Name)
sampleLabels := PrepareSampleLabelsForRule(series.Labels, threshold.Name)
alertHash := sampleLabels.Hash()
// check if alert is active and then check if recovery threshold matches
if evalData.HasActiveAlert(alertHash) {
@@ -256,23 +255,18 @@ func (b BasicRuleThreshold) Validate() error {
return errors.Join(errs...)
}

func (b BasicRuleThreshold) matchesRecoveryThreshold(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) matchesRecoveryThreshold(series v3.Series, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.recoveryTarget(ruleUnit))
}
func (b BasicRuleThreshold) shouldAlert(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlert(series v3.Series, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.target(ruleUnit))
}

// filterValidValues returns only the values that are valid for alert evaluation:
// non-partial, non-NaN, non-Inf, and with non-negative timestamps.
func filterValidValues(values []*qbtypes.TimeSeriesValue) []*qbtypes.TimeSeriesValue {
var result []*qbtypes.TimeSeriesValue
for _, v := range values {
if v.Partial {
continue
}
if v.Timestamp >= 0 && !math.IsNaN(v.Value) && !math.IsInf(v.Value, 0) {
result = append(result, v)
func removeGroupinSetPoints(series v3.Series) []v3.Point {
var result []v3.Point
for _, s := range series.Points {
if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
result = append(result, s)
}
}
return result
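The replacement filter in miniature (toy data, not the diff's code; note it no longer skips partial values, since v3.Point carries no Partial flag):

package main

import (
	"fmt"
	"math"
)

type point struct {
	ts int64
	v  float64
}

// validPoints mirrors removeGroupinSetPoints: keep points with non-negative
// timestamps and finite values.
func validPoints(ps []point) []point {
	var out []point
	for _, p := range ps {
		if p.ts >= 0 && !math.IsNaN(p.v) && !math.IsInf(p.v, 0) {
			out = append(out, p)
		}
	}
	return out
}

func main() {
	ps := []point{{1000, 1.5}, {2000, math.NaN()}, {3000, math.Inf(1)}, {-1, 2.0}}
	fmt.Println(len(validPoints(ps))) // 1
}
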
@@ -290,15 +284,15 @@ func PrepareSampleLabelsForRule(seriesLabels map[string]string, thresholdName st
|
||||
return lb.Labels()
|
||||
}
|
||||
|
||||
func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, target float64) (Sample, bool) {
|
||||
func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float64) (Sample, bool) {
|
||||
var shouldAlert bool
|
||||
var alertSmpl Sample
|
||||
lbls := PrepareSampleLabelsForRule(series.LabelsMap(), b.Name)
|
||||
lbls := PrepareSampleLabelsForRule(series.Labels, b.Name)
|
||||
|
||||
values := filterValidValues(series.Values)
|
||||
series.Points = removeGroupinSetPoints(series)
|
||||
|
||||
// nothing to evaluate
|
||||
if len(values) == 0 {
|
||||
if len(series.Points) == 0 {
|
||||
return alertSmpl, false
|
||||
}
|
||||
|
||||
@@ -306,7 +300,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
case AtleastOnce:
|
||||
// If any sample matches the condition, the rule is firing.
|
||||
if b.CompareOp == ValueIsAbove {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value > target {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
shouldAlert = true
|
||||
@@ -314,7 +308,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
}
|
||||
} else if b.CompareOp == ValueIsBelow {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value < target {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
shouldAlert = true
|
||||
@@ -322,7 +316,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
}
|
||||
} else if b.CompareOp == ValueIsEq {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value == target {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
shouldAlert = true
|
||||
@@ -330,7 +324,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
}
|
||||
} else if b.CompareOp == ValueIsNotEq {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value != target {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
shouldAlert = true
|
||||
@@ -338,7 +332,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
}
|
||||
} else if b.CompareOp == ValueOutsideBounds {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if math.Abs(smpl.Value) >= target {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
shouldAlert = true
|
||||
@@ -351,7 +345,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
shouldAlert = true
|
||||
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
|
||||
if b.CompareOp == ValueIsAbove {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value <= target {
|
||||
shouldAlert = false
|
||||
break
|
||||
@@ -360,7 +354,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
// use min value from the series
|
||||
if shouldAlert {
|
||||
var minValue float64 = math.Inf(1)
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value < minValue {
|
||||
minValue = smpl.Value
|
||||
}
|
||||
@@ -368,7 +362,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
|
||||
}
|
||||
} else if b.CompareOp == ValueIsBelow {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value >= target {
|
||||
shouldAlert = false
|
||||
break
|
||||
@@ -376,7 +370,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
if shouldAlert {
|
||||
var maxValue float64 = math.Inf(-1)
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value > maxValue {
|
||||
maxValue = smpl.Value
|
||||
}
|
||||
@@ -384,14 +378,14 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
|
||||
}
|
||||
} else if b.CompareOp == ValueIsEq {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value != target {
|
||||
shouldAlert = false
|
||||
break
|
||||
}
|
||||
}
|
||||
} else if b.CompareOp == ValueIsNotEq {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if smpl.Value == target {
|
||||
shouldAlert = false
|
||||
break
|
||||
@@ -399,7 +393,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
// use any non-inf or nan value from the series
|
||||
if shouldAlert {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
break
|
||||
@@ -407,7 +401,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
}
|
||||
}
|
||||
} else if b.CompareOp == ValueOutsideBounds {
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if math.Abs(smpl.Value) < target {
|
||||
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
|
||||
shouldAlert = false
|
||||
@@ -418,7 +412,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
case OnAverage:
|
||||
// If the average of all samples matches the condition, the rule is firing.
|
||||
var sum, count float64
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
|
||||
continue
|
||||
}
|
||||
@@ -452,7 +446,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
// If the sum of all samples matches the condition, the rule is firing.
|
||||
var sum float64
|
||||
|
||||
for _, smpl := range values {
|
||||
for _, smpl := range series.Points {
|
||||
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
|
||||
continue
|
||||
}
|
||||
@@ -483,22 +477,21 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
|
||||
case Last:
|
||||
// If the last sample matches the condition, the rule is firing.
|
||||
shouldAlert = false
|
||||
lastValue := values[len(values)-1].Value
|
||||
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
|
||||
alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
|
||||
if b.CompareOp == ValueIsAbove {
|
||||
if lastValue > target {
|
||||
if series.Points[len(series.Points)-1].Value > target {
|
||||
shouldAlert = true
|
||||
}
|
||||
} else if b.CompareOp == ValueIsBelow {
|
||||
if lastValue < target {
|
||||
if series.Points[len(series.Points)-1].Value < target {
|
||||
shouldAlert = true
|
||||
}
|
||||
} else if b.CompareOp == ValueIsEq {
|
||||
if lastValue == target {
|
||||
if series.Points[len(series.Points)-1].Value == target {
|
||||
shouldAlert = true
|
||||
}
|
||||
} else if b.CompareOp == ValueIsNotEq {
|
||||
if lastValue != target {
|
||||
if series.Points[len(series.Points)-1].Value != target {
|
||||
shouldAlert = true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,24 +6,16 @@ import (
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
)
|
||||
|
||||
func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
target := 100.0
|
||||
|
||||
makeLabel := func(name, value string) *qbtypes.Label {
|
||||
return &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: name}, Value: value}
|
||||
}
|
||||
makeValue := func(value float64, ts int64) *qbtypes.TimeSeriesValue {
|
||||
return &qbtypes.TimeSeriesValue{Value: value, Timestamp: ts}
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
threshold BasicRuleThreshold
|
||||
series qbtypes.TimeSeries
|
||||
series v3.Series
|
||||
ruleUnit string
|
||||
shouldAlert bool
|
||||
}{
|
||||
@@ -36,9 +28,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 150ms in seconds
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
|
||||
},
|
||||
},
|
||||
ruleUnit: "s",
|
||||
shouldAlert: true,
|
||||
@@ -52,9 +46,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
|
||||
},
|
||||
},
|
||||
ruleUnit: "s",
|
||||
shouldAlert: false,
|
||||
@@ -68,9 +64,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{makeValue(150000, 1000)}, // 150000ms = 150s
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
|
||||
},
|
||||
},
|
||||
ruleUnit: "ms",
|
||||
shouldAlert: true,
|
||||
@@ -85,9 +83,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 0.15KiB ≈ 153.6 bytes
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
|
||||
},
|
||||
},
|
||||
ruleUnit: "kbytes",
|
||||
shouldAlert: true,
|
||||
@@ -101,9 +101,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.15, Timestamp: 1000},
|
||||
},
|
||||
},
|
||||
ruleUnit: "mbytes",
|
||||
shouldAlert: true,
|
||||
@@ -118,9 +120,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AtleastOnce,
|
||||
CompareOp: ValueIsBelow,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
|
||||
},
|
||||
},
|
||||
ruleUnit: "s",
|
||||
shouldAlert: true,
|
||||
@@ -134,12 +138,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: OnAverage,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{
|
||||
makeValue(0.08, 1000), // 80ms
|
||||
makeValue(0.12, 2000), // 120ms
|
||||
makeValue(0.15, 3000), // 150ms
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.08, Timestamp: 1000}, // 80ms
|
||||
{Value: 0.12, Timestamp: 2000}, // 120ms
|
||||
{Value: 0.15, Timestamp: 3000}, // 150ms
|
||||
},
|
||||
},
|
||||
ruleUnit: "s",
|
||||
@@ -154,12 +158,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: InTotal,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{
|
||||
makeValue(0.04, 1000), // 40MB
|
||||
makeValue(0.05, 2000), // 50MB
|
||||
makeValue(0.03, 3000), // 30MB
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.04, Timestamp: 1000}, // 40MB
|
||||
{Value: 0.05, Timestamp: 2000}, // 50MB
|
||||
{Value: 0.03, Timestamp: 3000}, // 30MB
|
||||
},
|
||||
},
|
||||
ruleUnit: "decgbytes",
|
||||
@@ -174,12 +178,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
|
||||
MatchType: AllTheTimes,
|
||||
CompareOp: ValueIsAbove,
|
||||
},
|
||||
series: qbtypes.TimeSeries{
|
||||
Labels: []*qbtypes.Label{makeLabel("service", "test")},
|
||||
Values: []*qbtypes.TimeSeriesValue{
|
||||
makeValue(0.11, 1000), // 110ms
|
||||
makeValue(0.12, 2000), // 120ms
|
||||
makeValue(0.15, 3000), // 150ms
|
||||
series: v3.Series{
|
||||
Labels: map[string]string{"service": "test"},
|
||||
Points: []v3.Point{
|
||||
{Value: 0.11, Timestamp: 1000}, // 110ms
|
||||
{Value: 0.12, Timestamp: 2000}, // 120ms
|
||||
{Value: 0.15, Timestamp: 3000}, // 150ms
|
||||
},
|
||||
},
|
||||
ruleUnit: "s",
|
||||
@@ -194,11 +198,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
				MatchType: Last,
				CompareOp: ValueIsAbove,
			},
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{makeLabel("service", "test")},
				Values: []*qbtypes.TimeSeriesValue{
					makeValue(0.15, 1000), // 150kB
					makeValue(0.05, 2000), // 50kB (last value)
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.15, Timestamp: 1000}, // 150kB
					{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
				},
			},
			ruleUnit: "decmbytes",

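The cases above cycle through the five match types (AtleastOnce, OnAverage, InTotal, AllTheTimes, Last). Their semantics, sketched for a ValueIsAbove comparison — a plain restatement of the matcher behavior, not the rule engine's code:

package main

import "fmt"

// matchType mirrors the five matchers exercised in these tests.
type matchType int

const (
	atleastOnce matchType = iota
	onAverage
	inTotal
	allTheTimes
	last
)

func shouldAlert(values []float64, target float64, m matchType) bool {
	if len(values) == 0 {
		return false
	}
	sum := 0.0
	for _, v := range values {
		sum += v
	}
	switch m {
	case atleastOnce:
		for _, v := range values {
			if v > target {
				return true
			}
		}
	case allTheTimes:
		for _, v := range values {
			if v <= target {
				return false
			}
		}
		return true
	case onAverage:
		return sum/float64(len(values)) > target
	case inTotal:
		return sum > target
	case last:
		return values[len(values)-1] > target
	}
	return false
}

func main() {
	// The "Last" case above: 150kB then 50kB; only the final point is
	// compared, so with a hypothetical 0.1 decmbytes target there is
	// no alert even though an earlier point exceeded it.
	fmt.Println(shouldAlert([]float64{0.15, 0.05}, 0.1, last)) // false
}
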
@@ -214,9 +218,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{makeLabel("service", "test")},
				Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 0.15, Timestamp: 1000},
				},
			},
			ruleUnit:    "KBs",
			shouldAlert: true,

@@ -231,9 +237,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{makeLabel("service", "test")},
				Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150ms
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 150, Timestamp: 1000}, // 150ms
				},
			},
			ruleUnit:    "ms",
			shouldAlert: true,

@@ -248,9 +256,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
				MatchType: AtleastOnce,
				CompareOp: ValueIsAbove,
			},
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{makeLabel("service", "test")},
				Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150 (unitless)
			series: v3.Series{
				Labels: map[string]string{"service": "test"},
				Points: []v3.Point{
					{Value: 150, Timestamp: 1000}, // 150 (unitless)
				},
			},
			ruleUnit:    "",
			shouldAlert: true,

@@ -23,8 +23,10 @@ const (
	// e.g., "body.status" where "body." is the prefix
	BodyJSONStringSearchPrefix = "body."
	ArraySep                   = jsontypeexporter.ArraySeparator
	ArraySepSuffix             = "[]"
	// TODO(Piyush): Remove once we've migrated to the new array syntax
	ArrayAnyIndex       = "[*]."
	ArrayAnyIndexSuffix = "[*]"
)

type TelemetryFieldKey struct {

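The TODO marks the "[*]" any-index form as legacy alongside the new "[]" form. A plausible bridge while both are accepted is to normalize paths up front. In the sketch below, the value of jsontypeexporter.ArraySeparator is assumed to be "[]." — the "[]" suffix declared next to it suggests as much, but the diff does not show it — and normalizePath itself is illustrative, not code from the diff:

package main

import (
	"fmt"
	"strings"
)

const (
	arraySep            = "[]." // assumed value of jsontypeexporter.ArraySeparator
	arraySepSuffix      = "[]"
	arrayAnyIndex       = "[*]." // legacy syntax, slated for removal
	arrayAnyIndexSuffix = "[*]"
)

// normalizePath rewrites the legacy any-index syntax to the new one.
func normalizePath(p string) string {
	p = strings.ReplaceAll(p, arrayAnyIndex, arraySep)
	if strings.HasSuffix(p, arrayAnyIndexSuffix) {
		p = strings.TrimSuffix(p, arrayAnyIndexSuffix) + arraySepSuffix
	}
	return p
}

func main() {
	fmt.Println(normalizePath("interests[*].entities[*].ratings")) // interests[].entities[].ratings
}
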
@@ -172,3 +172,18 @@ func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
	}
	return true
}

// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
	return []any{
		FieldContextMetric,
		FieldContextLog,
		FieldContextSpan,
		// FieldContextTrace,
		FieldContextResource,
		// FieldContextScope,
		FieldContextAttribute,
		// FieldContextEvent,
		FieldContextBody,
	}
}

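Four types gain the same Enum() []any method in this change. A natural consumer is a schema generator that lists a field's acceptable values; how the API layer actually discovers these methods is an assumption of this sketch, not something the diff shows:

package main

import "fmt"

// Enumer is the shape all four new methods share.
type Enumer interface {
	Enum() []any
}

// signal is a local stand-in for the valuer-backed Signal type.
type signal struct{ s string }

func (s signal) String() string { return s.s }

func (signal) Enum() []any {
	return []any{signal{"traces"}, signal{"logs"}, signal{"metrics"}}
}

// enumStrings collects the string forms, e.g. for an "enum:" entry in
// a generated schema.
func enumStrings(e Enumer) []string {
	out := make([]string, 0, len(e.Enum()))
	for _, v := range e.Enum() {
		out = append(out, fmt.Sprint(v))
	}
	return out
}

func main() {
	fmt.Println(enumStrings(signal{})) // [traces logs metrics]
}
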
@@ -93,6 +93,14 @@ var (
		FieldDataTypeArrayFloat64: "Array(Float64)",
		FieldDataTypeArrayBool:    "Array(Bool)",
	}

	ScalerFieldTypeToArrayFieldType = map[FieldDataType]FieldDataType{
		FieldDataTypeString:  FieldDataTypeArrayString,
		FieldDataTypeBool:    FieldDataTypeArrayBool,
		FieldDataTypeNumber:  FieldDataTypeArrayNumber,
		FieldDataTypeInt64:   FieldDataTypeArrayInt64,
		FieldDataTypeFloat64: FieldDataTypeArrayFloat64,
	}
)

func (f FieldDataType) CHDataType() string {

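The new ScalerFieldTypeToArrayFieldType map gives each scalar type an array counterpart. A plausible use is promoting a field's type when its path carries the array suffix; the trigger condition in this sketch is an assumption, and the types are local stand-ins for FieldDataType:

package main

import (
	"fmt"
	"strings"
)

type fieldDataType string

const (
	typeString      fieldDataType = "string"
	typeArrayString fieldDataType = "[]string"
)

// scalarToArray mirrors the shape of ScalerFieldTypeToArrayFieldType.
var scalarToArray = map[fieldDataType]fieldDataType{
	typeString: typeArrayString,
}

// promote upgrades a scalar type when the path ends in the array
// suffix (assumed trigger, not taken from the diff).
func promote(t fieldDataType, path string) fieldDataType {
	if !strings.HasSuffix(path, "[]") {
		return t
	}
	if arr, ok := scalarToArray[t]; ok {
		return arr
	}
	return t // no array counterpart, or already an array type
}

func main() {
	fmt.Println(promote(typeString, "tags[]")) // []string
}
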
@@ -169,3 +177,19 @@ func (f FieldDataType) TagDataType() string {
		return "string"
	}
}

// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
	return []any{
		FieldDataTypeString,
		FieldDataTypeBool,
		FieldDataTypeFloat64,
		FieldDataTypeInt64,
		FieldDataTypeNumber,
		// FieldDataTypeArrayString,
		// FieldDataTypeArrayFloat64,
		// FieldDataTypeArrayBool,
		// FieldDataTypeArrayInt64,
		// FieldDataTypeArrayNumber,
	}
}

@@ -12,3 +12,12 @@ var (
	SignalMetrics     = Signal{valuer.NewString("metrics")}
	SignalUnspecified = Signal{valuer.NewString("")}
)

// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}

@@ -10,3 +10,10 @@ var (
	SourceMeter       = Source{valuer.NewString("meter")}
	SourceUnspecified = Source{valuer.NewString("")}
)

// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
	return []any{
		SourceMeter,
	}
}

@@ -65,6 +65,7 @@ func TestJSONTypeSet() (map[string][]JSONDataType, MetadataStore) {
		"interests[].entities[].reviews[].entries[].metadata[].positions[].unit":    {String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {ArrayInt64, ArrayString},
		"message": {String},
		"tags":    {ArrayString},
	}

	return types, nil

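One detail worth noting in this fixture: a single path can carry more than one observed type, as "ratings" does (ArrayInt64 and ArrayString). Any consumer therefore has to handle that ambiguity. A small sketch of detecting it, with a local stand-in for JSONDataType rather than the real planner logic:

package main

import "fmt"

type jsonDataType string

// ambiguousPaths returns the paths observed with more than one type,
// which a query layer must fan out over or coerce.
func ambiguousPaths(types map[string][]jsonDataType) []string {
	var out []string
	for path, ts := range types {
		if len(ts) > 1 {
			out = append(out, path)
		}
	}
	return out
}

func main() {
	types := map[string][]jsonDataType{
		"tags":                {"ArrayString"},
		"interests[].ratings": {"ArrayInt64", "ArrayString"},
	}
	fmt.Println(ambiguousPaths(types)) // [interests[].ratings]
}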