Compare commits


3 Commits

Author            SHA1        Message                                             Date
srikanthccv       5e76de6f8d  chore: resolve conflicts                            2026-02-18 22:36:38 +05:30
Srikanth Chekuri  25e0c19ddb  Merge branch 'main' into remove-v4-support-rules    2026-02-14 23:51:21 +05:30
srikanthccv       8a6abb2f09  chore: remove support for older versions in rules   2026-02-14 23:48:09 +05:30
48 changed files with 2413 additions and 6365 deletions

File diff suppressed because it is too large

View File

@@ -10,7 +10,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -107,10 +106,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.Handle("/api/v5/query_range", handler.New(
am.ViewAccess(ah.queryRangeV5),
querierAPI.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

View File

@@ -5,23 +5,16 @@ import (
"encoding/json"
"fmt"
"log/slog"
"math"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
@@ -32,6 +25,8 @@ import (
querierV5 "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -48,17 +43,12 @@ type AnomalyRule struct {
reader interfaces.Reader
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
// querierV5 is the query builder v5 querier used for all alert rule evaluation
querierV5 querierV5.Querier
provider anomaly.Provider
providerV2 anomalyV2.Provider
version string
logger *slog.Logger
logger *slog.Logger
seasonality anomaly.Seasonality
}
@@ -102,34 +92,6 @@ func NewAnomalyRule(
logger.Info("using seasonality", "seasonality", t.seasonality.String())
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
)
}
if t.seasonality == anomaly.SeasonalityHourly {
t.providerV2 = anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
@@ -148,7 +110,7 @@ func NewAnomalyRule(
}
t.querierV5 = querierV5
t.version = p.Version
t.reader = reader
t.logger = logger
return &t, nil
}
@@ -157,36 +119,9 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)
st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()
compositeQuery := r.Condition().CompositeQuery
if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}
func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
r.logger.InfoContext(ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -215,60 +150,6 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
if err != nil {
return nil, err
}
var queryResult *v3.Result
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
queryResult = result
break
}
}
hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
for _, series := range queryResult.AnomalyScores {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}
func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
Params: *params,
@@ -290,20 +171,25 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
}
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)
var anomalyScores []*qbtypes.TimeSeries
if qbResult != nil {
for _, bucket := range qbResult.Aggregations {
anomalyScores = append(anomalyScores, bucket.AnomalyScores...)
}
}
hasData := len(queryResult.AnomalyScores) > 0
hasData := len(anomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
scoresJSON, _ := json.Marshal(anomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.AnomalyScores
seriesToProcess := anomalyScores
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -316,7 +202,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -337,16 +223,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
}
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
if err != nil {
return 0, err
}

View File

@@ -8,28 +8,27 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
// mockAnomalyProviderV2 is a mock implementation of anomalyV2.Provider for testing.
type mockAnomalyProviderV2 struct {
responses []*anomalyV2.AnomaliesResponse
callCount int
}
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
func (m *mockAnomalyProviderV2) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomalyV2.AnomaliesRequest) (*anomalyV2.AnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
return &anomalyV2.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
@@ -82,11 +81,11 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
},
}
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
},
},
}
@@ -129,8 +128,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
)
require.NoError(t, err)
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseNoData},
}
alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -190,11 +189,11 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
},
}
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
},
},
}
@@ -228,16 +227,22 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)
responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseWithData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{
Aggregations: []*qbtypes.AggregationBucket{
{
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
AnomalyScores: []*qbtypes.TimeSeries{
{
Labels: []*qbtypes.Label{
{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
},
},
@@ -252,8 +257,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseWithData, responseNoData},
}
alertsFound1, err := rule.Eval(context.Background(), t1)
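
The updated fixtures above build the v5 time-series shape directly. A minimal, self-contained sketch of that shape, using only the types and the LabelsMap helper that appear in this diff:

package main

import (
	"fmt"
	"time"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	// One series with a single label and one sample, mirroring the
	// responseWithData fixture above.
	series := &qbtypes.TimeSeries{
		Labels: []*qbtypes.Label{
			{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
		},
		Values: []*qbtypes.TimeSeriesValue{
			{Timestamp: time.Now().UnixMilli(), Value: 1.0},
		},
	}
	fmt.Println(series.LabelsMap()) // map[test:label]
}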

View File

@@ -1,107 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
* @summary Query range
*/
export const queryRangeV5 = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<QueryRangeV5200>({
url: `/api/v5/query_range`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getQueryRangeV5MutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['queryRangeV5'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof queryRangeV5>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return queryRangeV5(data);
};
return { mutationFn, ...mutationOptions };
};
export type QueryRangeV5MutationResult = NonNullable<
Awaited<ReturnType<typeof queryRangeV5>>
>;
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type QueryRangeV5MutationError = RenderErrorResponseDTO;
/**
* @summary Query range
*/
export const useQueryRangeV5 = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getQueryRangeV5MutationOptions(options);
return useMutation(mutationOptions);
};

File diff suppressed because it is too large

View File

@@ -6,7 +6,7 @@ import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
@@ -63,12 +63,8 @@ function QuerySection({
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions={
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
showFunctions
version={ENTITY_VERSION_V5}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>

View File

@@ -1,4 +1,4 @@
import { useMemo, useState } from 'react';
import { useMemo } from 'react';
import { Virtuoso } from 'react-virtuoso';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
@@ -11,7 +11,6 @@ import './Tooltip.styles.scss';
const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const TOOLTIP_LIST_PADDING = 10;
export default function Tooltip({
uPlotInstance,
@@ -20,7 +19,7 @@ export default function Tooltip({
showTooltipHeader = true,
}: TooltipProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const [listHeight, setListHeight] = useState(0);
const tooltipContent = content ?? [];
const headerTitle = useMemo(() => {
@@ -42,15 +41,6 @@ export default function Tooltip({
showTooltipHeader,
]);
const virtuosoHeight = useMemo(() => {
return listHeight > 0
? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
);
}, [listHeight, tooltipContent.length]);
return (
<div
className={cx(
@@ -65,18 +55,18 @@ export default function Tooltip({
)}
<div
style={{
maxHeight: TOOLTIP_LIST_MAX_HEIGHT,
height: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
minHeight: 0,
}}
>
{tooltipContent.length > 0 ? (
<Virtuoso
className="uplot-tooltip-list"
data={tooltipContent}
style={{
height: virtuosoHeight,
width: '100%',
}}
totalListHeightChanged={setListHeight}
defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
itemContent={(_, item): JSX.Element => (
<div className="uplot-tooltip-item">
<div

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
)
type ServeOpenAPIFunc func(openapi.OperationContext)
@@ -60,39 +59,7 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
}
// Add request structure
reqOpts := []openapi.ContentOption{openapi.WithContentType(handler.openAPIDef.RequestContentType)}
if len(handler.openAPIDef.RequestExamples) > 0 {
reqOpts = append(reqOpts, openapi.WithCustomize(func(cor openapi.ContentOrReference) {
rbOrRef, ok := cor.(*openapi3.RequestBodyOrRef)
if !ok || rbOrRef.RequestBody == nil {
return
}
ct := handler.openAPIDef.RequestContentType
if ct == "" {
ct = "application/json"
}
mt, exists := rbOrRef.RequestBody.Content[ct]
if !exists {
return
}
if mt.Examples == nil {
mt.Examples = make(map[string]openapi3.ExampleOrRef)
}
for _, ex := range handler.openAPIDef.RequestExamples {
val := ex.Value
oaExample := openapi3.Example{Value: &val}
if ex.Summary != "" {
oaExample.WithSummary(ex.Summary)
}
if ex.Description != "" {
oaExample.WithDescription(ex.Description)
}
mt.Examples[ex.Name] = openapi3.ExampleOrRef{Example: &oaExample}
}
rbOrRef.RequestBody.Content[ct] = mt
}))
}
opCtx.AddReqStructure(handler.openAPIDef.Request, reqOpts...)
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
// Add request query structure
opCtx.AddReqStructure(handler.openAPIDef.RequestQuery)

View File

@@ -9,14 +9,6 @@ import (
"github.com/swaggest/rest/openapi"
)
// OpenAPIExample is a named example for an OpenAPI operation.
type OpenAPIExample struct {
Name string
Summary string
Description string
Value any
}
// Def is the definition of an OpenAPI operation
type OpenAPIDef struct {
ID string
@@ -26,7 +18,6 @@ type OpenAPIDef struct {
Request any
RequestQuery any
RequestContentType string
RequestExamples []OpenAPIExample
Response any
ResponseContentType string
SuccessStatusCode int
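
With RequestExamples gone, an operation definition carries only the request/response structures and metadata. A sketch of a def after this change, restricted to fields visible in this diff; the concrete values are illustrative, mirroring the QueryRangeV5OpenAPIDef that the next file deletes.

package querier

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// exampleDef is illustrative only: the former QueryRangeV5OpenAPIDef
// minus the removed RequestExamples field.
var exampleDef = handler.OpenAPIDef{
	ID:                  "QueryRangeV5",
	Tags:                []string{"query"},
	Summary:             "Query range",
	Request:             new(qbtypes.QueryRangeRequest),
	RequestContentType:  "application/json",
	Response:            new(qbtypes.QueryRangeResponse),
	ResponseContentType: "application/json",
	SuccessStatusCode:   http.StatusOK,
}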

View File

@@ -1,454 +0,0 @@
package querier
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)
// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}
var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}

View File

@@ -625,7 +625,7 @@ func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) (map[string][]s
// FilterNewSeries filters out items that are too new based on metadata first_seen timestamps.
// Returns the filtered series (old ones) excluding new series that are still within the grace period.
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*qbtypes.TimeSeries) ([]*qbtypes.TimeSeries, error) {
// Extract metric names and groupBy keys
metricToGroupedFields, err := r.extractMetricAndGroupBys(ctx)
if err != nil {
@@ -642,7 +642,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
seriesIdxToLookupKeys := make(map[int][]telemetrytypes.MetricMetadataLookupKey) // series index -> lookup keys
for i := 0; i < len(series); i++ {
metricLabelMap := series[i].Labels
metricLabelMap := series[i].LabelsMap()
// Collect groupBy attribute-value pairs for this series
seriesKeys := make([]telemetrytypes.MetricMetadataLookupKey, 0)
@@ -689,7 +689,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
}
// Filter series based on first_seen + delay
filteredSeries := make([]*v3.Series, 0, len(series))
filteredSeries := make([]*qbtypes.TimeSeries, 0, len(series))
evalTimeMs := ts.UnixMilli()
newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()
@@ -727,7 +727,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
// Check if first_seen + delay has passed
if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
// Still within grace period, skip this series
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].Labels)
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].LabelsMap())
continue
}
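
The skip condition above reduces to simple arithmetic: a series stays excluded while its newest first_seen plus the configured delay is still ahead of the evaluation time. A self-contained sketch of that predicate with illustrative values:

package main

import (
	"fmt"
	"time"
)

// withinGracePeriod mirrors the check above: skip the series while
// first_seen + newGroupEvalDelay has not yet passed the eval time.
func withinGracePeriod(maxFirstSeenMs, delayMs, evalTimeMs int64) bool {
	return maxFirstSeenMs+delayMs > evalTimeMs
}

func main() {
	evalMs := time.Now().UnixMilli()
	delayMs := (10 * time.Minute).Milliseconds()
	firstSeenMs := evalMs - (5 * time.Minute).Milliseconds() // first seen 5 minutes ago
	fmt.Println(withinGracePeriod(firstSeenMs, delayMs, evalMs)) // true: still within the grace period
}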

View File

@@ -26,33 +26,33 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
)
// createTestSeries creates a *v3.Series with the given labels and optional points
// createTestSeries creates a *qbtypes.TimeSeries with the given labels and optional values
// the values themselves are not needed: the labels alone determine whether a series is new or old
// we build a lookup key from the labels and check the series' first_seen timestamp in the metadata table
func createTestSeries(labels map[string]string, points []v3.Point) *v3.Series {
func createTestSeries(labels map[string]string, points []*qbtypes.TimeSeriesValue) *qbtypes.TimeSeries {
if points == nil {
points = []v3.Point{}
points = []*qbtypes.TimeSeriesValue{}
}
return &v3.Series{
Labels: labels,
Points: points,
lbls := make([]*qbtypes.Label, 0, len(labels))
for k, v := range labels {
lbls = append(lbls, &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: k}, Value: v})
}
return &qbtypes.TimeSeries{
Labels: lbls,
Values: points,
}
}
// seriesEqual compares two v3.Series by their labels
// seriesEqual compares two *qbtypes.TimeSeries by their labels
// Returns true if the series have the same labels (order doesn't matter)
func seriesEqual(s1, s2 *v3.Series) bool {
if s1 == nil && s2 == nil {
return true
}
if s1 == nil || s2 == nil {
func seriesEqual(s1, s2 *qbtypes.TimeSeries) bool {
m1 := s1.LabelsMap()
m2 := s2.LabelsMap()
if len(m1) != len(m2) {
return false
}
if len(s1.Labels) != len(s2.Labels) {
return false
}
for k, v := range s1.Labels {
if s2.Labels[k] != v {
for k, v := range m1 {
if m2[k] != v {
return false
}
}
@@ -149,11 +149,11 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
type filterNewSeriesTestCase struct {
name string
compositeQuery *v3.CompositeQuery
series []*v3.Series
series []*qbtypes.TimeSeries
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectedFiltered []*qbtypes.TimeSeries // series that should be in the final filtered result (old enough)
expectError bool
}
@@ -193,7 +193,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
@@ -205,7 +205,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
}, // svc-old and svc-missing should be included; svc-new is filtered out
@@ -227,7 +227,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
},
@@ -237,7 +237,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
expectedFiltered: []*qbtypes.TimeSeries{}, // all should be filtered out (new series)
},
{
name: "all old series - ClickHouse query",
@@ -254,7 +254,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
},
@@ -264,7 +264,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
}, // all should be included (old series)
@@ -292,13 +292,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -322,13 +322,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -358,13 +358,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"status": "200"}, nil),
}, // series included as we can't decide if it's new or old
},
@@ -385,7 +385,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
},
@@ -393,7 +393,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
}, // both should be included - svc-old is old, svc-no-metadata can't be decided
@@ -413,7 +413,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
},
// Only provide metadata for service_name, not env
@@ -423,7 +423,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
}, // has some metadata, uses max first_seen which is old
},
@@ -453,11 +453,11 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{},
series: []*qbtypes.TimeSeries{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
expectedFiltered: []*qbtypes.TimeSeries{},
},
{
name: "zero delay - Builder",
@@ -485,13 +485,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // with zero delay, all series pass
},
@@ -526,7 +526,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: mergeFirstSeenMaps(
@@ -535,7 +535,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
},
@@ -565,7 +565,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
// service_name is old, env is new - should use max (new)
@@ -575,7 +575,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
expectedFiltered: []*qbtypes.TimeSeries{}, // max first_seen is new, so should be filtered out
},
{
name: "Logs query - should skip filtering and return empty skip indexes",
@@ -600,14 +600,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Logs queries should return early, no filtering - all included
@@ -635,14 +635,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Traces queries should return early, no filtering - all included
@@ -724,14 +724,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// Build a map to count occurrences of each unique label combination in expected series
expectedCounts := make(map[string]int)
for _, expected := range tt.expectedFiltered {
key := labelsKey(expected.Labels)
key := labelsKey(expected.LabelsMap())
expectedCounts[key]++
}
// Build a map to count occurrences of each unique label combination in filtered series
actualCounts := make(map[string]int)
for _, filtered := range filteredSeries {
key := labelsKey(filtered.Labels)
key := labelsKey(filtered.LabelsMap())
actualCounts[key]++
}

View File

@@ -12,19 +12,18 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/formatter"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
)
type PromRule struct {
*BaseRule
version string
prometheus prometheus.Prometheus
}
@@ -48,7 +47,6 @@ func NewPromRule(
p := PromRule{
BaseRule: baseRule,
version: postableRule.Version,
prometheus: prometheus,
}
p.logger = logger
@@ -83,48 +81,30 @@ func (r *PromRule) GetSelectedQuery() string {
}
func (r *PromRule) getPqlQuery() (string, error) {
if r.version == "v5" {
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule setup")
}
if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
if len(r.ruleCondition.CompositeQuery.PromQueries) > 0 {
selectedQuery := r.GetSelectedQuery()
if promQuery, ok := r.ruleCondition.CompositeQuery.PromQueries[selectedQuery]; ok {
query := promQuery.Query
if query == "" {
return query, fmt.Errorf("a promquery needs to be set for this rule to function")
}
return query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule query")
return "", fmt.Errorf("invalid promql rule setup")
}
func (r *PromRule) matrixToV3Series(res promql.Matrix) []*v3.Series {
v3Series := make([]*v3.Series, 0, len(res))
func matrixToTimeSeries(res promql.Matrix) []*qbtypes.TimeSeries {
result := make([]*qbtypes.TimeSeries, 0, len(res))
for _, series := range res {
commonSeries := toCommonSeries(series)
v3Series = append(v3Series, &commonSeries)
result = append(result, promSeriesToTimeSeries(series))
}
return v3Series
return result
}
func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
@@ -143,31 +123,31 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
return nil, err
}
matrixToProcess := r.matrixToV3Series(res)
seriesToProcess := matrixToTimeSeries(res)
hasData := len(matrixToProcess) > 0
hasData := len(seriesToProcess) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
// Filter out new series if newGroupEvalDelay is configured
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
} else {
matrixToProcess = filteredSeries
seriesToProcess = filteredSeries
}
}
var resultVector ruletypes.Vector
for _, series := range matrixToProcess {
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(
ctx, "not enough data points to evaluate series, skipping",
"rule_id", r.ID(), "num_points", len(series.Points), "required_points", r.Condition().RequiredNumPoints,
"rule_id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
)
continue
}
@@ -454,26 +434,25 @@ func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time
}
}
func toCommonSeries(series promql.Series) v3.Series {
commonSeries := v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0),
func promSeriesToTimeSeries(series promql.Series) *qbtypes.TimeSeries {
ts := &qbtypes.TimeSeries{
Labels: make([]*qbtypes.Label, 0, len(series.Metric)),
Values: make([]*qbtypes.TimeSeriesValue, 0, len(series.Floats)),
}
for _, lbl := range series.Metric {
commonSeries.Labels[lbl.Name] = lbl.Value
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
lbl.Name: lbl.Value,
ts.Labels = append(ts.Labels, &qbtypes.Label{
Key: telemetrytypes.TelemetryFieldKey{Name: lbl.Name},
Value: lbl.Value,
})
}
for _, f := range series.Floats {
commonSeries.Points = append(commonSeries.Points, v3.Point{
ts.Values = append(ts.Values, &qbtypes.TimeSeriesValue{
Timestamp: f.T,
Value: f.F,
})
}
return commonSeries
return ts
}
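
For reference, a small sketch of the input promSeriesToTimeSeries consumes, assuming the upstream promql.Series shape (Metric labels plus Floats samples) that the function above iterates:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

func main() {
	// One PromQL series: {job="api"} with two float samples. The converter
	// above copies each label into a *qbtypes.Label and each (T, F) pair
	// into a *qbtypes.TimeSeriesValue.
	s := promql.Series{
		Metric: labels.FromStrings("job", "api"),
		Floats: []promql.FPoint{{T: 1640995200000, F: 1.0}, {T: 1640995260000, F: 2.0}},
	}
	fmt.Println(s.Metric.String(), len(s.Floats)) // {job="api"} 2
}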

View File

@@ -20,6 +20,7 @@ import (
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -47,9 +48,13 @@ func TestPromRuleEval(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "dummy_query", // This is not used in the test
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "dummy_query", // This is not used in the test
},
},
},
},
@@ -62,7 +67,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp string
matchType string
target float64
expectedAlertSample v3.Point
expectedAlertSample float64
expectedVectorValues []float64 // Expected values in result vector
}{
// Test cases for Equals Always
@@ -80,7 +85,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
expectedVectorValues: []float64{0.0},
},
{
@@ -145,7 +150,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
expectedVectorValues: []float64{0.0},
},
{
@@ -162,7 +167,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
},
{
values: pql.Series{
@@ -178,7 +183,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
},
{
values: pql.Series{
@@ -211,7 +216,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "2", // Always
target: 1.5,
expectedAlertSample: v3.Point{Value: 2.0},
expectedAlertSample: 2.0,
expectedVectorValues: []float64{2.0},
},
{
@@ -228,7 +233,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Above
matchType: "2", // Always
target: 2.0,
expectedAlertSample: v3.Point{Value: 3.0},
expectedAlertSample: 3.0,
},
{
values: pql.Series{
@@ -244,7 +249,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Below
matchType: "2", // Always
target: 13.0,
expectedAlertSample: v3.Point{Value: 12.0},
expectedAlertSample: 12.0,
},
{
values: pql.Series{
@@ -276,7 +281,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "1", // Once
target: 4.5,
expectedAlertSample: v3.Point{Value: 10.0},
expectedAlertSample: 10.0,
expectedVectorValues: []float64{10.0},
},
{
@@ -339,7 +344,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -371,7 +376,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -402,7 +407,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -418,7 +423,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
// Test cases for Less Than Always
{
@@ -435,7 +440,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "2", // Always
target: 4,
expectedAlertSample: v3.Point{Value: 1.5},
expectedAlertSample: 1.5,
},
{
values: pql.Series{
@@ -467,7 +472,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "1", // Once
target: 4,
expectedAlertSample: v3.Point{Value: 2.5},
expectedAlertSample: 2.5,
},
{
values: pql.Series{
@@ -499,7 +504,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "3", // OnAverage
target: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -530,7 +535,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -561,7 +566,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -577,7 +582,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "3", // OnAverage
target: 12.0,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
// Test cases for InTotal
{
@@ -594,7 +599,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: v3.Point{Value: 30.0},
expectedAlertSample: 30.0,
},
{
values: pql.Series{
@@ -621,7 +626,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "4", // InTotal
target: 9.0,
expectedAlertSample: v3.Point{Value: 10.0},
expectedAlertSample: 10.0,
},
{
values: pql.Series{
@@ -645,7 +650,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "4", // InTotal
target: 10.0,
expectedAlertSample: v3.Point{Value: 20.0},
expectedAlertSample: 20.0,
},
{
values: pql.Series{
@@ -670,7 +675,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: v3.Point{Value: 20.0},
expectedAlertSample: 20.0,
},
{
values: pql.Series{
@@ -708,7 +713,7 @@ func TestPromRuleEval(t *testing.T) {
assert.NoError(t, err)
}
resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
resultVectors, err := rule.Threshold.Eval(*promSeriesToTimeSeries(c.values), rule.Unit(), ruletypes.EvalData{})
assert.NoError(t, err)
// Compare full result vector with expected vector
@@ -724,12 +729,12 @@ func TestPromRuleEval(t *testing.T) {
if len(resultVectors) > 0 {
found := false
for _, sample := range resultVectors {
if sample.V == c.expectedAlertSample.Value {
if sample.V == c.expectedAlertSample {
found = true
break
}
}
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues)
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample, idx, actualValues)
}
} else {
assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
@@ -754,9 +759,13 @@ func TestPromRuleUnitCombinations(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1013,9 +1022,13 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1124,9 +1137,13 @@ func TestMultipleThresholdPromRule(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1361,8 +1378,11 @@ func TestPromRule_NoData(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1486,8 +1506,11 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1635,8 +1658,11 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
},

View File

@@ -1,38 +1,24 @@
package rules
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log/slog"
"math"
"net/url"
"reflect"
"text/template"
"time"
"github.com/SigNoz/signoz/pkg/contextlinks"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/formatter"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -42,23 +28,9 @@ import (
type ThresholdRule struct {
*BaseRule
// Ever since we introduced the new metrics query builder, the version is "v4"
// for all the rules
// if the version is "v3", then we use the old querier
// if the version is "v4", then we use the new querierV2
version string
// querier is used for alerts created before the introduction of new metrics query builder
querier interfaces.Querier
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
// querierV5 is the query builder v5 querier used for all alert rule evaluation
querierV5 querierV5.Querier
// used for attribute metadata enrichment for logs and traces
logsKeys map[string]v3.AttributeKey
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
@@ -82,25 +54,10 @@ func NewThresholdRule(
}
t := ThresholdRule{
BaseRule: baseRule,
version: p.Version,
BaseRule: baseRule,
querierV5: querierV5,
}
querierOption := querier.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querier = querier.NewQuerier(querierOption)
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.querierV5 = querierV5
t.reader = reader
return &t, nil
}
@@ -120,169 +77,9 @@ func (r *ThresholdRule) Type() ruletypes.RuleType {
return ruletypes.RuleTypeThreshold
}
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
params := &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: &v3.CompositeQuery{
QueryType: r.ruleCondition.CompositeQuery.QueryType,
PanelType: r.ruleCondition.CompositeQuery.PanelType,
BuilderQueries: make(map[string]*v3.BuilderQuery),
ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
PromQueries: make(map[string]*v3.PromQuery),
Unit: r.ruleCondition.CompositeQuery.Unit,
},
Variables: make(map[string]interface{}),
NoCache: true,
}
querytemplate.AssignReservedVarsV3(params)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
return nil, err
}
var query bytes.Buffer
err = tmpl.Execute(&query, params.Variables)
if err != nil {
return nil, err
}
params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
Query: query.String(),
Disabled: chQuery.Disabled,
Legend: chQuery.Legend,
}
}
return params, nil
}
if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
q.StepInterval = minStep
}
q.SetShiftByFromFunc()
if q.DataSource == v3.DataSourceMetrics {
// if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes
// if the time range is greater than 1 week, set the step interval to be multiple of 30 mins
if end-start >= 24*time.Hour.Milliseconds() && end-start < 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/300)) * 300
} else if end-start >= 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/1800)) * 1800
}
}
}
}
if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: r.ruleCondition.CompositeQuery,
Variables: make(map[string]interface{}),
NoCache: true,
}, nil
}
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToLogsV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceLogs {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)
return contextlinks.PrepareLinksToLogs(start, end, filterItems)
}
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToTracesV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceTraces {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)
return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}
func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
@@ -302,10 +99,10 @@ func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (
return req, nil
}
func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
@@ -342,10 +139,10 @@ func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time,
return contextlinks.PrepareLinksToLogsV5(start, end, whereClause)
}
func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
@@ -391,115 +188,6 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
hasTracesQuery := false
for _, query := range params.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceLogs {
hasLogsQuery = true
}
if query.DataSource == v3.DataSourceTraces {
hasTracesQuery = true
}
}
if hasLogsQuery {
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(params) {
logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if apiErr != nil {
return nil, apiErr.ToError()
}
logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
r.logsKeys = logsKeys
logsv3.Enrich(params, logsKeys)
}
}
if hasTracesQuery {
spanKeys, err := r.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if err != nil {
return nil, err
}
r.spansKeys = spanKeys
tracesV4.Enrich(params, spanKeys)
}
}
var results []*v3.Result
var queryErrors map[string]error
if r.version == "v4" {
results, queryErrors, err = r.querierV2.QueryRange(ctx, orgID, params)
} else {
results, queryErrors, err = r.querier.QueryRange(ctx, orgID, params)
}
if err != nil {
r.logger.ErrorContext(ctx, "failed to get alert query range result", "rule_name", r.Name(), "error", err, "query_errors", queryErrors)
return nil, fmt.Errorf("internal error while querying")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
results, err = postprocess.PostProcessResult(results, params)
if err != nil {
r.logger.ErrorContext(ctx, "failed to post process result", "rule_name", r.Name(), "error", err)
return nil, fmt.Errorf("internal error while post processing")
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
if queryResult == nil {
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
return resultVector, nil
}
for _, series := range queryResult.Series {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, resultSeries...)
}
return resultVector, nil
}
func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
var results []*v3.Result
v5Result, err := r.querierV5.QueryRange(ctx, orgID, params)
if err != nil {
@@ -507,26 +195,24 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
return nil, fmt.Errorf("internal error while querying")
}
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok {
results = append(results, transition.ConvertV5TimeSeriesDataToV4Result(tsData))
} else {
// NOTE: should not happen but just to ensure we don't miss it if it happens for some reason
r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got", "item_type", reflect.TypeOf(item))
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
var queryResult *qbtypes.TimeSeriesData
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok && tsData.QueryName == selectedQuery {
queryResult = tsData
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
var allSeries []*qbtypes.TimeSeries
if queryResult != nil {
for _, bucket := range queryResult.Aggregations {
allSeries = append(allSeries, bucket.Series...)
}
}
hasData := len(allSeries) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
@@ -539,7 +225,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
}
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.Series
seriesToProcess := allSeries
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -552,7 +238,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -573,16 +259,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
}
res, err := r.buildAndRunQuery(ctx, r.orgID, ts)
if err != nil {
return 0, err
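Net effect of this file's changes: Eval always takes the single v5 path, and the nested v5 result (Results, then Aggregations, then Series) is flattened before threshold evaluation. Here is the selection-and-flatten logic from the hunks above condensed into one helper, as a sketch only (this function does not exist in the source):

func flattenSelected(results []any, selected string) []*qbtypes.TimeSeries {
	var all []*qbtypes.TimeSeries
	for _, item := range results {
		tsData, ok := item.(*qbtypes.TimeSeriesData)
		if !ok || tsData.QueryName != selected {
			continue
		}
		// Each aggregation bucket carries its own slice of series.
		for _, bucket := range tsData.Aggregations {
			all = append(all, bucket.Series...)
		}
	}
	return all
}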

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,7 @@
package signoz
import (
"bytes"
"context"
"encoding/json"
"net/http"
"os"
"reflect"
@@ -25,13 +22,10 @@ import (
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
"gopkg.in/yaml.v2"
)
type OpenAPI struct {
@@ -63,10 +57,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -100,67 +90,10 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
return err
}
// The library's MarshalYAML does a JSON round-trip that converts all numbers
// to float64, causing large integers (e.g. epoch millisecond timestamps) to
// render in scientific notation (1.6409952e+12).
jsonData, err := openapi.reflector.Spec.MarshalJSON()
if err != nil {
return err
}
dec := json.NewDecoder(bytes.NewReader(jsonData))
dec.UseNumber()
var v any
if err := dec.Decode(&v); err != nil {
return err
}
convertJSONNumbers(v)
spec, err := yaml.Marshal(v)
spec, err := openapi.reflector.Spec.MarshalYAML()
if err != nil {
return err
}
return os.WriteFile(path, spec, 0o600)
}
// convertJSONNumbers recursively walks a decoded JSON structure and converts
// json.Number values to int64 (preferred) or float64 so that YAML marshaling
// renders them as plain numbers instead of quoted strings.
func convertJSONNumbers(v interface{}) {
switch val := v.(type) {
case map[string]interface{}:
for k, elem := range val {
if n, ok := elem.(json.Number); ok {
if i, err := n.Int64(); err == nil {
val[k] = i
} else if f, err := n.Float64(); err == nil {
val[k] = f
}
} else {
convertJSONNumbers(elem)
}
}
case []interface{}:
for i, elem := range val {
if n, ok := elem.(json.Number); ok {
if i64, err := n.Int64(); err == nil {
val[i] = i64
} else if f, err := n.Float64(); err == nil {
val[i] = f
}
} else {
convertJSONNumbers(elem)
}
}
}
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
querier.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
}
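The workaround deleted above guarded against a real stdlib pitfall: decoding JSON into interface{} turns every number into float64, so large epoch-millisecond integers re-render in scientific notation. A standalone demonstration, independent of this codebase:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	var v any
	_ = json.Unmarshal([]byte(`{"ts": 1640995200000}`), &v)
	fmt.Println(v) // map[ts:1.6409952e+12]; the float64 round-trip loses the integer form

	// Decoding with UseNumber keeps the textual form as json.Number instead.
	dec := json.NewDecoder(strings.NewReader(`{"ts": 1640995200000}`))
	dec.UseNumber()
	_ = dec.Decode(&v)
	fmt.Println(v) // map[ts:1640995200000]
}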

View File

@@ -169,6 +169,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
)
}

View File

@@ -0,0 +1,194 @@
package sqlmigration
import (
"context"
"database/sql"
"encoding/json"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type migrateRulesV4ToV5 struct {
store sqlstore.SQLStore
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
func NewMigrateRulesV4ToV5Factory(
store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("migrate_rules_v4_to_v5"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &migrateRulesV4ToV5{
store: store,
telemetryStore: telemetryStore,
logger: ps.Logger,
}, nil
})
}
func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT name
FROM (
SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
INTERSECT
SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
)
ORDER BY name
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT tagKey
FROM signoz_traces.distributed_span_attributes_keys
WHERE tagType IN ('tag', 'resource')
GROUP BY tagKey
HAVING COUNT(DISTINCT tagType) > 1
ORDER BY tagKey
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
logsKeys, err := migration.getLogDuplicateKeys(ctx)
if err != nil {
return err
}
tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
if err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var rules []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err = tx.NewSelect().
Table("rule").
Column("id", "data").
Scan(ctx, &rules)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, rule := range rules {
version, _ := rule.Data["version"].(string)
if version == "v5" {
continue
}
migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)
// Check if the queries envelope already exists and is non-empty
hasQueriesEnvelope := false
if condition, ok := rule.Data["condition"].(map[string]any); ok {
if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
hasQueriesEnvelope = true
}
}
}
if hasQueriesEnvelope {
// Case 2: Already has queries envelope, just bump version
migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
rule.Data["version"] = "v5"
} else {
// Case 1: Old format, run full migration
migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
alertsMigrator.Migrate(ctx, rule.Data)
// Force version to v5 regardless of Migrate return value
rule.Data["version"] = "v5"
}
dataJSON, err := json.Marshal(rule.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("rule").
Set("data = ?", string(dataJSON)).
Where("id = ?", rule.ID).
Exec(ctx)
if err != nil {
return err
}
}
return tx.Commit()
}
func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
return nil
}
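For reference, this is the shape the hasQueriesEnvelope check above inspects, written as the decoded map the migration would see. The key spellings inside "queries" are assumptions inferred from the check and from the updated tests in this diff, not verbatim stored-rule JSON:

ruleData := map[string]any{
	"version": "v4",
	"condition": map[string]any{
		"compositeQuery": map[string]any{
			// A non-empty "queries" slice makes hasQueriesEnvelope true, so the
			// migration only bumps the version instead of rewriting the rule.
			"queries": []any{
				map[string]any{
					"type": "promql",
					"spec": map[string]any{"name": "A", "query": "test_metric"},
				},
			},
		},
	},
}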

View File

@@ -61,11 +61,13 @@ var (
}
)
type fieldMapper struct {}
type fieldMapper struct {
}
func NewFieldMapper() qbtypes.FieldMapper {
return &fieldMapper{}
}
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
@@ -252,27 +254,12 @@ func (m *fieldMapper) buildFieldForJSON(key *telemetrytypes.TelemetryFieldKey) (
"plan length is less than 2 for promoted path: %s", key.Name)
}
node := plan[1]
promotedExpr := fmt.Sprintf(
"dynamicElement(%s, '%s')",
node.FieldPath(),
node.TerminalConfig.ElemType.StringValue(),
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)",
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
expr,
)
// dynamicElement returns NULL for scalar types or an empty array for array types.
if node.TerminalConfig.ElemType.IsArray {
expr = fmt.Sprintf(
"if(length(%s) > 0, %s, %s)",
promotedExpr,
promotedExpr,
expr,
)
} else {
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)", promotedExpr, expr)
}
}
return expr, nil
@@ -294,7 +281,8 @@ func (m *fieldMapper) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (stri
}
// Build arrayMap expressions for ALL available branches at the root level.
// Iterate branches in deterministic order (JSON then Dynamic)
// Iterate branches in deterministic order (JSON then Dynamic) so generated SQL
// is stable across environments; map iteration order is random in Go.
var arrayMapExpressions []string
for _, node := range plan {
for _, branchType := range node.BranchesInOrder() {
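The comment above points at a general Go gotcha: ranging over a map is deliberately randomized, so SQL assembled from a map must impose its own order. A self-contained illustration with generic names (not from this repo):

package main

import (
	"fmt"
	"sort"
)

func main() {
	branches := map[string]string{"Dynamic": "Array(Dynamic)", "JSON": "Array(JSON)"}
	keys := make([]string, 0, len(branches))
	for k := range branches {
		keys = append(keys, k)
	}
	sort.Strings(keys) // fixed order yields byte-identical SQL on every run
	for _, k := range keys {
		fmt.Printf("dynamicElement(x, '%s')\n", branches[k])
	}
}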

View File

@@ -32,6 +32,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
conditions := []string{}
for _, node := range c.key.JSONPlan {
condition, err := c.emitPlannedCondition(node, operator, value, sb)
@@ -72,9 +73,9 @@ func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONA
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains:
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains:
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
operator = qbtypes.FilterOperatorNotEqual
}
}
@@ -190,14 +191,13 @@ func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytype
arrayExpr = typedArrayExpr()
}
key := "x"
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, "x", operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
// recurseArrayHops recursively builds array traversal conditions
@@ -279,31 +279,27 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
values, ok := value.([]any)
if !ok {
values = []any{value}
}
if operator == qbtypes.FilterOperatorIn {
return sb.In(fieldExpr, values...), nil
conds := []string{}
for _, v := range values {
if operator == qbtypes.FilterOperatorIn {
conds = append(conds, sb.E(fieldExpr, v))
} else {
conds = append(conds, sb.NE(fieldExpr, v))
}
}
return sb.NotIn(fieldExpr, values...), nil
if operator == qbtypes.FilterOperatorIn {
return sb.Or(conds...), nil
}
return sb.And(conds...), nil
case qbtypes.FilterOperatorExists:
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
// between and not between
case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrBetweenValues
}
if len(values) != 2 {
return "", qbtypes.ErrBetweenValues
}
if operator == qbtypes.FilterOperatorBetween {
return sb.Between(fieldExpr, values[0], values[1]), nil
}
return sb.NotBetween(fieldExpr, values[0], values[1]), nil
default:
return "", qbtypes.ErrUnsupportedOperator
}
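The IN/NOT IN rewrite above trades the builder's native In/NotIn for an explicit OR/AND over per-value comparisons. A standalone sketch of the shape this yields, assuming the huandu/go-sqlbuilder Cond API that the surrounding code already calls (sb.E, sb.Or):

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	conds := []string{}
	for _, v := range []any{"a", "b"} {
		conds = append(conds, sb.E("field", v)) // each renders as "field = ?"
	}
	fmt.Println(sb.Or(conds...)) // (field = ? OR field = ?)
}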

View File

@@ -316,7 +316,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -345,7 +345,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -360,55 +360,12 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
{
name: "Dynamic array IN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body.education[].parameters IN [1.65, 1.99]"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> toFloat64(x) IN (?, ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> x IN (?, ?), arrayMap(x->dynamicElement(x, 'Array(Nullable(Float64))'), arrayFilter(x->(dynamicType(x) = 'Array(Nullable(Float64))'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), 1.65, 1.99, 1.65, 1.99, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
{
name: "Integer BETWEEN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester BETWEEN 2 AND 4"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Integer IN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester IN [2, 4]"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Equals to 'sports' inside array of awards",
requestType: qbtypes.RequestTypeRaw,
@@ -432,7 +389,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(toFloat64OrNull(x) -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? 
AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%4%", float64(4), "%4%", float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -447,7 +404,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%Good%", "Good", "%Good%", "Good", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -535,7 +492,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
disableBodyJSONQuery(t)
}()
statementBuilder := buildJSONTestStatementBuilder(t, "education", "tags")
statementBuilder := buildJSONTestStatementBuilder(t, "education")
cases := []struct {
name string
requestType qbtypes.RequestType
@@ -543,20 +500,6 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected qbtypes.Statement
expectedErr error
}{
{
name: "Has Array promoted uses body fallback",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "has(body.tags, 'production')"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(if(length(dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))')) > 0, dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))'), dynamicElement(body_json.`tags`, 'Array(Nullable(String))')), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "production", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Key inside Array(JSON) exists",
requestType: qbtypes.RequestTypeRaw,
@@ -608,7 +551,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -637,7 +580,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -652,7 +595,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},

View File

@@ -3,12 +3,12 @@ package telemetrylogs
import (
"context"
"fmt"
"reflect"
"strconv"
"strings"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
@@ -41,55 +41,31 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
}
func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
if operator.IsArrayOperator() && reflect.ValueOf(value).Kind() != reflect.Slice {
value = []any{value}
}
// closure to calculate the data type of the value
var closure func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any)
closure = func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
// check if the value is an int, float, string, bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
var scalerType telemetrytypes.FieldDataType
if len(v) > 0 {
// Note: [[...]] Slices inside Slices are not handled yet
if reflect.ValueOf(v[0]).Kind() == reflect.Slice {
return telemetrytypes.FieldDataTypeUnspecified, value
}
scalerType, _ = closure(v[0], key)
}
arrayType := telemetrytypes.ScalerFieldTypeToArrayFieldType[scalerType]
switch {
// decide on the field data type based on the key
case key.FieldDataType.IsArray():
return arrayType, v
default:
// TODO(Piyush): backward compatibility for the old String based JSON QB queries
if strings.HasSuffix(key.Name, telemetrytypes.ArrayAnyIndexSuffix) {
return arrayType, v
}
return scalerType, v
}
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
// check if the value is an int, float, string, bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
if len(v) > 0 {
valueType, _ = InferDataType(v[0], operator, key)
}
return valueType, value
return valueType, v
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
}
// calculate the data type of the value
return closure(value, key)
// check if the key refers to an array
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}
return valueType, value
}
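The rewritten InferDataType above drops the inner closure in favor of direct recursion, and array detection now comes from the key name (the "[*]"/"[]" suffix) rather than from the operator, matching the removal of IsArrayOperator later in this diff. A minimal standalone sketch of the resulting control flow, with plain strings standing in for telemetrytypes.FieldDataType and parseStrValue simplified away:

package main

import (
	"fmt"
	"strings"
)

// inferScalar stands in for the scalar branches of InferDataType.
func inferScalar(value any) string {
	switch value.(type) {
	case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
		return "int64"
	case float32, float64:
		return "float64"
	case bool:
		return "bool"
	default:
		return "string"
	}
}

// infer mirrors the new flow: slices recurse into their first element
// (the recursive call applies the suffix promotion and returns early),
// while scalars are promoted to an array type when the key name ends
// in "[*]" or "[]".
func infer(keyName string, value any) string {
	if v, ok := value.([]any); ok {
		if len(v) == 0 {
			return "unspecified"
		}
		return infer(keyName, v[0])
	}
	t := inferScalar(value)
	if strings.HasSuffix(keyName, "[*]") || strings.HasSuffix(keyName, "[]") {
		t = "[]" + t
	}
	return t
}

func main() {
	fmt.Println(infer("body.user_names[*]", "john_doe")) // []string
	fmt.Println(infer("body.score", 1.65))               // float64
}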
func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {

View File

@@ -421,38 +421,6 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
},
expectedErr: nil,
},
{
name: "IN operator with json search",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "body.user_names[*] IN 'john_doe'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "has with json search",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "has(body.user_names[*], 'john_doe')",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
}
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()

View File

@@ -112,8 +112,7 @@ func (t *telemetryMetaStore) buildBodyJSONPaths(ctx context.Context,
}
for _, fieldKey := range fieldKeys {
promotedKey := strings.Split(fieldKey.Name, telemetrytypes.ArraySep)[0]
fieldKey.Materialized = promoted.Contains(promotedKey)
fieldKey.Materialized = promoted.Contains(fieldKey.Name)
fieldKey.Indexes = indexes[fieldKey.Name]
}
@@ -502,8 +501,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
pathConditions := []string{}
for _, path := range paths {
split := strings.Split(path, telemetrytypes.ArraySep)
pathConditions = append(pathConditions, sb.Equal("path", split[0]))
pathConditions = append(pathConditions, sb.Equal("path", path))
}
sb.Where(sb.Or(pathConditions...))
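Both hunks in this file make the same change: promotion used to be checked against the root of an array path (everything before the first array separator), and is now checked against the full path, so each array-qualified path must be promoted explicitly. A small illustration of the lookup keys before and after, where arraySep is an assumed stand-in for telemetrytypes.ArraySep:

package main

import (
	"fmt"
	"strings"
)

const arraySep = "[]." // assumption: stand-in for telemetrytypes.ArraySep

func main() {
	path := "education[].parameters"
	fmt.Println(strings.Split(path, arraySep)[0]) // before: looked up as "education"
	fmt.Println(path)                             // after: looked up verbatim
}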

View File

@@ -1,102 +0,0 @@
package transition
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// ConvertV5TimeSeriesDataToV4Result converts v5 TimeSeriesData to v4 Result
func ConvertV5TimeSeriesDataToV4Result(v5Data *qbtypes.TimeSeriesData) *v3.Result {
if v5Data == nil {
return nil
}
result := &v3.Result{
QueryName: v5Data.QueryName,
Series: make([]*v3.Series, 0),
}
toV4Series := func(ts *qbtypes.TimeSeries) *v3.Series {
series := &v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0, len(ts.Values)),
}
for _, label := range ts.Labels {
valueStr := fmt.Sprintf("%v", label.Value)
series.Labels[label.Key.Name] = valueStr
}
if len(series.Labels) > 0 {
series.LabelsArray = append(series.LabelsArray, series.Labels)
}
for _, tsValue := range ts.Values {
if tsValue.Partial {
continue
}
point := v3.Point{
Timestamp: tsValue.Timestamp,
Value: tsValue.Value,
}
series.Points = append(series.Points, point)
}
return series
}
for _, aggBucket := range v5Data.Aggregations {
for _, ts := range aggBucket.Series {
result.Series = append(result.Series, toV4Series(ts))
}
if len(aggBucket.AnomalyScores) != 0 {
result.AnomalyScores = make([]*v3.Series, 0)
for _, ts := range aggBucket.AnomalyScores {
result.AnomalyScores = append(result.AnomalyScores, toV4Series(ts))
}
}
if len(aggBucket.PredictedSeries) != 0 {
result.PredictedSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.PredictedSeries {
result.PredictedSeries = append(result.PredictedSeries, toV4Series(ts))
}
}
if len(aggBucket.LowerBoundSeries) != 0 {
result.LowerBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.LowerBoundSeries {
result.LowerBoundSeries = append(result.LowerBoundSeries, toV4Series(ts))
}
}
if len(aggBucket.UpperBoundSeries) != 0 {
result.UpperBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.UpperBoundSeries {
result.UpperBoundSeries = append(result.UpperBoundSeries, toV4Series(ts))
}
}
}
return result
}
// ConvertV5TimeSeriesDataSliceToV4Results converts a slice of v5 TimeSeriesData to v4 QueryRangeResponse
func ConvertV5TimeSeriesDataSliceToV4Results(v5DataSlice []*qbtypes.TimeSeriesData) *v3.QueryRangeResponse {
response := &v3.QueryRangeResponse{
ResultType: "matrix", // Time series data is typically "matrix" type
Result: make([]*v3.Result, 0, len(v5DataSlice)),
}
for _, v5Data := range v5DataSlice {
if result := ConvertV5TimeSeriesDataToV4Result(v5Data); result != nil {
response.Result = append(response.Result, result)
}
}
return response
}

View File

@@ -172,6 +172,7 @@ var (
func (TimeAggregation) Enum() []any {
return []any{
TimeAggregationUnspecified,
TimeAggregationLatest,
TimeAggregationSum,
TimeAggregationAvg,
@@ -204,6 +205,7 @@ var (
func (SpaceAggregation) Enum() []any {
return []any{
SpaceAggregationUnspecified,
SpaceAggregationSum,
SpaceAggregationAvg,
SpaceAggregationMin,

View File

@@ -10,35 +10,10 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type Step struct{ time.Duration }
var _ jsonschema.Exposer = Step{}
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
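Removing the JSONSchema exposer only drops the generated API documentation; the dual-format parsing it described lives in the UnmarshalJSON that follows and is unchanged. A standalone sketch of that behavior, using a simplified step type (the real method also guards empty input, as shown below, and may differ in detail):

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// step is a stand-in for qbtypes.Step: it accepts either a Go duration
// string ("60s", "5m", "1h") or a bare number of seconds (60).
type step struct{ time.Duration }

func (s *step) UnmarshalJSON(b []byte) error {
	var str string
	if err := json.Unmarshal(b, &str); err == nil {
		d, err := time.ParseDuration(str)
		if err != nil {
			return err
		}
		s.Duration = d
		return nil
	}
	var secs float64
	if err := json.Unmarshal(b, &secs); err != nil {
		return err
	}
	s.Duration = time.Duration(secs * float64(time.Second))
	return nil
}

func main() {
	var a, b step
	_ = json.Unmarshal([]byte(`"5m"`), &a)
	_ = json.Unmarshal([]byte(`60`), &b)
	fmt.Println(a.Duration, b.Duration) // 5m0s 1m0s
}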
func (s *Step) UnmarshalJSON(b []byte) error {
if len(b) == 0 {
return nil
@@ -186,17 +161,6 @@ func (f FilterOperator) IsStringSearchOperator() bool {
}
}
// IsArrayOperator returns true if the operator works with array values only
func (f FilterOperator) IsArrayOperator() bool {
switch f {
case FilterOperatorIn, FilterOperatorNotIn,
FilterOperatorBetween, FilterOperatorNotBetween:
return true
default:
return false
}
}
type OrderDirection struct {
valuer.String
}
@@ -206,14 +170,6 @@ var (
OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
)
// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}
var (
OrderDirectionMap = map[string]OrderDirection{
"asc": OrderDirectionAsc,
@@ -236,19 +192,6 @@ var (
ReduceToMedian = ReduceTo{valuer.NewString("median")}
)
// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}
// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
// reduceTo can be one of: last, sum, avg, min, max, count, median
// if reduceTo is not recognized, the function returns the original series

View File

@@ -36,30 +36,6 @@ var (
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)
// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}
// Validate checks if the FunctionName is valid and one of the known types
func (fn FunctionName) Validate() error {
validFunctions := []FunctionName{

View File

@@ -16,17 +16,6 @@ var (
JoinTypeCross = JoinType{valuer.NewString("cross")}
)
// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}
type QueryRef struct {
Name string `json:"name"`
}

View File

@@ -2,8 +2,6 @@ package querybuildertypesv5
import (
"context"
"github.com/swaggest/jsonschema-go"
)
type Query interface {
@@ -31,12 +29,4 @@ type ExecStats struct {
StepIntervals map[string]uint64 `json:"stepIntervals,omitempty"`
}
var _ jsonschema.Preparer = &ExecStats{}
// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}
type TimeRange struct{ From, To uint64 } // ms since epoch

View File

@@ -16,17 +16,3 @@ var (
QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
QueryTypePromQL = QueryType{valuer.NewString("promql")}
)
// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type QueryEnvelope struct {
@@ -19,71 +18,6 @@ type QueryEnvelope struct {
Spec any `json:"spec"`
}
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}
var _ jsonschema.OneOfExposer = QueryEnvelope{}
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}
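With the oneOf variants gone, the type/spec contract is enforced only by the custom unmarshaler below rather than documented in a schema. A minimal envelope for a logs builder query; "builder_query" and the filter/limit fields follow the tests elsewhere in this diff, while the exact wire names of other fields are assumptions:

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	payload := []byte(`{
		"type": "builder_query",
		"spec": {
			"signal": "logs",
			"filter": { "expression": "severity_text = 'ERROR'" },
			"limit": 10
		}
	}`)
	var env qbtypes.QueryEnvelope
	if err := json.Unmarshal(payload, &env); err != nil {
		// unknown types or signals now surface here, not in generated docs
		fmt.Println("rejected:", err)
		return
	}
	fmt.Printf("spec decoded as %T\n", env.Spec)
}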
// implement custom json unmarshaler for the QueryEnvelope
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
var shadow struct {
@@ -196,12 +130,6 @@ type CompositeQuery struct {
Queries []QueryEnvelope `json:"queries"`
}
// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}
// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
type Alias CompositeQuery
@@ -264,16 +192,6 @@ var (
TextBoxVariableType = VariableType{valuer.NewString("text")}
)
// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}
type VariableItem struct {
Type VariableType `json:"type"`
Value any `json:"value"`
@@ -299,12 +217,6 @@ type QueryRangeRequest struct {
FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
}
// PrepareJSONSchema adds description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
return nil
}
func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
stepsMap := make(map[string]int64)
for _, query := range r.CompositeQuery.Queries {

View File

@@ -30,15 +30,3 @@ var (
func (r RequestType) IsAggregation() bool {
return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}
// Enum implements jsonschema.Enum; returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type QBEvent struct {
@@ -43,17 +42,6 @@ type QueryData struct {
Results []any `json:"results"`
}
var _ jsonschema.OneOfExposer = QueryData{}
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}
type QueryRangeResponse struct {
Type RequestType `json:"type"`
Data QueryData `json:"data"`
@@ -64,14 +52,6 @@ type QueryRangeResponse struct {
QBEvent *QBEvent `json:"-"`
}
var _ jsonschema.Preparer = &QueryRangeResponse{}
// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}
type TimeSeriesData struct {
QueryName string `json:"queryName"`
Aggregations []*AggregationBucket `json:"aggregations"`
@@ -96,6 +76,33 @@ type TimeSeries struct {
Values []*TimeSeriesValue `json:"values"`
}
// LabelsMap converts the label slice to a map[string]string for use in
// alert evaluation helpers that operate on flat label maps.
func (ts *TimeSeries) LabelsMap() map[string]string {
if ts == nil {
return nil
}
m := make(map[string]string, len(ts.Labels))
for _, l := range ts.Labels {
m[l.Key.Name] = fmt.Sprintf("%v", l.Value)
}
return m
}
// NonPartialValues returns only the values where Partial is false.
func (ts *TimeSeries) NonPartialValues() []*TimeSeriesValue {
if ts == nil {
return nil
}
result := make([]*TimeSeriesValue, 0, len(ts.Values))
for _, v := range ts.Values {
if !v.Partial {
result = append(result, v)
}
}
return result
}
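These two helpers are what let rule evaluation consume v5 series directly, replacing the per-rule conversion that the deleted transition package used to perform. A small usage sketch:

package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	ts := &qbtypes.TimeSeries{
		Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
		Values: []*qbtypes.TimeSeriesValue{
			{Timestamp: 1000, Value: 0.95},
			{Timestamp: 2000, Value: 0.97, Partial: true}, // incomplete trailing bucket
		},
	}
	fmt.Println(ts.LabelsMap())             // map[host:server1]
	fmt.Println(len(ts.NonPartialValues())) // 1 — the partial value is dropped
}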
type Label struct {
Key telemetrytypes.TelemetryFieldKey `json:"key"`
Value any `json:"value"`
@@ -179,14 +186,6 @@ var (
ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
)
// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}
type ColumnDescriptor struct {
telemetrytypes.TelemetryFieldKey
QueryName string `json:"queryName"`

View File

@@ -203,11 +203,11 @@ func (rc *RuleCondition) IsValid() bool {
}
// ShouldEval checks whether the series should be evaluated further for alerts.
func (rc *RuleCondition) ShouldEval(series *v3.Series) bool {
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
if rc == nil {
return true
}
return !rc.RequireMinPoints || len(series.Points) >= rc.RequiredNumPoints
return !rc.RequireMinPoints || len(series.NonPartialValues()) >= rc.RequiredNumPoints
}
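The behavioral nuance here: the minimum-points gate now counts only completed buckets, so a series padded with partial (still-filling) values no longer clears it. A sketch, assuming the ruletypes import path used elsewhere in this diff:

package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
)

func main() {
	rc := &ruletypes.RuleCondition{RequireMinPoints: true, RequiredNumPoints: 2}
	series := &qbtypes.TimeSeries{
		Values: []*qbtypes.TimeSeriesValue{
			{Timestamp: 1000, Value: 1.0},
			{Timestamp: 2000, Value: 2.0, Partial: true},
		},
	}
	// Two raw values but only one completed bucket: the gate now fails
	// where len(series.Points) >= 2 would previously have passed.
	fmt.Println(rc.ShouldEval(series)) // false
}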
// QueryType is a shorthand method to get query type

View File

@@ -355,6 +355,14 @@ func (r *PostableRule) validate() error {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
}
if r.Version != "" && r.Version != "v5" {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
}
if r.Version == "v5" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) == 0 {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "queries envelope is required in compositeQuery"))
}
if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
}
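The net effect of the added checks: the only accepted values for version are the empty string (the legacy default) and "v5", and a v5 rule must carry at least one query envelope in its compositeQuery. Distilled into a hypothetical helper for illustration:

package main

import "fmt"

// versionOK distills the new gate above; the real code appends an
// invalid-input error instead of returning a bool.
func versionOK(v string) bool { return v == "" || v == "v5" }

func main() {
	for _, v := range []string{"", "v5", "v4"} {
		fmt.Printf("%q -> %v\n", v, versionOK(v)) // "v4" -> false
	}
}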

View File

@@ -8,6 +8,8 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestIsAllQueriesDisabled(t *testing.T) {
@@ -621,9 +623,9 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
}
// Test that threshold can evaluate properly
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: map[string]string{"test": "label"},
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -698,9 +700,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}
// Test with a value that should trigger both WARNING and CRITICAL thresholds
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: map[string]string{"service": "test"},
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -708,9 +710,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
assert.Equal(t, 2, len(vector))
vector, err = threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: map[string]string{"service": "test"},
vector, err = threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -723,7 +725,7 @@ func TestAnomalyNegationEval(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series v3.Series
series qbtypes.TimeSeries
shouldAlert bool
expectedValue float64
}{
@@ -751,9 +753,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
@@ -785,9 +787,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
@@ -818,9 +820,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
@@ -852,9 +854,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
@@ -885,9 +887,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below and all the times
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
@@ -920,9 +922,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
@@ -954,10 +956,10 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(8) >= 7, alert
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
@@ -988,9 +990,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
@@ -1022,9 +1024,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 60.0}, // below, should alert
{Timestamp: 2000, Value: 90.0},
},
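
These tests now build their series with the query builder v5 types. As a minimal sketch of the migrated shape, assuming the types exactly as they appear in this diff, a hypothetical helper (not part of the patch; the package name is invented) could read:

package rules // hypothetical package name for this sketch

import (
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

// newHostSeries mirrors the construction pattern used in the tests above:
// labels become []*qbtypes.Label keyed by a telemetrytypes.TelemetryFieldKey,
// and points become []*qbtypes.TimeSeriesValue.
func newHostSeries(host string, points ...*qbtypes.TimeSeriesValue) qbtypes.TimeSeries {
	return qbtypes.TimeSeries{
		Labels: []*qbtypes.Label{
			{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: host},
		},
		Values: points,
	}
}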

View File

@@ -8,8 +8,8 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -83,7 +83,7 @@ func (eval EvalData) HasActiveAlert(sampleLabelFp uint64) bool {
type RuleThreshold interface {
// Eval runs the given series through the threshold rules
// using the given EvalData and returns the matching samples
Eval(series v3.Series, unit string, evalData EvalData) (Vector, error)
Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error)
GetRuleReceivers() []RuleReceivers
}
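
Eval now takes qbtypes.TimeSeries directly. For orientation, a minimal sketch of an implementation against the updated signature; noopThreshold is hypothetical, while Vector, EvalData, and RuleReceivers are this package's own types:

// noopThreshold never fires; it exists only to show the new method set.
type noopThreshold struct{}

func (noopThreshold) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
	// A real implementation would walk series.Values and compare against
	// its targets; returning a nil Vector reports no matching samples.
	return nil, nil
}

func (noopThreshold) GetRuleReceivers() []RuleReceivers {
	return nil
}
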
@@ -122,10 +122,11 @@ func (r BasicRuleThresholds) Validate() error {
return errors.Join(errs...)
}
func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalData) (Vector, error) {
func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
var resultVector Vector
thresholds := []BasicRuleThreshold(r)
sortThresholds(thresholds)
seriesLabels := series.LabelsMap()
for _, threshold := range thresholds {
smpl, shouldAlert := threshold.shouldAlert(series, unit)
if shouldAlert {
@@ -137,15 +138,15 @@ func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalDa
resultVector = append(resultVector, smpl)
continue
} else if evalData.SendUnmatched {
// Sanitise the series points to remove any NaN or Inf values
series.Points = removeGroupinSetPoints(series)
if len(series.Points) == 0 {
// Sanitise the series by dropping NaN, Inf, and partial values
values := filterValidValues(series.Values)
if len(values) == 0 {
continue
}
// prepare the sample with the first point of the series
// prepare the sample with the first value of the series
smpl := Sample{
Point: Point{T: series.Points[0].Timestamp, V: series.Points[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Point: Point{T: values[0].Timestamp, V: values[0].Value},
Metric: PrepareSampleLabelsForRule(seriesLabels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
}
@@ -160,7 +161,7 @@ func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalDa
if threshold.RecoveryTarget == nil {
continue
}
sampleLabels := PrepareSampleLabelsForRule(series.Labels, threshold.Name)
sampleLabels := PrepareSampleLabelsForRule(seriesLabels, threshold.Name)
alertHash := sampleLabels.Hash()
// check if alert is active and then check if recovery threshold matches
if evalData.HasActiveAlert(alertHash) {
@@ -255,18 +256,23 @@ func (b BasicRuleThreshold) Validate() error {
return errors.Join(errs...)
}
func (b BasicRuleThreshold) matchesRecoveryThreshold(series v3.Series, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) matchesRecoveryThreshold(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.recoveryTarget(ruleUnit))
}
func (b BasicRuleThreshold) shouldAlert(series v3.Series, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlert(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.target(ruleUnit))
}
func removeGroupinSetPoints(series v3.Series) []v3.Point {
var result []v3.Point
for _, s := range series.Points {
if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
result = append(result, s)
// filterValidValues returns only the values that are valid for alert evaluation:
// non-partial, non-NaN, non-Inf, and with non-negative timestamps.
func filterValidValues(values []*qbtypes.TimeSeriesValue) []*qbtypes.TimeSeriesValue {
var result []*qbtypes.TimeSeriesValue
for _, v := range values {
if v.Partial {
continue
}
if v.Timestamp >= 0 && !math.IsNaN(v.Value) && !math.IsInf(v.Value, 0) {
result = append(result, v)
}
}
return result
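
A hedged usage sketch of the new filter; the input literals are invented, and the drop reasons follow the function body above:

in := []*qbtypes.TimeSeriesValue{
	{Timestamp: 1000, Value: 1.5},                // kept
	{Timestamp: 2000, Value: math.NaN()},         // dropped: NaN
	{Timestamp: 3000, Value: math.Inf(1)},        // dropped: +Inf
	{Timestamp: 4000, Value: 2.0, Partial: true}, // dropped: partial bucket
	{Timestamp: -1, Value: 3.0},                  // dropped: negative timestamp
}
out := filterValidValues(in) // only the Timestamp 1000 value survives
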
@@ -284,15 +290,15 @@ func PrepareSampleLabelsForRule(seriesLabels map[string]string, thresholdName st
return lb.Labels()
}
func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float64) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, target float64) (Sample, bool) {
var shouldAlert bool
var alertSmpl Sample
lbls := PrepareSampleLabelsForRule(series.Labels, b.Name)
lbls := PrepareSampleLabelsForRule(series.LabelsMap(), b.Name)
series.Points = removeGroupinSetPoints(series)
values := filterValidValues(series.Values)
// nothing to evaluate
if len(series.Points) == 0 {
if len(values) == 0 {
return alertSmpl, false
}
@@ -300,7 +306,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case AtleastOnce:
// If any sample matches the condition, the rule is firing.
if b.CompareOp == ValueIsAbove {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value > target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -308,7 +314,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -316,7 +322,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value == target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -324,7 +330,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value != target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -332,7 +338,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range series.Points {
for _, smpl := range values {
if math.Abs(smpl.Value) >= target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -345,7 +351,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
shouldAlert = true
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value <= target {
shouldAlert = false
break
@@ -354,7 +360,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
// use min value from the series
if shouldAlert {
var minValue float64 = math.Inf(1)
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value < minValue {
minValue = smpl.Value
}
@@ -362,7 +368,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value >= target {
shouldAlert = false
break
@@ -370,7 +376,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
if shouldAlert {
var maxValue float64 = math.Inf(-1)
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value > maxValue {
maxValue = smpl.Value
}
@@ -378,14 +384,14 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value != target {
shouldAlert = false
break
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value == target {
shouldAlert = false
break
@@ -393,7 +399,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
// use any value from the series that is neither Inf nor NaN
if shouldAlert {
for _, smpl := range series.Points {
for _, smpl := range values {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
break
@@ -401,7 +407,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range series.Points {
for _, smpl := range values {
if math.Abs(smpl.Value) < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
@@ -412,7 +418,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case OnAverage:
// If the average of all samples matches the condition, the rule is firing.
var sum, count float64
for _, smpl := range series.Points {
for _, smpl := range values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -446,7 +452,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
// If the sum of all samples matches the condition, the rule is firing.
var sum float64
for _, smpl := range series.Points {
for _, smpl := range values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -477,21 +483,22 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
lastValue := values[len(values)-1].Value
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
if series.Points[len(series.Points)-1].Value > target {
if lastValue > target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsBelow {
if series.Points[len(series.Points)-1].Value < target {
if lastValue < target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsEq {
if series.Points[len(series.Points)-1].Value == target {
if lastValue == target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsNotEq {
if series.Points[len(series.Points)-1].Value != target {
if lastValue != target {
shouldAlert = true
}
}
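
Pulling the branches together, a hedged sketch of a Last/ValueIsAbove evaluation; the field names follow this diff, and the empty rule and target units are assumed to make the comparison unit-free:

target := 100.0
th := BasicRuleThreshold{
	Name:        "critical", // hypothetical threshold name
	TargetValue: &target,
	MatchType:   Last,
	CompareOp:   ValueIsAbove,
}
series := qbtypes.TimeSeries{
	Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
	Values: []*qbtypes.TimeSeriesValue{
		{Timestamp: 1000, Value: 150.0}, // ignored: Last compares only the final value
		{Timestamp: 2000, Value: 90.0},
	},
}
_, firing := th.shouldAlert(series, "") // false: 90 is not above 100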

View File

@@ -6,16 +6,24 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
target := 100.0
makeLabel := func(name, value string) *qbtypes.Label {
return &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: name}, Value: value}
}
makeValue := func(value float64, ts int64) *qbtypes.TimeSeriesValue {
return &qbtypes.TimeSeriesValue{Value: value, Timestamp: ts}
}
tests := []struct {
name string
threshold BasicRuleThreshold
series v3.Series
series qbtypes.TimeSeries
ruleUnit string
shouldAlert bool
}{
@@ -28,11 +36,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 150ms in seconds
},
ruleUnit: "s",
shouldAlert: true,
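
The arithmetic behind this case, as a hedged sketch; the converter's exact API is not shown in this diff, so the rescaling is written out by hand (threshold 100 with a target unit of ms, per the comments above):

targetMs := 100.0
targetSeconds := targetMs / 1000.0 // "ms" -> "s": 0.1
sample := 0.15                     // series value in seconds (150ms)
firing := sample > targetSeconds   // true, hence shouldAlert: true
_ = firing
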
@@ -46,11 +52,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
},
ruleUnit: "s",
shouldAlert: false,
@@ -64,11 +68,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150000, 1000)}, // 150000ms = 150s
},
ruleUnit: "ms",
shouldAlert: true,
@@ -83,11 +85,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 0.15KiB ≈ 153.6 bytes
},
ruleUnit: "kbytes",
shouldAlert: true,
@@ -101,11 +101,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
},
ruleUnit: "mbytes",
shouldAlert: true,
@@ -120,11 +118,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
},
ruleUnit: "s",
shouldAlert: true,
@@ -138,12 +134,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: OnAverage,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.08, Timestamp: 1000}, // 80ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.08, 1000), // 80ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
},
},
ruleUnit: "s",
@@ -158,12 +154,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: InTotal,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.04, Timestamp: 1000}, // 40MB
{Value: 0.05, Timestamp: 2000}, // 50MB
{Value: 0.03, Timestamp: 3000}, // 30MB
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.04, 1000), // 40MB
makeValue(0.05, 2000), // 50MB
makeValue(0.03, 3000), // 30MB
},
},
ruleUnit: "decgbytes",
@@ -178,12 +174,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AllTheTimes,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.11, Timestamp: 1000}, // 110ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.11, 1000), // 110ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
},
},
ruleUnit: "s",
@@ -198,11 +194,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: Last,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150kB
{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.15, 1000), // 150kB
makeValue(0.05, 2000), // 50kB (last value)
},
},
ruleUnit: "decmbytes",
@@ -218,11 +214,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
},
ruleUnit: "KBs",
shouldAlert: true,
@@ -237,11 +231,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150ms
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150ms
},
ruleUnit: "ms",
shouldAlert: true,
@@ -256,11 +248,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150 (unitless)
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150 (unitless)
},
ruleUnit: "",
shouldAlert: true,

View File

@@ -23,10 +23,8 @@ const (
// e.g., "body.status" where "body." is the prefix
BodyJSONStringSearchPrefix = "body."
ArraySep = jsontypeexporter.ArraySeparator
ArraySepSuffix = "[]"
// TODO(Piyush): Remove once we've migrated to the new array syntax
ArrayAnyIndex = "[*]."
ArrayAnyIndexSuffix = "[*]"
ArrayAnyIndex = "[*]."
)
type TelemetryFieldKey struct {
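
For orientation, a hedged sketch of the path syntax these constants spell out, assuming the semantics their names and comments imply; the field names in the key are invented:

// "body." prefixes searches into the JSON body, and "[*]." addresses
// any element of an array along the path.
key := BodyJSONStringSearchPrefix + "interests" + ArrayAnyIndex + "name"
// key == "body.interests[*].name"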

View File

@@ -172,18 +172,3 @@ func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
}
return true
}
// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
return []any{
FieldContextMetric,
FieldContextLog,
FieldContextSpan,
// FieldContextTrace,
FieldContextResource,
// FieldContextScope,
FieldContextAttribute,
// FieldContextEvent,
FieldContextBody,
}
}

View File

@@ -93,14 +93,6 @@ var (
FieldDataTypeArrayFloat64: "Array(Float64)",
FieldDataTypeArrayBool: "Array(Bool)",
}
ScalerFieldTypeToArrayFieldType = map[FieldDataType]FieldDataType{
FieldDataTypeString: FieldDataTypeArrayString,
FieldDataTypeBool: FieldDataTypeArrayBool,
FieldDataTypeNumber: FieldDataTypeArrayNumber,
FieldDataTypeInt64: FieldDataTypeArrayInt64,
FieldDataTypeFloat64: FieldDataTypeArrayFloat64,
}
)
func (f FieldDataType) CHDataType() string {
@@ -177,19 +169,3 @@ func (f FieldDataType) TagDataType() string {
return "string"
}
}
// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
return []any{
FieldDataTypeString,
FieldDataTypeBool,
FieldDataTypeFloat64,
FieldDataTypeInt64,
FieldDataTypeNumber,
// FieldDataTypeArrayString,
// FieldDataTypeArrayFloat64,
// FieldDataTypeArrayBool,
// FieldDataTypeArrayInt64,
// FieldDataTypeArrayNumber,
}
}

View File

@@ -12,12 +12,3 @@ var (
SignalMetrics = Signal{valuer.NewString("metrics")}
SignalUnspecified = Signal{valuer.NewString("")}
)
// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}

View File

@@ -10,10 +10,3 @@ var (
SourceMeter = Source{valuer.NewString("meter")}
SourceUnspecified = Source{valuer.NewString("")}
)
// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
return []any{
SourceMeter,
}
}

View File

@@ -65,7 +65,6 @@ func TestJSONTypeSet() (map[string][]JSONDataType, MetadataStore) {
"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {String},
"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {ArrayInt64, ArrayString},
"message": {String},
"tags": {ArrayString},
}
return types, nil