Compare commits

...

119 Commits

Author SHA1 Message Date
Abhishek Kumar Singh
b8da56b4f4 Merge branch 'main' into fix/validate_alert_payload_before_saving 2026-01-19 16:15:25 +05:30
Abhishek Kumar Singh
e1f2a25f99 feat: added unit validation in compositeQuery and ruleThreshold 2026-01-13 17:34:49 +05:30
Abhishek Kumar Singh
80f04da274 chore: removed having + expression check as not respected from frontend 2026-01-13 16:57:54 +05:30
Abhishek Kumar Singh
5b169dc3b2 refactor: use model.BadRequest in manager, replaced html template library to text template 2026-01-13 16:31:07 +05:30
Abhishek Kumar Singh
ae50f7d29b chore: remove unused fields form tests 2026-01-13 15:50:53 +05:30
Abhishek Kumar Singh
68f315b13e Merge branch 'main' into fix/validate_alert_payload_before_saving 2026-01-13 10:10:56 +05:30
Srikanth Chekuri
3a2c38888f Merge branch 'main' into fix/validate_alert_payload_before_saving 2026-01-09 23:39:35 +05:30
Abhishek Kumar Singh
40151b4472 Merge branch 'feat/exclude_new_series_from_alert' into fix/validate_alert_payload_before_saving 2026-01-08 15:53:45 +05:30
Abhishek Kumar Singh
f239212745 chore: fix diff in openapi spec 2026-01-08 08:39:05 +05:30
Srikanth Chekuri
28498a0419 Merge branch 'main' into feat/exclude_new_series_from_alert 2026-01-08 02:03:22 +05:30
Srikanth Chekuri
d0c7e20832 Merge branch 'main' into feat/exclude_new_series_from_alert 2026-01-07 22:28:05 +05:30
Abhishek Kumar Singh
3155762978 refactor: added support for formula query in AnalyzeQueryEnvelopes of queryparser 2026-01-07 19:36:32 +05:30
Abhishek Kumar Singh
ffb6b0af67 refactor: updated telemetry metadata store to use tuples for col search 2026-01-07 15:35:18 +05:30
Abhishek Kumar Singh
d2e296ce79 fix: correct first_seen calculation and add skipped series logging 2026-01-07 15:18:00 +05:30
Abhishek Kumar Singh
41ae5b7319 refactor: use correct grouping key for preparing lookup keys 2026-01-06 21:37:14 +05:30
Abhishek Kumar Singh
ec593f02e5 refactor: update CH filter extractor to not process the function column expr in stripTableAlias 2026-01-06 21:10:30 +05:30
Abhishek Kumar Singh
b3e446755f chore: comments for query filter extractor result sorting 2026-01-06 19:04:34 +05:30
Abhishek Kumar Singh
4f4c628905 refactor: update extractMetricAndGroupBys to return a map of metrics to groupBy keys 2026-01-06 18:49:29 +05:30
Abhishek Kumar Singh
9da0cf48ee refactor: change return type for AnalyzeQueryEnvelopes 2026-01-06 16:53:56 +05:30
Abhishek Kumar Singh
061540e824 refactor: updated AnalyzeCompositeQuery to be AnalyzeQueryEnvelopes 2026-01-06 15:11:58 +05:30
Abhishek Kumar Singh
c159f484ba refactor: used gosqlbuilder to generate query for telemetryMetaStore 2026-01-06 12:54:37 +05:30
Abhishek Kumar Singh
ce96494f01 refactor: moved composite query validation to struct + added supported types in error messages 2026-01-05 21:17:47 +05:30
Srikanth Chekuri
53fdb60c72 Merge branch 'main' into feat/exclude_new_series_from_alert 2026-01-02 23:19:37 +05:30
Abhishek Kumar Singh
f34c8f2084 refactor: removed new series filteration from non v5 queries 2026-01-02 13:05:25 +05:30
Abhishek Kumar Singh
bb93ef3c1f Merge branch 'main' into feat/exclude_new_series_from_alert 2026-01-02 13:01:06 +05:30
Abhishek Kumar Singh
024e351d6d refactor: moved CH metadata related query from telemetry store to metadata store 2026-01-02 12:53:42 +05:30
Abhishek Kumar Singh
0b8e68f0b7 fix: on error from FilterNewSeries, continue with original series 2025-12-30 21:03:37 +05:30
Abhishek Kumar Singh
5256ace34f refactor: update AnalyzeCompositeQuery to return an array of FilterResult 2025-12-30 20:58:43 +05:30
Abhishek Kumar Singh
8ef3b89ab3 test: changed logic to compare series in TestBaseRule_FilterNewSeries 2025-12-30 20:48:13 +05:30
Abhishek Kumar Singh
7ff332f184 refactor: updated baseRule.FilterNewSeries to accept and return array of pointer to Series 2025-12-30 20:38:25 +05:30
Abhishek Kumar Singh
aec969c897 feat: integrated ValidateCompositeQuery in API layer in rule update APIs 2025-12-29 13:40:54 +05:30
Abhishek Kumar Singh
75e67a7e35 feat: apply templates on Clickhouse query before parsing 2025-12-26 18:22:53 +05:30
Abhishek Kumar Singh
8c67f6ff7a chore: fixed tests for ValidateCompositeQuery 2025-12-26 13:21:05 +05:30
Abhishek Kumar Singh
d62ed6f003 feat: added validation for QueryBuilderFormula 2025-12-26 13:07:34 +05:30
Abhishek Kumar Singh
ef4ef47634 feat: added new error type for Query parsing, added validation for QueryBuilderJoin 2025-12-26 12:52:30 +05:30
Srikanth Chekuri
e036d928c4 Merge branch 'main' into feat/exclude_new_series_from_alert 2025-12-26 09:48:14 +05:30
Abhishek Kumar Singh
0a42c77ca7 test: added test for ValidateCompositeQuery 2025-12-24 19:16:20 +05:30
Abhishek Kumar Singh
a89bb71f2c feat: added ValidateCompositeQuery in queryparser 2025-12-24 19:14:39 +05:30
Abhishek Kumar Singh
521e5d92e7 test: fixed breaking tests post PostableRule validations 2025-12-24 17:27:09 +05:30
Abhishek Kumar Singh
09b7360513 feat: added struct based validation for PostableRule and it's child structs 2025-12-24 17:20:57 +05:30
Abhishek Kumar Singh
0fd926b8a1 feat: exclude filtering new series in Logs and Traces queries with corresponding tests 2025-12-23 13:21:30 +05:30
Abhishek Kumar Singh
e4214309f4 refactor: moved series filteration logic to new function for prom_rule 2025-12-22 14:30:50 +05:30
Abhishek Kumar Singh
297383ddca feat: don't skip series with missing metadata as we can't decide in this case if series is old/new 2025-12-22 13:51:08 +05:30
Abhishek Kumar Singh
6871eccd28 feat: return series early on no skipped index 2025-12-22 12:01:12 +05:30
Abhishek Kumar Singh
0a272b5b43 Merge branch 'main' into feat/exclude_new_series_from_alert 2025-12-22 11:47:41 +05:30
Abhishek Kumar Singh
4c4387b6d2 test: added test for FilterNewSeries 2025-12-19 23:32:26 +05:30
Abhishek Kumar Singh
cb242e2d4c refactor: removed un-used check from filterNewSeries + fix CH metadata table name 2025-12-19 18:16:16 +05:30
Abhishek Kumar Singh
c98cdc174b refactor: code reuse 2025-12-19 17:00:29 +05:30
Abhishek Kumar Singh
6fc38bac79 refactor: updated FilterNewSeries to use v3.Series a common collection to filter new series 2025-12-19 16:42:03 +05:30
Abhishek Kumar Singh
ddba7e71b7 refactor: improve type assertion for filtered collections in anomaly, prom, and threshold rules 2025-12-19 14:18:21 +05:30
Abhishek Kumar Singh
23f9ff50a7 chore: added helpful comments for FilterNewSeries 2025-12-19 13:30:54 +05:30
Abhishek Kumar Singh
55e5c871fe refactor: user real QP for FilterNewSeries test 2025-12-19 12:53:22 +05:30
Abhishek Kumar Singh
511bb176dd Merge branch 'main' into feat/exclude_new_series_from_alert 2025-12-19 11:50:55 +05:30
Abhishek Kumar Singh
4e0c0319d0 Merge branch 'main' into feat/exclude_new_series_from_alert 2025-12-18 14:49:28 +05:30
Srikanth Chekuri
9e5ea4de9c Merge branch 'main' into feat/exclude_new_series_from_alert 2025-12-15 08:27:00 +05:30
Abhishek Kumar Singh
81e0df09b8 refactor: merge conflict fix 2025-12-03 19:06:18 +05:30
Abhishek Kumar Singh
a522f39b9b Merge branch 'main' into feat/exclude_new_series_from_alert 2025-12-03 19:04:18 +05:30
Abhishek Kumar Singh
affb6eee05 feat: added function in query parser to parse composite query and get filter result 2025-12-02 18:56:19 +05:30
Abhishek Kumar Singh
13a5e9dd24 Merge branch 'feat/groups_in_ch_and_promql_queries' into feat/exclude_new_series_from_alert 2025-12-02 18:02:40 +05:30
Abhishek Kumar Singh
f620767876 refactor: used binding.JSON.BindBody to parse body 2025-12-02 17:58:39 +05:30
Srikanth Chekuri
9fb8b2bb1b Merge branch 'main' into feat/groups_in_ch_and_promql_queries 2025-12-02 17:26:54 +05:30
Abhishek Kumar Singh
30494c9196 chore: updated comments 2025-12-02 16:58:06 +05:30
Abhishek Kumar Singh
cae4cf0777 refactor: use query parser in baseRule 2025-12-02 16:55:58 +05:30
Abhishek Kumar Singh
c9538b0604 Merge branch 'feat/groups_in_ch_and_promql_queries' into feat/exclude_new_series_from_alert 2025-12-02 15:57:15 +05:30
Abhishek Kumar Singh
204cc4e5c5 Merge branch 'main' into feat/groups_in_ch_and_promql_queries 2025-12-02 15:47:30 +05:30
Abhishek Kumar Singh
6dd2ffcb64 refactor: update API handler to use new queryparser package for query parsing 2025-11-27 20:11:16 +05:30
Abhishek Kumar Singh
13c15249c5 Merge branch 'feat/groups_in_ch_and_promql_queries' into feat/exclude_new_series_from_alert 2025-11-27 19:51:27 +05:30
Abhishek Kumar Singh
8419ca7982 Merge branch 'main' into feat/exclude_new_series_from_alert 2025-11-27 19:49:57 +05:30
Abhishek Kumar Singh
6b189b14c6 chore: updated series collection to labelledcollection 2025-11-27 19:45:01 +05:30
Abhishek Kumar Singh
550c49fab0 feat: created queryparser package with APIs 2025-11-27 19:37:41 +05:30
Abhishek Kumar Singh
5b6ff92648 refactor: moved package queryfilterextractor in pkg/queryparser 2025-11-27 19:14:55 +05:30
Abhishek Kumar Singh
45954b38fa Merge branch 'main' into feat/exclude_new_series_from_alert 2025-11-25 12:27:00 +05:30
Abhishek Kumar Singh
ceade6c7d7 refactor: moved query parser APIs to queryfilterextractor package 2025-11-24 14:15:44 +05:30
Srikanth Chekuri
f15c88836c Merge branch 'main' into feat/groups_in_ch_and_promql_queries 2025-11-21 15:26:44 +05:30
Abhishek Kumar Singh
9af45643a9 refactor: created valuer enum for extractor types + exposed alias along with column name for groups 2025-11-21 15:16:03 +05:30
Srikanth Chekuri
d15e974e9f Merge branch 'main' into feat/groups_in_ch_and_promql_queries 2025-11-20 22:59:29 +05:30
Abhishek Kumar Singh
71e752a015 refactor: moved api request and response to types 2025-11-20 20:00:51 +05:30
Abhishek Kumar Singh
3407760585 Merge branch 'feat/queryfilterextractor' into feat/groups_in_ch_and_promql_queries 2025-11-20 19:02:11 +05:30
Abhishek Kumar Singh
58a0e36869 refactor: return relevant errors when parsing query 2025-11-20 19:01:40 +05:30
Abhishek Kumar Singh
5d688eb919 test: updated test case to include CH query error case 2025-11-20 18:55:45 +05:30
Abhishek Kumar Singh
c0f237a7c4 refactor: use render.Error instead of RespondError 2025-11-20 18:12:47 +05:30
Abhishek Kumar Singh
8ce8bc940a Merge branch 'feat/queryfilterextractor' into feat/groups_in_ch_and_promql_queries 2025-11-20 17:42:46 +05:30
Abhishek Kumar Singh
abce05b289 feat: exposed group name from column info 2025-11-20 17:38:00 +05:30
Abhishek Kumar Singh
ccd25c3b67 refactor: removed redundant checks 2025-11-20 17:24:36 +05:30
Abhishek Kumar Singh
ddb98da217 refactor: change function visibility for extractMetricAndGroupBys 2025-11-20 15:56:45 +05:30
Abhishek Kumar Singh
18d63d2e66 fix: close CH rows on reading each chunk 2025-11-20 15:44:07 +05:30
Abhishek Kumar Singh
67c108f021 feat: added new series filter in anomaly rule as well 2025-11-20 15:26:14 +05:30
Abhishek Kumar Singh
02939cafa4 refactor: added GetFirstSeenFromMetricMetadata in clickhouse reader, removed caching for query result 2025-11-20 15:24:14 +05:30
Abhishek Kumar Singh
e62b070c1e fix: created interface for standard series filtration from both prom and threshold rule 2025-11-20 14:35:25 +05:30
Srikanth Chekuri
be0a7d8fd4 Merge branch 'main' into feat/queryfilterextractor 2025-11-20 02:48:43 +05:30
Srikanth Chekuri
419044dc9e Merge branch 'main' into feat/queryfilterextractor 2025-11-19 22:42:12 +05:30
Abhishek Kumar Singh
223465d6d5 Merge branch 'feat/queryfilterextractor' into feat/exclude_new_series_from_alert 2025-11-19 20:56:01 +05:30
Abhishek Kumar Singh
cec99674fa feat: exclude new samples from alert evals 2025-11-19 20:52:59 +05:30
Abhishek Kumar Singh
0ccf58ac7a fix: common behaviour across CH and PromQL originField and originExpr 2025-11-19 20:36:00 +05:30
Abhishek Kumar Singh
b08d636d6a refactor: updated query type check with constants 2025-11-19 20:12:20 +05:30
Abhishek Kumar Singh
f6141bc6c5 feat: added API to extract metric names and groups from CH or PromQL query 2025-11-19 16:48:03 +05:30
Srikanth Chekuri
bfe49f0f1b Merge branch 'main' into feat/queryfilterextractor 2025-11-19 14:21:35 +05:30
Abhishek Kumar Singh
8e8064c5c1 fix: ci lint issues 2025-11-19 13:28:04 +05:30
Abhishek Kumar Singh
4392341467 improv: added comments for CH originparser + some code improv 2025-11-19 12:58:34 +05:30
Abhishek Kumar Singh
521d8e4f4d improv: added more tests for promql and added comments 2025-11-19 12:15:20 +05:30
Abhishek Kumar Singh
b6103f371f Merge branch 'main' into feat/queryfilterextractor 2025-11-18 21:09:45 +05:30
Abhishek Kumar Singh
43283506db Merge branch 'feat/queryfilterextractor_complex' into feat/queryfilterextractor 2025-11-18 15:22:24 +05:30
Abhishek Kumar Singh
694d9958db improv: integrated origin field extraction and updated tests to check for origin fields 2025-11-18 15:03:24 +05:30
Abhishek Kumar Singh
addee4c0a5 feat: added origin field extractor for ch query 2025-11-18 14:36:03 +05:30
Abhishek Kumar Singh
f10cf7ac04 refactor: code organisation 2025-11-17 16:27:17 +05:30
Abhishek Kumar Singh
b336678639 fix: CH test cases 2025-11-17 15:01:32 +05:30
Abhishek Kumar Singh
c438b3444e refactor: removed GroupBy from FilterResult 2025-11-17 14:34:46 +05:30
Abhishek Kumar Singh
b624414507 feat: extract column origin from subquery and join before searching directly 2025-11-17 13:42:47 +05:30
Abhishek Kumar Singh
bde7963444 feat: implemented extractOriginFromSelectItem which will find the given columnName till the very end to return the origin column with given name 2025-11-17 09:00:18 +05:30
Abhishek Kumar Singh
2df93ff217 feat: extract column origin from query and add in column info 2025-11-16 10:20:38 +05:30
Abhishek Kumar Singh
f496a6ecde improv: updated result for queryfilterextractor to return column with alias 2025-11-16 08:58:33 +05:30
Abhishek Kumar Singh
599e230a72 feat: added NewExtractor function for creating extractor 2025-11-13 13:52:32 +05:30
Abhishek Kumar Singh
9a0e32ff3b refactor: removed redundant non nil checks 2025-11-13 13:41:51 +05:30
Abhishek Kumar Singh
5fe2732698 refactor: removed unused extractFromAnyFunction 2025-11-13 13:20:59 +05:30
Abhishek Kumar Singh
4993a44ecc refactor: removed unused cases + added comments 2025-11-13 12:59:35 +05:30
Abhishek Kumar Singh
ebd575a16b chore: comments + remove usage of seen map in extractGroupFromGroupByClause 2025-11-12 19:26:44 +05:30
Abhishek Kumar Singh
666582337e feat: support for CTE in clickhouse queryfilterextractor 2025-11-12 18:58:30 +05:30
Abhishek Kumar Singh
23512ab05c feat: added support for promql in queryfilterextractor 2025-11-10 20:50:42 +05:30
Abhishek Kumar Singh
1423749529 feat: added filter extractor interface and clickhouse impl with tests 2025-11-10 20:05:39 +05:30
19 changed files with 1177 additions and 65 deletions

View File

@@ -1407,6 +1407,10 @@ func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("rule not found")}, nil)
return
}
if apiErr, ok := err.(*model.ApiError); ok {
RespondError(w, apiErr, nil)
return
}
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
@@ -1437,6 +1441,10 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("rule not found")}, nil)
return
}
if apiErr, ok := err.(*model.ApiError); ok {
RespondError(w, apiErr, nil)
return
}
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
@@ -1457,6 +1465,10 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) {
rule, err := aH.ruleManager.CreateRule(r.Context(), string(body))
if err != nil {
if apiErr, ok := err.(*model.ApiError); ok {
RespondError(w, apiErr, nil)
return
}
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}

View File

@@ -887,7 +887,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
keys := make([]string, 0, len(queryRangeParams.Variables))
querytemplate.AssignReservedVarsV3(queryRangeParams)
querytemplate.AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
for k := range queryRangeParams.Variables {
keys = append(keys, k)
@@ -927,7 +927,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
continue
}
querytemplate.AssignReservedVarsV3(queryRangeParams)
querytemplate.AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
keys := make([]string, 0, len(queryRangeParams.Variables))

View File

@@ -1,8 +1,17 @@
package converter
import "github.com/SigNoz/signoz/pkg/errors"
// Unit represents a unit of measurement
type Unit string
// Validate reports whether u is one of the units accepted by
// IsValidUnit, returning an invalid-input error for anything else.
func (u Unit) Validate() error {
	if IsValidUnit(u) {
		return nil
	}
	return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit: %s", u)
}
// Value represents a value with a unit of measurement
type Value struct {
F float64
@@ -60,6 +69,27 @@ func FromUnit(u Unit) Converter {
}
}
// IsValidUnit returns true if the given unit is valid.
//
// The accepted set below mirrors the unit identifiers used by the
// frontend unit picker (Grafana-style short codes plus UCUM-style
// codes such as "By" and "{count}/s"). Any unit not listed here is
// rejected. NOTE(review): keep this list in sync with the frontend's
// unit options — TODO confirm the authoritative source of this list.
func IsValidUnit(u Unit) bool {
	switch u {
	// Duration unit
	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min",
		// Data unit
		"bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy",
		// Data rate unit
		"binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s",
		// Percent unit
		"percent", "percentunit", "%",
		// Bool unit
		"bool", "bool_yes_no", "bool_true_false", "bool_1_0",
		// Throughput unit
		"cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm", "{count}/s", "{ops}/s", "{req}/s", "{read}/s", "{write}/s", "{iops}/s", "{count}/min", "{ops}/min", "{read}/min", "{write}/min":
		return true
	default:
		return false
	}
}
func UnitToName(u string) string {
switch u {
case "ns":

View File

@@ -9,8 +9,9 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/pkg/errors"
"go.uber.org/zap"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -601,43 +602,166 @@ func (c *CompositeQuery) Sanitize() {
func (c *CompositeQuery) Validate() error {
if c == nil {
return fmt.Errorf("composite query is required")
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"composite query is required",
)
}
if c.BuilderQueries == nil && c.ClickHouseQueries == nil && c.PromQueries == nil && len(c.Queries) == 0 {
return fmt.Errorf("composite query must contain at least one query type")
if len(c.Queries) == 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"at least one query is required",
)
}
if c.QueryType == QueryTypeBuilder {
for name, query := range c.BuilderQueries {
if err := query.Validate(c.PanelType); err != nil {
return fmt.Errorf("builder query %s is invalid: %w", name, err)
}
// Validate unit if supplied
if c.Unit != "" {
unit := converter.Unit(c.Unit)
err := unit.Validate()
if err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid unit: %s",
err.Error(),
)
}
}
if c.QueryType == QueryTypeClickHouseSQL {
for name, query := range c.ClickHouseQueries {
if err := query.Validate(); err != nil {
return fmt.Errorf("clickhouse query %s is invalid: %w", name, err)
// Validate each query
for i, envelope := range c.Queries {
queryId := qbtypes.GetQueryIdentifier(envelope, i)
switch envelope.Type {
case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
queryId,
err.Error(),
)
}
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
queryId,
err.Error(),
)
}
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
queryId,
err.Error(),
)
}
default:
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query spec type for %s",
queryId,
)
}
case qbtypes.QueryTypePromQL:
spec, ok := envelope.Spec.(qbtypes.PromQuery)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Query == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
queryId,
)
}
if err := validatePromQLQuery(spec.Query); err != nil {
return err
}
case qbtypes.QueryTypeClickHouseSQL:
spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Query == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
queryId,
)
}
if err := validateClickHouseQuery(spec.Query); err != nil {
return err
}
case qbtypes.QueryTypeFormula:
spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if err := spec.Validate(); err != nil {
return err
}
case qbtypes.QueryTypeJoin:
spec, ok := envelope.Spec.(qbtypes.QueryBuilderJoin)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if err := spec.Validate(); err != nil {
return err
}
case qbtypes.QueryTypeTraceOperator:
spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
err := spec.ValidateTraceOperator(c.Queries)
if err != nil {
return err
}
default:
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query type '%s' for %s",
envelope.Type,
queryId,
).WithAdditional(
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql, trace_operator",
)
}
}
if c.QueryType == QueryTypePromQL {
for name, query := range c.PromQueries {
if err := query.Validate(); err != nil {
return fmt.Errorf("prom query %s is invalid: %w", name, err)
}
}
}
if err := c.PanelType.Validate(); err != nil {
return fmt.Errorf("panel type is invalid: %w", err)
}
if err := c.QueryType.Validate(); err != nil {
return fmt.Errorf("query type is invalid: %w", err)
// Check if all queries are disabled
if allDisabled := checkQueriesDisabled(c); allDisabled {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"all queries are disabled - at least one query must be enabled",
)
}
return nil
@@ -1203,7 +1327,7 @@ func (f *FilterSet) Scan(src interface{}) error {
func (f *FilterSet) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(f)
if err != nil {
return nil, errors.Wrap(err, "could not serialize FilterSet to JSON")
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "could not serialize FilterSet to JSON")
}
return filterSetJson, nil
}

View File

@@ -0,0 +1,137 @@
package v3
import (
"bytes"
"fmt"
"text/template"
"time"
clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
"github.com/SigNoz/signoz/pkg/errors"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/prometheus/prometheus/promql/parser"
)
// QueryParseError describes a syntax error found while parsing a
// PromQL or ClickHouse query. StartPosition/EndPosition are optional
// character offsets into Query; both must be set for the position to
// be included in the error text.
type QueryParseError struct {
	StartPosition *int
	EndPosition   *int
	ErrorMessage  string
	Query         string
}

// Error implements the error interface. The position suffix is only
// emitted when both offsets are known.
func (e *QueryParseError) Error() string {
	if e.StartPosition == nil || e.EndPosition == nil {
		return fmt.Sprintf("query parse error: %s", e.ErrorMessage)
	}
	return fmt.Sprintf("query parse error: %s at position %d:%d", e.ErrorMessage, *e.StartPosition, *e.EndPosition)
}
// validatePromQLQuery validates a PromQL query syntax using the
// Prometheus parser. Syntax errors are converted into a
// *QueryParseError carrying the position of the first reported error;
// any other parser error is returned unchanged.
func validatePromQLQuery(query string) error {
	_, err := parser.ParseExpr(query)
	if err == nil {
		return nil
	}
	syntaxErrs, ok := err.(parser.ParseErrors)
	if !ok {
		// Not a structured syntax error; surface it as-is.
		return err
	}
	// Only the first syntax error is reported to the caller.
	first := syntaxErrs[0]
	start := int(first.PositionRange.Start)
	end := int(first.PositionRange.End)
	return &QueryParseError{
		StartPosition: &start,
		EndPosition:   &end,
		ErrorMessage:  first.Error(),
		Query:         query,
	}
}
// validateClickHouseQuery validates a ClickHouse SQL query syntax using
// the ClickHouse parser. Reserved template variables (the Go
// text/template placeholders supported in dashboard queries) are first
// rendered with dummy values so that templated queries parse cleanly.
func validateClickHouseQuery(query string) error {
	// Dummy time range used only to render the reserved template vars.
	vars := make(map[string]interface{})
	start := time.Now().UnixMilli()
	querytemplate.AssignReservedVars(vars, start, start+1000)

	// Render the template before handing the SQL to the parser.
	tmpl, err := template.New("clickhouse-query").Parse(query)
	if err != nil {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"failed to parse clickhouse query: %s",
			err.Error(),
		)
	}

	var rendered bytes.Buffer
	if err := tmpl.Execute(&rendered, vars); err != nil {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"failed to execute clickhouse query template: %s",
			err.Error(),
		)
	}

	// Parse the ClickHouse query with the default template variables applied.
	if _, err := clickhouse.NewParser(rendered.String()).ParseStmts(); err != nil {
		// TODO: errors returned here is errors.errorString, rather than using regex to parser the error
		// we should think on using some other library that parses the CH query in more accurate manner,
		// current CH parser only does very minimal checks.
		// Sample Error: "line 0:36 expected table name or subquery, got ;\nSELECT department, avg(salary) FROM ;\n ^\n"
		return &QueryParseError{
			ErrorMessage: err.Error(),
			Query:        query,
		}
	}
	return nil
}
// checkQueriesDisabled checks if all queries are disabled. Returns true
// if all queries are disabled, false otherwise. A query whose spec does
// not match its declared type is treated as disabled here; callers are
// expected to have rejected such queries during validation already.
func checkQueriesDisabled(compositeQuery *CompositeQuery) bool {
	for _, env := range compositeQuery.Queries {
		// enabled becomes true only when a recognized spec is not disabled.
		enabled := false
		switch env.Type {
		case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
			switch spec := env.Spec.(type) {
			case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
				enabled = !spec.Disabled
			case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
				enabled = !spec.Disabled
			case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
				enabled = !spec.Disabled
			}
		case qbtypes.QueryTypeFormula:
			if spec, ok := env.Spec.(qbtypes.QueryBuilderFormula); ok {
				enabled = !spec.Disabled
			}
		case qbtypes.QueryTypeTraceOperator:
			if spec, ok := env.Spec.(qbtypes.QueryBuilderTraceOperator); ok {
				enabled = !spec.Disabled
			}
		case qbtypes.QueryTypeJoin:
			if spec, ok := env.Spec.(qbtypes.QueryBuilderJoin); ok {
				enabled = !spec.Disabled
			}
		case qbtypes.QueryTypePromQL:
			if spec, ok := env.Spec.(qbtypes.PromQuery); ok {
				enabled = !spec.Disabled
			}
		case qbtypes.QueryTypeClickHouseSQL:
			if spec, ok := env.Spec.(qbtypes.ClickHouseQuery); ok {
				enabled = !spec.Disabled
			}
		}
		if enabled {
			return false
		}
	}
	// If we reach here, all queries are disabled (or the list is empty).
	return true
}

View File

@@ -0,0 +1,482 @@
package v3
import (
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/require"
)
func TestValidateCompositeQuery(t *testing.T) {
tests := []struct {
name string
compositeQuery *CompositeQuery
wantErr bool
errContains string
}{
{
name: "nil composite query should return error",
compositeQuery: nil,
wantErr: true,
errContains: "composite query is required",
},
{
name: "empty queries array should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{},
},
wantErr: true,
errContains: "at least one query is required",
},
{
name: "invalid input error",
compositeQuery: &CompositeQuery{
Unit: "some_invalid_unit",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
},
},
wantErr: true,
errContains: "invalid unit",
},
{
name: "valid metric builder query should pass",
compositeQuery: &CompositeQuery{
Unit: "bytes", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
},
},
wantErr: false,
},
{
name: "valid log builder query should pass",
compositeQuery: &CompositeQuery{
Unit: "µs", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Name: "log_query",
Signal: telemetrytypes.SignalLogs,
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
},
},
},
},
wantErr: false,
},
{
name: "valid trace builder query should pass",
compositeQuery: &CompositeQuery{
Unit: "MBs", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "trace_query",
Signal: telemetrytypes.SignalTraces,
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
},
},
},
},
wantErr: false,
},
{
name: "valid PromQL query should pass",
compositeQuery: &CompositeQuery{
Unit: "{req}/s", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
},
},
wantErr: false,
},
{
name: "valid ClickHouse query should pass",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
},
},
},
},
wantErr: false,
},
{
name: "valid formula query should pass",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeFormula,
Spec: qbtypes.QueryBuilderFormula{
Name: "formula_query",
Expression: "A + B",
},
},
},
},
wantErr: false,
},
{
name: "valid join query should pass",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeJoin,
Spec: qbtypes.QueryBuilderJoin{
Name: "join_query",
Left: qbtypes.QueryRef{Name: "A"},
Right: qbtypes.QueryRef{Name: "B"},
Type: qbtypes.JoinTypeInner,
On: "service_name",
},
},
},
},
wantErr: false,
},
{
name: "valid trace operator query should pass",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
},
},
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "B",
Signal: telemetrytypes.SignalTraces,
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
},
},
{
Type: qbtypes.QueryTypeTraceOperator,
Spec: qbtypes.QueryBuilderTraceOperator{
Name: "trace_operator",
Expression: "A && B",
},
},
},
},
wantErr: false,
},
{
name: "invalid metric builder query - missing aggregation should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{},
},
},
},
},
wantErr: true,
errContains: "invalid",
},
{
name: "invalid PromQL query - empty query should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "",
},
},
},
},
wantErr: true,
errContains: "query expression is required",
},
{
name: "invalid PromQL query - syntax error should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m",
},
},
},
},
wantErr: true,
errContains: "unclosed left parenthesis",
},
{
name: "invalid ClickHouse query - empty query should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "",
},
},
},
},
wantErr: true,
errContains: "query expression is required",
},
{
name: "invalid ClickHouse query - syntax error should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "SELECT * FROM metrics WHERE",
},
},
},
},
wantErr: true,
errContains: "query parse error",
},
{
name: "invalid formula query - empty expression should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeFormula,
Spec: qbtypes.QueryBuilderFormula{
Name: "formula_query",
Expression: "",
},
},
},
},
wantErr: true,
errContains: "formula expression cannot be blank",
},
{
name: "invalid trace operator query - empty expression should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeTraceOperator,
Spec: qbtypes.QueryBuilderTraceOperator{
Name: "trace_operator",
Expression: "",
},
},
},
},
wantErr: true,
errContains: "expression cannot be empty",
},
{
name: "all queries disabled should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
Disabled: true,
},
},
},
},
wantErr: true,
errContains: "all queries are disabled",
},
{
name: "mixed disabled and enabled queries should pass",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
Disabled: false,
},
},
},
},
wantErr: false,
},
{
name: "multiple valid queries should pass",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
},
},
},
},
wantErr: false,
},
{
name: "invalid query in multiple queries should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "invalid promql syntax [",
},
},
},
},
wantErr: true,
errContains: "query parse error",
},
{
name: "unknown query type should return error",
compositeQuery: &CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryType{String: valuer.NewString("invalid_query_type")},
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
},
},
wantErr: true,
errContains: "unknown query type",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.compositeQuery.Validate()
if tt.wantErr {
require.Error(t, err)
if tt.errContains != "" {
require.Contains(t, err.Error(), tt.errContains)
}
} else {
require.NoError(t, err)
}
})
}
}

View File

@@ -353,6 +353,11 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID)
return err
}
err = parsedRule.RuleCondition.CompositeQuery.Validate()
if err != nil {
return model.BadRequest(err)
}
existingRule, err := m.ruleStore.GetStoredRule(ctx, id)
if err != nil {
return err
@@ -552,6 +557,11 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge
return nil, err
}
err = parsedRule.RuleCondition.CompositeQuery.Validate()
if err != nil {
return nil, model.BadRequest(err)
}
now := time.Now()
storedRule := &ruletypes.Rule{
Identifiable: types.Identifiable{
@@ -940,6 +950,11 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID)
return nil, err
}
err = storedRule.RuleCondition.CompositeQuery.Validate()
if err != nil {
return nil, model.BadRequest(err)
}
// deploy or un-deploy task according to patched (new) rule state
if err := m.syncRuleStateWithTask(ctx, orgID, taskName, &storedRule); err != nil {
zap.L().Error("failed to sync stored rule state with the task", zap.String("taskName", taskName), zap.Error(err))
@@ -990,6 +1005,12 @@ func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleS
if err != nil {
return 0, model.BadRequest(err)
}
err = parsedRule.RuleCondition.CompositeQuery.Validate()
if err != nil {
return 0, model.BadRequest(err)
}
if !parsedRule.NotificationSettings.UsePolicy {
parsedRule.NotificationSettings.GroupBy = append(parsedRule.NotificationSettings.GroupBy, ruletypes.LabelThresholdName)
}

View File

@@ -159,7 +159,7 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
var resultVector ruletypes.Vector
for _, series := range matrixToProcess {
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {

View File

@@ -133,7 +133,7 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v
Variables: make(map[string]interface{}, 0),
NoCache: true,
}
querytemplate.AssignReservedVarsV3(params)
querytemplate.AssignReservedVars(params.Variables, params.Start, params.End)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
if chQuery.Disabled {
continue

View File

@@ -2,26 +2,21 @@ package querytemplate
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)
// AssignReservedVars assigns values for go template vars. assumes that
// model.QueryRangeParamsV3.Start and End are Unix Nano timestamps
func AssignReservedVarsV3(queryRangeParams *v3.QueryRangeParamsV3) {
queryRangeParams.Variables["start_timestamp"] = queryRangeParams.Start / 1000
queryRangeParams.Variables["end_timestamp"] = queryRangeParams.End / 1000
func AssignReservedVars(variables map[string]interface{}, start int64, end int64) {
variables["start_timestamp"] = start / 1000
variables["end_timestamp"] = end / 1000
queryRangeParams.Variables["start_timestamp_ms"] = queryRangeParams.Start
queryRangeParams.Variables["end_timestamp_ms"] = queryRangeParams.End
variables["start_timestamp_ms"] = start
variables["end_timestamp_ms"] = end
queryRangeParams.Variables["SIGNOZ_START_TIME"] = queryRangeParams.Start
queryRangeParams.Variables["SIGNOZ_END_TIME"] = queryRangeParams.End
variables["SIGNOZ_START_TIME"] = start
variables["SIGNOZ_END_TIME"] = end
queryRangeParams.Variables["start_timestamp_nano"] = queryRangeParams.Start * 1e6
queryRangeParams.Variables["end_timestamp_nano"] = queryRangeParams.End * 1e6
queryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.Start/1000)
queryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.End/1000)
variables["start_timestamp_nano"] = start * 1e6
variables["end_timestamp_nano"] = end * 1e6
variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", start/1000)
variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", end/1000)
}

View File

@@ -553,6 +553,11 @@ func (f Function) Copy() Function {
return c
}
// Validate reports whether the function is well formed by delegating
// to FunctionName.Validate on its Name. The function's arguments are
// not checked here — only that the name is a recognized function.
func (f Function) Validate() error {
	return f.Name.Validate()
}
type LimitBy struct {
// keys to limit by
Keys []string `json:"keys"`

View File

@@ -73,6 +73,43 @@ func (f *QueryBuilderFormula) UnmarshalJSON(data []byte) error {
return nil
}
// Validate checks that the formula has a non-blank name and a
// non-blank expression, and that every attached function references a
// known function name. It returns an invalid-input error describing
// the first problem found, or nil when the formula is valid.
func (f QueryBuilderFormula) Validate() error {
	// Name is required so that other queries and error messages can
	// refer to this formula unambiguously.
	if strings.TrimSpace(f.Name) == "" {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"formula name cannot be blank",
		)
	}
	// Expression is the formula itself; without it there is nothing to
	// evaluate.
	if strings.TrimSpace(f.Expression) == "" {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"formula expression cannot be blank",
		)
	}
	// Validate functions if present. Name is guaranteed non-blank at
	// this point (checked above), so the identifier can always include
	// the formula name — the previous `if f.Name != ""` fallback was
	// dead code and has been removed.
	for i, fn := range f.Functions {
		if err := fn.Validate(); err != nil {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"invalid function #%d in formula '%s': %s",
				i+1,
				f.Name,
				err.Error(),
			)
		}
	}
	return nil
}
// small container to store the query name and index or alias reference
// for a variable in the formula expression
// read below for more details on aggregation references

View File

@@ -5,6 +5,7 @@ import (
"slices"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -33,6 +34,37 @@ var (
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)
// Validate returns nil when fn is one of the registered function
// names and an invalid-input error for anything else.
func (fn FunctionName) Validate() error {
	switch fn {
	case FunctionNameCutOffMin, FunctionNameCutOffMax,
		FunctionNameClampMin, FunctionNameClampMax,
		FunctionNameAbsolute, FunctionNameRunningDiff,
		FunctionNameLog2, FunctionNameLog10,
		FunctionNameCumulativeSum,
		FunctionNameEWMA3, FunctionNameEWMA5, FunctionNameEWMA7,
		FunctionNameMedian3, FunctionNameMedian5, FunctionNameMedian7,
		FunctionNameTimeShift, FunctionNameAnomaly, FunctionNameFillZero:
		return nil
	}
	return errors.NewInvalidInputf(
		errors.CodeInvalidInput,
		"invalid function name: %s",
		fn.StringValue(),
	)
}
// ApplyFunction applies the given function to the result data
func ApplyFunction(fn Function, result *TimeSeries) *TimeSeries {
// Extract the function name and arguments

View File

@@ -1,6 +1,9 @@
package querybuildertypesv5
import (
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -16,6 +19,15 @@ var (
JoinTypeCross = JoinType{valuer.NewString("cross")}
)
// Validate returns nil for the supported join types (inner, left,
// right, full, cross) and an invalid-input error for anything else.
func (j JoinType) Validate() error {
	switch j {
	case JoinTypeInner, JoinTypeLeft, JoinTypeRight, JoinTypeFull, JoinTypeCross:
		return nil
	}
	return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid join type: %s, supported values: inner, left, right, full, cross", j.StringValue())
}
type QueryRef struct {
Name string `json:"name"`
}
@@ -53,6 +65,25 @@ type QueryBuilderJoin struct {
Functions []Function `json:"functions,omitempty"`
}
// Validate checks that the join has a name, that both sides reference
// named queries, that the join type is one of the supported kinds, and
// that an "on" condition is present. The first failing check wins.
func (q *QueryBuilderJoin) Validate() error {
	blankChecks := []struct {
		value   string
		message string
	}{
		{q.Name, "name is required"},
		{q.Left.Name, "left name is required"},
		{q.Right.Name, "right name is required"},
	}
	for _, check := range blankChecks {
		if strings.TrimSpace(check.value) == "" {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "%s", check.message)
		}
	}
	if err := q.Type.Validate(); err != nil {
		return err
	}
	if strings.TrimSpace(q.On) == "" {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "on is required")
	}
	return nil
}
// Copy creates a deep copy of QueryBuilderJoin
func (q QueryBuilderJoin) Copy() QueryBuilderJoin {
c := q

View File

@@ -10,8 +10,8 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
// GetQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func GetQueryIdentifier(envelope QueryEnvelope, index int) string {
switch envelope.Type {
case QueryTypeBuilder, QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
@@ -567,7 +567,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
switch spec := envelope.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
@@ -583,7 +583,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
}
case QueryBuilderQuery[LogAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
@@ -599,7 +599,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
}
case QueryBuilderQuery[MetricAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
@@ -614,7 +614,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
queryNames[spec.Name] = true
}
default:
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown spec type for %s",
@@ -625,7 +625,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// Formula validation is handled separately
spec, ok := envelope.Spec.(QueryBuilderFormula)
if !ok {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -633,7 +633,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Expression == "" {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"expression is required for %s",
@@ -644,7 +644,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// Join validation is handled separately
_, ok := envelope.Spec.(QueryBuilderJoin)
if !ok {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -654,7 +654,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
case QueryTypeTraceOperator:
spec, ok := envelope.Spec.(QueryBuilderTraceOperator)
if !ok {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -662,7 +662,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Expression == "" {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"expression is required for %s",
@@ -673,7 +673,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// PromQL validation is handled separately
spec, ok := envelope.Spec.(PromQuery)
if !ok {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -681,7 +681,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Query == "" {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
@@ -692,7 +692,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// ClickHouse SQL validation is handled separately
spec, ok := envelope.Spec.(ClickHouseQuery)
if !ok {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -700,7 +700,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Query == "" {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
@@ -708,7 +708,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
default:
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query type '%s' for %s",
@@ -735,7 +735,7 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
// Validate each query
for i, envelope := range c.Queries {
if err := validateQueryEnvelope(envelope, requestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
queryId := GetQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
}

View File

@@ -8,6 +8,7 @@ import (
"strings"
"time"
signozError "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
@@ -121,6 +122,107 @@ type RuleCondition struct {
Thresholds *RuleThresholdData `json:"thresholds,omitempty"`
}
// UnmarshalJSON parses a rule condition from JSON and performs
// field-level validation in the same step, so a RuleCondition that
// fails these checks is rejected at decode time. All validation
// failures are collected and returned together via signozError.Join
// rather than stopping at the first problem.
func (rc *RuleCondition) UnmarshalJSON(data []byte) error {
	// Alias has the same fields as RuleCondition but none of its
	// methods, which prevents json.Unmarshal from recursively invoking
	// this UnmarshalJSON implementation.
	type Alias RuleCondition
	aux := (*Alias)(rc)
	if err := json.Unmarshal(data, aux); err != nil {
		return signozError.NewInvalidInputf(signozError.CodeInvalidInput, "failed to parse rule condition json: %v", err)
	}
	var errs []error
	// CompositeQuery is mandatory; only its presence is checked here —
	// deeper validation of its contents happens elsewhere.
	if rc.CompositeQuery == nil {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
	}
	// Alerting on absent data is meaningless without a non-zero
	// absence window.
	if rc.AlertOnAbsent && rc.AbsentFor == 0 {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "absentFor must be greater than 0 when alertOnAbsent is true"))
	}
	// Seasonality is optional; when set it must be one of the
	// supported values (empty passes isValidSeasonality).
	if !isValidSeasonality(rc.Seasonality) {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid seasonality: %s, supported values: hourly, daily, weekly", rc.Seasonality))
	}
	// When a specific query is selected, it must refer to a query that
	// actually exists in the composite query (skipped when the
	// composite query itself is missing — that error is reported above).
	if rc.SelectedQuery != "" && rc.CompositeQuery != nil {
		queryNames := getAllQueryNames(rc.CompositeQuery)
		if _, exists := queryNames[rc.SelectedQuery]; !exists {
			errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "selected query name '%s' does not match any query in composite query", rc.SelectedQuery))
		}
	}
	// Requiring a minimum number of points only makes sense when that
	// minimum is positive.
	if rc.RequireMinPoints && rc.RequiredNumPoints <= 0 {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "requiredNumPoints must be greater than 0 when requireMinPoints is true"))
	}
	if len(errs) > 0 {
		return signozError.Join(errs...)
	}
	return nil
}
// getAllQueryNames collects the names of every query in the composite
// query's v5 envelopes and returns them as a set for O(1) membership
// lookups. Unnamed queries and unrecognized spec types are skipped.
func getAllQueryNames(compositeQuery *v3.CompositeQuery) map[string]struct{} {
	queryNames := make(map[string]struct{})
	if compositeQuery == nil {
		return queryNames
	}
	// Ranging over a nil Queries slice is a no-op, so no explicit nil
	// check is needed for it.
	for _, query := range compositeQuery.Queries {
		// Extract the name once per envelope; the insertion below is
		// shared by every spec type.
		var name string
		switch spec := query.Spec.(type) {
		case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
			name = spec.Name
		case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
			name = spec.Name
		case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
			name = spec.Name
		case qbtypes.QueryBuilderFormula:
			name = spec.Name
		case qbtypes.QueryBuilderTraceOperator:
			name = spec.Name
		case qbtypes.PromQuery:
			name = spec.Name
		case qbtypes.ClickHouseQuery:
			name = spec.Name
		}
		if name != "" {
			queryNames[name] = struct{}{}
		}
	}
	return queryNames
}
// isValidSeasonality reports whether seasonality is empty (the field
// is optional) or one of the supported values: hourly, daily, weekly.
func isValidSeasonality(seasonality string) bool {
	switch seasonality {
	case "", "hourly", "daily", "weekly":
		return true
	}
	return false
}
func (rc *RuleCondition) GetSelectedQueryName() string {
if rc != nil {
if rc.SelectedQuery != "" {

View File

@@ -304,6 +304,39 @@ func isValidLabelValue(v string) bool {
return utf8.ValidString(v)
}
// isValidAlertType reports whether alertType is one of the supported
// alert type enum values (metric, traces, logs, exceptions).
func isValidAlertType(alertType AlertType) bool {
	return alertType == AlertTypeMetric ||
		alertType == AlertTypeTraces ||
		alertType == AlertTypeLogs ||
		alertType == AlertTypeExceptions
}
// isValidRuleType reports whether ruleType is one of the supported
// rule type enum values (threshold, promql, anomaly).
func isValidRuleType(ruleType RuleType) bool {
	return ruleType == RuleTypeThreshold ||
		ruleType == RuleTypeProm ||
		ruleType == RuleTypeAnomaly
}
// isValidVersion reports whether version is empty (the field is
// optional) or one of the supported schema versions: v3, v4, v5.
func isValidVersion(version string) bool {
	switch version {
	case "", "v3", "v4", "v5":
		return true
	}
	return false
}
func isAllQueriesDisabled(compositeQuery *v3.CompositeQuery) bool {
if compositeQuery == nil {
return false
@@ -359,6 +392,26 @@ func (r *PostableRule) validate() error {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
}
// Validate AlertName - required field
if r.AlertName == "" {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "alert name is required"))
}
// Validate AlertType - must be one of the allowed enum values
if !isValidAlertType(r.AlertType) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid alert type: %s, must be one of: METRIC_BASED_ALERT, TRACES_BASED_ALERT, LOGS_BASED_ALERT, EXCEPTIONS_BASED_ALERT", r.AlertType))
}
// Validate RuleType - must be one of the allowed enum values
if !isValidRuleType(r.RuleType) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid rule type: %s, must be one of: threshold_rule, promql_rule, anomaly_rule", r.RuleType))
}
// Validate Version - must be one of the supported versions if provided
if !isValidVersion(r.Version) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid version: %s, must be one of: v3, v4, v5", r.Version))
}
for k, v := range r.Labels {
if !isValidLabelName(k) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid label name: %s", k))

View File

@@ -111,8 +111,10 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"queryName": "A",
"expression": "A",
"disabled": false,
"aggregateAttribute": {
@@ -149,10 +151,12 @@ func TestParseIntoRule(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "DefaultsRule",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"disabled": false,
@@ -187,9 +191,11 @@ func TestParseIntoRule(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "PromQLRule",
"alertType": "METRIC_BASED_ALERT",
"condition": {
"compositeQuery": {
"queryType": "promql",
"panelType": "graph",
"promQueries": {
"A": {
"query": "rate(http_requests_total[5m])",
@@ -255,10 +261,12 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "SeverityLabelTest",
"alertType": "METRIC_BASED_ALERT",
"schemaVersion": "v1",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"aggregateAttribute": {
@@ -343,10 +351,12 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "NoLabelsTest",
"alertType": "METRIC_BASED_ALERT",
"schemaVersion": "v1",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"aggregateAttribute": {
@@ -383,10 +393,12 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "OverwriteTest",
"alertType": "METRIC_BASED_ALERT",
"schemaVersion": "v1",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"aggregateAttribute": {
@@ -473,10 +485,12 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "V2Test",
"alertType": "METRIC_BASED_ALERT",
"schemaVersion": "v2",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"aggregateAttribute": {
@@ -517,9 +531,11 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
initRule: PostableRule{},
content: []byte(`{
"alert": "DefaultSchemaTest",
"alertType": "METRIC_BASED_ALERT",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"aggregateAttribute": {
@@ -569,12 +585,16 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
func TestParseIntoRuleThresholdGeneration(t *testing.T) {
content := []byte(`{
"alert": "TestThresholds",
"alertType": "METRIC_BASED_ALERT",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"builderQueries": {
"A": {
"queryName": "A",
"expression": "A",
"dataSource": "metrics",
"disabled": false,
"aggregateAttribute": {
"key": "response_time"
@@ -638,14 +658,18 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
content := []byte(`{
"schemaVersion": "v2",
"alert": "MultiThresholdAlert",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"unit": "%",
"builderQueries": {
"A": {
"queryName": "A",
"expression": "A",
"dataSource": "metrics",
"disabled": false,
"aggregateAttribute": {
"key": "cpu_usage"
@@ -731,10 +755,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueIsBelow - should alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -765,10 +791,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueIsBelow; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -798,10 +826,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueIsAbove; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyAboveTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -832,10 +862,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueIsAbove; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyAboveTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -865,10 +897,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueIsBelow and AllTheTimes; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowAllTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -900,10 +934,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueIsBelow and AllTheTimes; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowAllTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -934,10 +970,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "anomaly rule with ValueOutsideBounds; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyOutOfBoundsTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -968,10 +1006,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "non-anomaly threshold rule with ValueIsBelow; should alert",
ruleJSON: []byte(`{
"alert": "ThresholdTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {
@@ -1002,10 +1042,12 @@ func TestAnomalyNegationEval(t *testing.T) {
name: "non-anomaly rule with ValueIsBelow - should not alert",
ruleJSON: []byte(`{
"alert": "ThresholdTest",
"alertType": "METRIC_BASED_ALERT",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [{
"type": "builder_query",
"spec": {

View File

@@ -252,6 +252,15 @@ func (b BasicRuleThreshold) Validate() error {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid match type: %s", string(b.MatchType)))
}
// Only validate unit if specified
if b.TargetUnit != "" {
unit := converter.Unit(b.TargetUnit)
err := unit.Validate()
if err != nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit"))
}
}
return errors.Join(errs...)
}