Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-16 14:12:13 +00:00)

Compare commits: remove-v4-...platform-p (15 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 3de8b3b5b2 | |
| | a3d74b19fc | |
| | 7edd97b3d2 | |
| | 17958d8aa0 | |
| | f8341790e9 | |
| | e84c6749b2 | |
| | 65fd5f274c | |
| | 8898f02698 | |
| | f277009ff8 | |
| | 17c6b79d79 | |
| | 76d6c23217 | |
| | 82dffdda56 | |
| | 8f38398863 | |
| | eb39772d3c | |
| | df72c897f9 | |
.github/CODEOWNERS (vendored): 6 changes
@@ -43,6 +43,12 @@
/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25
+
+# Emailing Owners
+
+/pkg/emailing/ @vikrantgupta25
+/pkg/types/emailtypes/ @vikrantgupta25
+/templates/email/ @vikrantgupta25

# Querier Owners

/pkg/querier/ @srikanthccv
.vscode/settings.json (vendored): 3 changes
@@ -14,5 +14,8 @@
	},
	"[sql]": {
		"editor.defaultFormatter": "adpyke.vscode-sql-formatter"
	},
	"[html]": {
		"editor.defaultFormatter": "vscode.html-language-features"
	}
}
@@ -193,6 +193,15 @@ emailing:
  templates:
    # The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
    directory: /opt/signoz/conf/templates/email
+  format:
+    header:
+      enabled: false
+      logo_url: ""
+    help:
+      enabled: false
+      email: ""
+    footer:
+      enabled: false
  smtp:
    # The SMTP server address.
    address: localhost:25
@@ -300,3 +309,14 @@ user:
    allow_self: true
  # The duration within which a user can reset their password.
  max_token_lifetime: 6h
+  root:
+    # Whether to enable the root user. When enabled, a root user is provisioned
+    # on startup using the email and password below. The root user cannot be
+    # deleted, updated, or have their password changed through the UI.
+    enabled: false
+    # The email address of the root user.
+    email: ""
+    # The password of the root user. Must meet password requirements.
+    password: ""
+    # The name of the organization to create or look up for the root user.
+    org_name: default
@@ -4678,6 +4678,8 @@ components:
        type: string
      id:
        type: string
+      isRoot:
+        type: boolean
      orgId:
        type: string
      role:
@@ -45,7 +45,7 @@ type APIHandler struct {
}

// NewAPIHandler returns an APIHandler
-func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
		Reader:      opts.DataConnector,
		RuleManager: opts.RulesManager,
@@ -58,7 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
		Signoz:         signoz,
		QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
-	})
+	}, config)

	if err != nil {
		return nil, err
@@ -175,7 +175,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		GlobalConfig: config.Global,
	}

-	apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
+	apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
	if err != nil {
		return nil, err
	}
@@ -5,16 +5,23 @@ import (
	"encoding/json"
	"fmt"
	"log/slog"
	"math"
	"strings"
	"sync"
	"time"

	"github.com/SigNoz/signoz/ee/query-service/anomaly"
	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/query-service/common"
	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/transition"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"

	querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
	"github.com/SigNoz/signoz/pkg/query-service/utils/times"
	"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
@@ -25,8 +32,6 @@ import (

	querierV5 "github.com/SigNoz/signoz/pkg/querier"

-	"github.com/SigNoz/signoz/ee/query-service/anomaly"
-
	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -43,12 +48,17 @@ type AnomalyRule struct {

	reader interfaces.Reader

-	// querierV5 is the query builder v5 querier used for all alert rule evaluation
+	// querierV2 is used for alerts created after the introduction of new metrics query builder
+	querierV2 interfaces.Querier
+
+	// querierV5 is used for alerts migrated after the introduction of new query builder
	querierV5 querierV5.Querier

	provider   anomaly.Provider
	providerV2 anomalyV2.Provider

-	logger *slog.Logger
+	version string
+	logger  *slog.Logger

	seasonality anomaly.Seasonality
}
@@ -92,6 +102,34 @@ func NewAnomalyRule(

	logger.Info("using seasonality", "seasonality", t.seasonality.String())

+	querierOptsV2 := querierV2.QuerierOptions{
+		Reader:       reader,
+		Cache:        cache,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+	}
+
+	t.querierV2 = querierV2.NewQuerier(querierOptsV2)
+	t.reader = reader
+	if t.seasonality == anomaly.SeasonalityHourly {
+		t.provider = anomaly.NewHourlyProvider(
+			anomaly.WithCache[*anomaly.HourlyProvider](cache),
+			anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
+			anomaly.WithReader[*anomaly.HourlyProvider](reader),
+		)
+	} else if t.seasonality == anomaly.SeasonalityDaily {
+		t.provider = anomaly.NewDailyProvider(
+			anomaly.WithCache[*anomaly.DailyProvider](cache),
+			anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
+			anomaly.WithReader[*anomaly.DailyProvider](reader),
+		)
+	} else if t.seasonality == anomaly.SeasonalityWeekly {
+		t.provider = anomaly.NewWeeklyProvider(
+			anomaly.WithCache[*anomaly.WeeklyProvider](cache),
+			anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
+			anomaly.WithReader[*anomaly.WeeklyProvider](reader),
+		)
+	}
+
	if t.seasonality == anomaly.SeasonalityHourly {
		t.providerV2 = anomalyV2.NewHourlyProvider(
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
@@ -110,7 +148,7 @@ func NewAnomalyRule(
	}

	t.querierV5 = querierV5
-	t.reader = reader
+	t.version = p.Version
	t.logger = logger
	return &t, nil
}
@@ -119,9 +157,36 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
	return RuleTypeAnomaly
}

-func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
+func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {

-	r.logger.InfoContext(ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
+	r.logger.InfoContext(
+		ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
+	)

+	st, en := r.Timestamps(ts)
+	start := st.UnixMilli()
+	end := en.UnixMilli()
+
+	compositeQuery := r.Condition().CompositeQuery
+
+	if compositeQuery.PanelType != v3.PanelTypeGraph {
+		compositeQuery.PanelType = v3.PanelTypeGraph
+	}
+
+	// default mode
+	return &v3.QueryRangeParamsV3{
+		Start:          start,
+		End:            end,
+		Step:           int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
+		CompositeQuery: compositeQuery,
+		Variables:      make(map[string]interface{}, 0),
+		NoCache:        false,
+	}, nil
+}
+
+func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
+
+	r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())

	startTs, endTs := r.Timestamps(ts)
	start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -150,6 +215,60 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
	if err != nil {
		return nil, err
	}
+	err = r.PopulateTemporality(ctx, orgID, params)
+	if err != nil {
+		return nil, fmt.Errorf("internal error while setting temporality")
+	}
+
+	anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
+		Params:      params,
+		Seasonality: r.seasonality,
+	})
+	if err != nil {
+		return nil, err
+	}
+
+	var queryResult *v3.Result
+	for _, result := range anomalies.Results {
+		if result.QueryName == r.GetSelectedQuery() {
+			queryResult = result
+			break
+		}
+	}
+
+	hasData := len(queryResult.AnomalyScores) > 0
+	if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
+		return ruletypes.Vector{*missingDataAlert}, nil
+	}
+
+	var resultVector ruletypes.Vector
+
+	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
+	r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
+
+	for _, series := range queryResult.AnomalyScores {
+		if !r.Condition().ShouldEval(series) {
+			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
+			continue
+		}
+		results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
+			ActiveAlerts:  r.ActiveAlertsLabelFP(),
+			SendUnmatched: r.ShouldSendUnmatched(),
+		})
+		if err != nil {
+			return nil, err
+		}
+		resultVector = append(resultVector, results...)
+	}
+	return resultVector, nil
+}
+
+func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
+
+	params, err := r.prepareQueryRangeV5(ctx, ts)
+	if err != nil {
+		return nil, err
+	}
+
+	anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
+		Params: *params,
@@ -171,25 +290,20 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
		r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
	}

-	var anomalyScores []*qbtypes.TimeSeries
-	if qbResult != nil {
-		for _, bucket := range qbResult.Aggregations {
-			anomalyScores = append(anomalyScores, bucket.AnomalyScores...)
-		}
-	}
+	queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)

-	hasData := len(anomalyScores) > 0
+	hasData := len(queryResult.AnomalyScores) > 0
	if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
		return ruletypes.Vector{*missingDataAlert}, nil
	}

	var resultVector ruletypes.Vector

-	scoresJSON, _ := json.Marshal(anomalyScores)
+	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
	r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))

	// Filter out new series if newGroupEvalDelay is configured
-	seriesToProcess := anomalyScores
+	seriesToProcess := queryResult.AnomalyScores
	if r.ShouldSkipNewGroups() {
		filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
		// In case of error we log the error and continue with the original series
@@ -202,7 +316,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t

	for _, series := range seriesToProcess {
		if !r.Condition().ShouldEval(series) {
-			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
+			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
			continue
		}
		results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -223,7 +337,16 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {

	valueFormatter := formatter.FromUnit(r.Unit())

-	res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
+	var res ruletypes.Vector
+	var err error
+
+	if r.version == "v5" {
+		r.logger.InfoContext(ctx, "running v5 query")
+		res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
+	} else {
+		r.logger.InfoContext(ctx, "running v4 query")
+		res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
+	}
	if err != nil {
		return 0, err
	}
@@ -8,27 +8,28 @@ import (
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
	"github.com/SigNoz/signoz/ee/query-service/anomaly"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/valuer"
)

-// mockAnomalyProviderV2 is a mock implementation of anomalyV2.Provider for testing.
-type mockAnomalyProviderV2 struct {
-	responses []*anomalyV2.AnomaliesResponse
+// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
+// We need this because the anomaly provider makes 6 different queries for various
+// time periods (current, past period, current season, past season, past 2 seasons,
+// past 3 seasons), making it cumbersome to create mock data.
+type mockAnomalyProvider struct {
+	responses []*anomaly.GetAnomaliesResponse
	callCount int
}

-func (m *mockAnomalyProviderV2) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomalyV2.AnomaliesRequest) (*anomalyV2.AnomaliesResponse, error) {
+func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
	if m.callCount >= len(m.responses) {
-		return &anomalyV2.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
+		return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
	}
	resp := m.responses[m.callCount]
	m.callCount++
@@ -81,11 +82,11 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
		},
	}

-	responseNoData := &anomalyV2.AnomaliesResponse{
-		Results: []*qbtypes.TimeSeriesData{
+	responseNoData := &anomaly.GetAnomaliesResponse{
+		Results: []*v3.Result{
			{
-				QueryName:    "A",
-				Aggregations: []*qbtypes.AggregationBucket{},
+				QueryName:     "A",
+				AnomalyScores: []*v3.Series{},
			},
		},
	}
@@ -128,8 +129,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
	)
	require.NoError(t, err)

-	rule.providerV2 = &mockAnomalyProviderV2{
-		responses: []*anomalyV2.AnomaliesResponse{responseNoData},
+	rule.provider = &mockAnomalyProvider{
+		responses: []*anomaly.GetAnomaliesResponse{responseNoData},
	}

	alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -189,11 +190,11 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
		},
	}

-	responseNoData := &anomalyV2.AnomaliesResponse{
-		Results: []*qbtypes.TimeSeriesData{
+	responseNoData := &anomaly.GetAnomaliesResponse{
+		Results: []*v3.Result{
			{
-				QueryName:    "A",
-				Aggregations: []*qbtypes.AggregationBucket{},
+				QueryName:     "A",
+				AnomalyScores: []*v3.Series{},
			},
		},
	}
@@ -227,22 +228,16 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
	t1 := baseTime.Add(5 * time.Minute)
	t2 := t1.Add(c.timeBetweenEvals)

-	responseWithData := &anomalyV2.AnomaliesResponse{
-		Results: []*qbtypes.TimeSeriesData{
+	responseWithData := &anomaly.GetAnomaliesResponse{
+		Results: []*v3.Result{
			{
				QueryName: "A",
-				Aggregations: []*qbtypes.AggregationBucket{
+				AnomalyScores: []*v3.Series{
					{
-						AnomalyScores: []*qbtypes.TimeSeries{
-							{
-								Labels: []*qbtypes.Label{
-									{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
-								},
-								Values: []*qbtypes.TimeSeriesValue{
-									{Timestamp: baseTime.UnixMilli(), Value: 1.0},
-									{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
-								},
-							},
+						Labels: map[string]string{"test": "label"},
+						Points: []v3.Point{
+							{Timestamp: baseTime.UnixMilli(), Value: 1.0},
+							{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
						},
					},
				},
@@ -257,8 +252,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
	rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
	require.NoError(t, err)

-	rule.providerV2 = &mockAnomalyProviderV2{
-		responses: []*anomalyV2.AnomaliesResponse{responseWithData, responseNoData},
+	rule.provider = &mockAnomalyProvider{
+		responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
	}

	alertsFound1, err := rule.Eval(context.Background(), t1)
@@ -1542,6 +1542,10 @@ export interface TypesUserDTO {
	 * @type string
	 */
	id?: string;
+	/**
+	 * @type boolean
+	 */
+	isRoot?: boolean;
	/**
	 * @type string
	 */
@@ -73,7 +73,7 @@ describe('convertV5ResponseToLegacy', () => {
		const v5Data: QueryRangeResponseV5 = {
			type: 'time_series',
			data: { results: [timeSeries] },
-			meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
+			meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
		};

		const params = makeBaseParams('time_series', [
@@ -156,7 +156,7 @@ describe('convertV5ResponseToLegacy', () => {
		const v5Data: QueryRangeResponseV5 = {
			type: 'scalar',
			data: { results: [scalar] },
-			meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
+			meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
		};

		const params = makeBaseParams('scalar', [
@@ -239,7 +239,7 @@ describe('convertV5ResponseToLegacy', () => {
		const v5Data: QueryRangeResponseV5 = {
			type: 'scalar',
			data: { results: [scalar] },
-			meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
+			meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
		};

		const params = makeBaseParams('scalar', [
@@ -388,6 +388,7 @@ export function convertV5ResponseToLegacy(
			warnings: v5Data?.data?.warning || [],
		},
		warning: v5Data?.warning || undefined,
+		meta: v5Data?.meta,
	},
	warning: v5Data?.warning || undefined,
};
@@ -406,6 +407,7 @@ export function convertV5ResponseToLegacy(
	payload: {
		data: convertedData,
		warning: v5Response.payload?.data?.warning || undefined,
+		meta: v5Data?.meta,
	},
};
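The fixtures and converter changes above show the v5 response meta gaining a stepIntervals field alongside the scan counters. A minimal sketch of the shape as used by the fixtures; the value type of stepIntervals is an assumption, since the fixtures only ever pass an empty object:

```typescript
// Inferred from the test fixtures above: meta now carries step intervals in
// addition to the scan/duration counters.
interface QueryRangeMetaV5 {
	rowsScanned: number;
	bytesScanned: number;
	durationMs: number;
	stepIntervals: Record<string, number>; // assumed: query name -> step interval
}
```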
@@ -78,12 +78,10 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
	let user: ReturnType<typeof userEvent.setup>;
	let mockOnValueUpdate: jest.Mock;
-	let mockSetVariablesToGetUpdated: jest.Mock;

	beforeEach(() => {
		user = userEvent.setup();
		mockOnValueUpdate = jest.fn();
-		mockSetVariablesToGetUpdated = jest.fn();
		jest.clearAllMocks();
	});
@@ -102,9 +100,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -150,9 +145,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -195,9 +187,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -247,9 +236,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -272,9 +258,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -308,9 +291,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -344,9 +324,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -369,9 +346,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -405,9 +379,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -461,9 +432,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -508,9 +476,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -548,9 +513,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -582,9 +544,6 @@ describe('VariableItem Integration Tests', () => {
				variableData={variable}
				existingVariables={{}}
				onValueUpdate={mockOnValueUpdate}
-				variablesToGetUpdated={[]}
-				setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-				dependencyData={null}
			/>
		</TestWrapper>,
	);
@@ -9,11 +9,15 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
+import {
+	enqueueDescendantsOfVariable,
+	enqueueFetchOfAllVariables,
+	initializeVariableFetchStore,
+} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';

-import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';

import './DashboardVariableSelection.styles.scss';
@@ -22,8 +26,6 @@ function DashboardVariableSelection(): JSX.Element | null {
	const {
		setSelectedDashboard,
		updateLocalStorageDashboardVariables,
-		variablesToGetUpdated,
-		setVariablesToGetUpdated,
	} = useDashboard();

	const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -55,11 +57,14 @@ function DashboardVariableSelection(): JSX.Element | null {
		[dependencyData?.order],
	);

-	// Trigger refetch when dependency order changes or global time changes
+	// Initialize fetch store then start a new fetch cycle.
+	// Runs on dependency order changes, and time range changes.
	useEffect(() => {
-		if (dependencyData?.order && dependencyData.order.length > 0) {
-			setVariablesToGetUpdated(dependencyData?.order || []);
-		}
+		const allVariableNames = sortedVariablesArray
+			.map((v) => v.name)
+			.filter((name): name is string => !!name);
+		initializeVariableFetchStore(allVariableNames);
+		enqueueFetchOfAllVariables();
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [dependencyOrderKey, minTime, maxTime]);
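The variableFetchStore module driving this refactor is new in this change set and its implementation is not part of the diff. A minimal sketch of what its exported API plausibly looks like, inferred only from the call sites in this diff (initializeVariableFetchStore, enqueueFetchOfAllVariables, enqueueDescendantsOfVariable, plus the settleVariableFetch calls further down); every internal detail here is an assumption:

```typescript
// Hypothetical sketch of providers/Dashboard/store/variableFetchStore.
// Only the exported names appear in this diff; all bodies are assumptions.
type FetchState = 'idle' | 'loading' | 'complete' | 'failure';

interface VariableFetchEntry {
	state: FetchState;
	cycleId: number; // bumped per fetch cycle; surfaces in react-query keys
	hasFetchedOnce: boolean;
}

const store = new Map<string, VariableFetchEntry>();

// Reset tracking for the dashboard's current set of variables.
export function initializeVariableFetchStore(names: string[]): void {
	names.forEach((name) =>
		store.set(name, { state: 'idle', cycleId: 0, hasFetchedOnce: false }),
	);
}

// Start a new fetch cycle for every tracked variable.
export function enqueueFetchOfAllVariables(): void {
	store.forEach((entry) => {
		entry.state = 'loading';
		entry.cycleId += 1;
	});
}

// Mark only the descendants of a changed variable for refetching. The real
// implementation presumably consults the dependency graph; elided here.
export function enqueueDescendantsOfVariable(name: string): void {
	// ...look up the descendants of `name` in the dependency graph and mark
	// each of their entries as 'loading' with an incremented cycleId.
}

// Settle an in-flight fetch (called from onSuccess/onError in this diff).
export function settleVariableFetch(
	name: string | undefined,
	status: 'complete' | 'failure',
): void {
	const entry = name ? store.get(name) : undefined;
	if (entry) {
		entry.state = status;
		entry.hasFetchedOnce = entry.hasFetchedOnce || status === 'complete';
	}
}
```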
@@ -121,29 +126,14 @@ function DashboardVariableSelection(): JSX.Element | null {
				return prev;
			});

-			if (dependencyData) {
-				const updatedVariables: string[] = [];
-				onUpdateVariableNode(
-					name,
-					dependencyData.graph,
-					dependencyData.order,
-					(node) => updatedVariables.push(node),
-				);
-				setVariablesToGetUpdated((prev) => [
-					...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
-				]);
-			} else {
-				setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
-			}
+			// Cascade: enqueue query-type descendants for refetching
+			enqueueDescendantsOfVariable(name);
		},
		[
			// This can be removed
			dashboardVariables,
			updateLocalStorageDashboardVariables,
			dependencyData,
			updateUrlVariable,
			setSelectedDashboard,
-			setVariablesToGetUpdated,
		],
	);
@@ -158,9 +148,6 @@ function DashboardVariableSelection(): JSX.Element | null {
						existingVariables={dashboardVariables}
						variableData={variable}
						onValueUpdate={onValueUpdate}
-						variablesToGetUpdated={variablesToGetUpdated}
-						setVariablesToGetUpdated={setVariablesToGetUpdated}
-						dependencyData={dependencyData}
					/>
				);
			})}
@@ -2,18 +2,25 @@ import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
-import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
+import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import useDebounce from 'hooks/useDebounce';
-import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
+import { SuccessResponseV2 } from 'types/api';
+import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';

import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
-import { getOptionsForDynamicVariable } from './util';
+import {
+	buildExistingDynamicVariableQuery,
+	extractErrorMessage,
+	getOptionsForDynamicVariable,
+	mergeUniqueStrings,
+	settleVariableFetch,
+} from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
@@ -24,7 +31,6 @@ type DynamicVariableInputProps = Pick<
	'variableData' | 'onValueUpdate' | 'existingVariables'
>;

-// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
	variableData,
	onValueUpdate,
@@ -55,14 +61,8 @@ function DynamicVariableInput({

	const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);

	// Build a memoized list of all currently available option strings (normalized + related)
	const allAvailableOptionStrings = useMemo(
-		() => [
-			...new Set([
-				...optionsData.map((v) => v.toString()),
-				...relatedValues.map((v) => v.toString()),
-			]),
-		],
+		() => mergeUniqueStrings(optionsData, relatedValues),
		[optionsData, relatedValues],
	);
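The inline Set-based merge removed above now lives in a mergeUniqueStrings util imported from ./util. That helper's implementation is not shown in this diff, but the removed code implies something very close to this sketch:

```typescript
// Inferred from the inline code this diff replaces: stringify both lists and
// de-duplicate while preserving first-seen order.
export function mergeUniqueStrings(
	a: (string | number | boolean)[],
	b: (string | number | boolean)[],
): string[] {
	return [
		...new Set([...a.map((v) => v.toString()), ...b.map((v) => v.toString())]),
	];
}
```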
@@ -104,67 +104,24 @@ function DynamicVariableInput({
		(state) => state.globalTime,
	);

	// existing query is the query made from the other dynamic variables around this one with there current values
	// for e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
-	// eslint-disable-next-line sonarjs/cognitive-complexity
-	const existingQuery = useMemo(() => {
-		if (!existingVariables || !variableData.dynamicVariablesAttribute) {
-			return '';
-		}
+	const {
+		variableFetchCycleId,
+		isVariableSettled,
+		isVariableFetching,
+		hasVariableFetchedOnce,
+		isVariableWaitingForDependencies,
+		variableDependencyWaitMessage,
+	} = useVariableFetchState(variableData.name || '');

-		const queryParts: string[] = [];
-
-		Object.entries(existingVariables).forEach(([, variable]) => {
-			// Skip the current variable being processed
-			if (variable.id === variableData.id) {
-				return;
-			}
-
-			// Only include dynamic variables that have selected values and are not selected as ALL
-			if (
-				variable.type === 'DYNAMIC' &&
-				variable.dynamicVariablesAttribute &&
-				variable.selectedValue &&
-				!isEmpty(variable.selectedValue) &&
-				(variable.showALLOption ? !variable.allSelected : true)
-			) {
-				const attribute = variable.dynamicVariablesAttribute;
-				const values = Array.isArray(variable.selectedValue)
-					? variable.selectedValue
-					: [variable.selectedValue];
-
-				// Filter out empty values and convert to strings
-				const validValues = values
-					.filter((val) => val !== null && val !== undefined && val !== '')
-					.map((val) => val.toString());
-
-				if (validValues.length > 0) {
-					// Format values for query - wrap strings in quotes, keep numbers as is
-					const formattedValues = validValues.map((val) => {
-						// Check if value is a number
-						const numValue = Number(val);
-						if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
-							return val; // Keep as number
-						}
-						// Escape single quotes and wrap in quotes
-						return `'${val.replace(/'/g, "\\'")}'`;
-					});
-
-					if (formattedValues.length === 1) {
-						queryParts.push(`${attribute} = ${formattedValues[0]}`);
-					} else {
-						queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
-					}
-				}
-			}
-		});
-
-		return queryParts.join(' AND ');
-	}, [
-		existingVariables,
-		variableData.id,
-		variableData.dynamicVariablesAttribute,
-	]);
+	const existingQuery = useMemo(
+		() =>
+			buildExistingDynamicVariableQuery(
+				existingVariables,
+				variableData.id,
+				!!variableData.dynamicVariablesAttribute,
+			),
+		[existingVariables, variableData.id, variableData.dynamicVariablesAttribute],
+	);
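The fifty-odd removed lines above are folded into buildExistingDynamicVariableQuery in ./util. The diff does not show that helper, so the following is a sketch reconstructed from the removed inline logic; the parameter names and the minimal variable shape are assumptions read off the call site:

```typescript
import { isEmpty } from 'lodash-es';

// Minimal shape of a dashboard variable as used here (subset, assumed).
interface DynamicVariableLike {
	id?: string;
	type?: string;
	dynamicVariablesAttribute?: string;
	selectedValue?: unknown;
	showALLOption?: boolean;
	allSelected?: boolean;
}

// Reconstructed from the inline useMemo this diff removes; treat as a sketch.
// Builds e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
// from the other dynamic variables' current selections.
export function buildExistingDynamicVariableQuery(
	existingVariables: Record<string, DynamicVariableLike> | undefined,
	currentId: string | undefined,
	hasDynamicAttribute: boolean,
): string {
	if (!existingVariables || !hasDynamicAttribute) return '';

	const queryParts: string[] = [];
	Object.values(existingVariables).forEach((variable) => {
		if (variable.id === currentId) return; // skip the variable being fetched
		const isEligible =
			variable.type === 'DYNAMIC' &&
			variable.dynamicVariablesAttribute &&
			variable.selectedValue &&
			!isEmpty(variable.selectedValue) &&
			(variable.showALLOption ? !variable.allSelected : true);
		if (!isEligible) return;

		const raw = Array.isArray(variable.selectedValue)
			? variable.selectedValue
			: [variable.selectedValue];
		const values = raw
			.filter((val) => val !== null && val !== undefined && val !== '')
			.map((val) => String(val));
		if (values.length === 0) return;

		// Numbers stay bare; strings are quoted with single quotes escaped.
		const formatted = values.map((val) =>
			!Number.isNaN(Number(val)) && Number.isFinite(Number(val))
				? val
				: `'${val.replace(/'/g, "\\'")}'`,
		);
		queryParts.push(
			formatted.length === 1
				? `${variable.dynamicVariablesAttribute} = ${formatted[0]}`
				: `${variable.dynamicVariablesAttribute} IN [${formatted.join(', ')}]`,
		);
	});
	return queryParts.join(' AND ');
}
```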
	// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
	const handleSelectDropdownVisibilityChange = useCallback(
@@ -182,6 +139,73 @@ function DynamicVariableInput({
		[onDropdownVisibleChange, optionsData, originalRelatedValues],
	);

+	const handleQuerySuccess = useCallback(
+		(data: SuccessResponseV2<FieldValueResponse>): void => {
+			const newNormalizedValues = data.data?.normalizedValues || [];
+			const newRelatedValues = data.data?.relatedValues || [];
+
+			if (!debouncedApiSearchText) {
+				setOptionsData(newNormalizedValues);
+				setIsComplete(data.data?.complete || false);
+			}
+			setFilteredOptionsData(newNormalizedValues);
+			setRelatedValues(newRelatedValues);
+			setOriginalRelatedValues(newRelatedValues);
+
+			// Sync temp selection with latest API values when ALL is active and dropdown is open
+			if (variableData.allSelected && isDropdownOpen) {
+				const latestValues = mergeUniqueStrings(
+					newNormalizedValues,
+					newRelatedValues,
+				);
+
+				const currentStrings = Array.isArray(tempSelection)
+					? tempSelection.map((v) => v.toString())
+					: tempSelection
+					? [tempSelection.toString()]
+					: [];
+
+				const areSame =
+					currentStrings.length === latestValues.length &&
+					latestValues.every((v) => currentStrings.includes(v));
+
+				if (!areSame) {
+					setTempSelection(latestValues);
+				}
+			}
+
+			// Apply default if no value is selected (e.g., new variable, first load)
+			if (!debouncedApiSearchText) {
+				applyDefaultIfNeeded(
+					mergeUniqueStrings(newNormalizedValues, newRelatedValues),
+				);
+			}
+
+			settleVariableFetch(variableData.name, 'complete');
+		},
+		[
+			debouncedApiSearchText,
+			variableData.allSelected,
+			variableData.name,
+			isDropdownOpen,
+			tempSelection,
+			setTempSelection,
+			applyDefaultIfNeeded,
+		],
+	);
+
+	const handleQueryError = useCallback(
+		(error: { message?: string } | null): void => {
+			if (error) {
+				setErrorMessage(extractErrorMessage(error));
+				setIsRetryableError(checkIfRetryableError(error));
+			}
+
+			settleVariableFetch(variableData.name, 'failure');
+		},
+		[variableData.name],
+	);
+
	const { isLoading, refetch } = useQuery(
		[
			REACT_QUERY_KEY.DASHBOARD_BY_ID,
@@ -192,13 +216,22 @@ function DynamicVariableInput({
			debouncedApiSearchText,
			variableData.dynamicVariablesSource,
			variableData.dynamicVariablesAttribute,
+			variableFetchCycleId,
		],
		{
+			/*
+			 * enabled if
+			 * - we have dynamic variable source and attribute defined (ALWAYS)
+			 * - AND
+			 * - we're either still fetching variable options
+			 * - OR
+			 * - if variable is in idle state and we have already fetched options for it
+			 **/
			enabled:
				variableData.type === 'DYNAMIC' &&
				!!variableData.dynamicVariablesSource &&
-				!!variableData.dynamicVariablesAttribute,
-			queryFn: () =>
+				!!variableData.dynamicVariablesAttribute &&
+				(isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
+			queryFn: ({ signal }) =>
				getFieldValues(
					variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
						? undefined
@@ -211,70 +244,10 @@ function DynamicVariableInput({
					minTime,
					maxTime,
					existingQuery,
+					signal,
				),
-			onSuccess: (data) => {
-				const newNormalizedValues = data.data?.normalizedValues || [];
-				const newRelatedValues = data.data?.relatedValues || [];
-
-				if (!debouncedApiSearchText) {
-					setOptionsData(newNormalizedValues);
-					setIsComplete(data.data?.complete || false);
-				}
-				setFilteredOptionsData(newNormalizedValues);
-				setRelatedValues(newRelatedValues);
-				setOriginalRelatedValues(newRelatedValues);
-
-				// Only run auto-check logic when necessary to avoid performance issues
-				if (variableData.allSelected && isDropdownOpen) {
-					// Build the latest full list from API (normalized + related)
-					const latestValues = [
-						...new Set([
-							...newNormalizedValues.map((v) => v.toString()),
-							...newRelatedValues.map((v) => v.toString()),
-						]),
-					];
-
-					// Update temp selection to exactly reflect latest API values when ALL is active
-					const currentStrings = Array.isArray(tempSelection)
-						? tempSelection.map((v) => v.toString())
-						: tempSelection
-						? [tempSelection.toString()]
-						: [];
-					const areSame =
-						currentStrings.length === latestValues.length &&
-						latestValues.every((v) => currentStrings.includes(v));
-					if (!areSame) {
-						setTempSelection(latestValues);
-					}
-				}
-
-				// Apply default if no value is selected (e.g., new variable, first load)
-				if (!debouncedApiSearchText) {
-					const allNewOptions = [
-						...new Set([
-							...newNormalizedValues.map((v) => v.toString()),
-							...newRelatedValues.map((v) => v.toString()),
-						]),
-					];
-					applyDefaultIfNeeded(allNewOptions);
-				}
-			},
-			onError: (error: any) => {
-				if (error) {
-					let message = SOMETHING_WENT_WRONG;
-					if (error?.message) {
-						message = error?.message;
-					} else {
-						message =
-							'Please make sure configuration is valid and you have required setup and permissions';
-					}
-					setErrorMessage(message);
-
-					// Check if error is retryable (5xx) or not (4xx)
-					const isRetryable = checkIfRetryableError(error);
-					setIsRetryableError(isRetryable);
-				}
-			},
+			onSuccess: handleQuerySuccess,
+			onError: handleQueryError,
		},
	);
@@ -336,6 +309,8 @@ function DynamicVariableInput({
			showRetryButton={isRetryableError}
			showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
			onSearch={handleSearch}
+			waiting={isVariableWaitingForDependencies}
+			waitingMessage={variableDependencyWaitMessage}
		/>
	);
}
@@ -3,8 +3,9 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
+import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
-import { isArray, isString } from 'lodash-es';
+import { isArray, isEmpty, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -12,26 +13,18 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
-import { areArraysEqual, checkAPIInvocation } from './util';
+import { areArraysEqual, settleVariableFetch } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';

type QueryVariableInputProps = Pick<
	VariableItemProps,
-	| 'variableData'
-	| 'existingVariables'
-	| 'onValueUpdate'
-	| 'variablesToGetUpdated'
-	| 'setVariablesToGetUpdated'
-	| 'dependencyData'
+	'variableData' | 'existingVariables' | 'onValueUpdate'
>;

function QueryVariableInput({
	variableData,
	existingVariables,
-	variablesToGetUpdated,
-	setVariablesToGetUpdated,
-	dependencyData,
	onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
	const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
@@ -43,6 +36,15 @@ function QueryVariableInput({
		(state) => state.globalTime,
	);

+	const {
+		variableFetchCycleId,
+		isVariableSettled,
+		isVariableFetching,
+		hasVariableFetchedOnce,
+		isVariableWaitingForDependencies,
+		variableDependencyWaitMessage,
+	} = useVariableFetchState(variableData.name || '');
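useVariableFetchState itself is not shown in this diff; its contract can be read off this destructuring and the jest.mock of the hook in the DynamicVariableInput tests further down. A sketch of the return shape, where the field semantics are assumptions:

```typescript
// Shape as destructured at the call sites and mocked in the tests below; the
// hook presumably subscribes to variableFetchStore entries by variable name.
interface VariableFetchStateResult {
	variableFetchCycleId: number; // increments per fetch cycle; part of query keys
	variableFetchState?: string; // e.g. 'loading' (present in the test mock)
	isVariableSettled: boolean; // last fetch finished (success or failure)
	isVariableFetching: boolean; // a fetch is currently in flight
	hasVariableFetchedOnce: boolean; // options were fetched at least once
	isVariableWaitingForDependencies: boolean; // parent variables not settled yet
	variableDependencyWaitMessage: string; // surfaced via the new waiting* props
}

declare function useVariableFetchState(name: string): VariableFetchStateResult;
```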

	const {
		tempSelection,
		setTempSelection,
@@ -60,16 +62,6 @@ function QueryVariableInput({
		strategy: queryVariableSelectStrategy,
	});

-	const validVariableUpdate = useCallback((): boolean => {
-		if (!variableData.name) {
-			return false;
-		}
-		return Boolean(
-			variablesToGetUpdated.length &&
-				variablesToGetUpdated[0] === variableData.name,
-		);
-	}, [variableData.name, variablesToGetUpdated]);
-
	const getOptions = useCallback(
		// eslint-disable-next-line sonarjs/cognitive-complexity
		(variablesRes: VariableResponseProps | null): void => {
@@ -103,18 +95,24 @@ function QueryVariableInput({
				valueNotInList = true;
			}

-			// variablesData.allSelected is added for the case where on change of options we need to update the
-			// local storage
-			if (
-				variableData.name &&
-				(validVariableUpdate() || valueNotInList || variableData.allSelected)
-			) {
+			if (variableData.name && (valueNotInList || variableData.allSelected)) {
				if (
					variableData.allSelected &&
					variableData.multiSelect &&
					variableData.showALLOption
				) {
-					onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
+					if (
+						variableData.name &&
+						variableData.id &&
+						!isEmpty(variableData.selectedValue)
+					) {
+						onValueUpdate(
+							variableData.name,
+							variableData.id,
+							newOptionsData,
+							true,
+						);
+					}

					// Update tempSelection to maintain ALL state when dropdown is open
					if (tempSelection !== undefined) {
@@ -132,7 +130,11 @@ function QueryVariableInput({
						newOptionsData.every((option) => selectedValue.includes(option));
				}

-				if (variableData.name && variableData.id) {
+				if (
+					variableData.name &&
+					variableData.id &&
+					!isEmpty(variableData.selectedValue)
+				) {
					onValueUpdate(variableData.name, variableData.id, value, allSelected);
				}
			}
@@ -141,10 +143,6 @@ function QueryVariableInput({
				setOptionsData(newOptionsData);
				// Apply default if no value is selected (e.g., new variable, first load)
				applyDefaultIfNeeded(newOptionsData);
-			} else {
-				setVariablesToGetUpdated((prev) =>
-					prev.filter((name) => name !== variableData.name),
-				);
			}
		}
	} catch (e) {
@@ -157,8 +155,6 @@ function QueryVariableInput({
			onValueUpdate,
			tempSelection,
			setTempSelection,
-			validVariableUpdate,
-			setVariablesToGetUpdated,
			applyDefaultIfNeeded,
		],
	);
@@ -169,27 +165,28 @@ function QueryVariableInput({
			variableData.name || '',
			`${minTime}`,
			`${maxTime}`,
-			JSON.stringify(dependencyData?.order),
+			variableFetchCycleId,
		],
		{
-			enabled:
-				variableData &&
-				checkAPIInvocation(
-					variablesToGetUpdated,
-					variableData,
-					dependencyData?.parentDependencyGraph,
+			/*
+			 * enabled if
+			 * - we're either still fetching variable options
+			 * - OR
+			 * - if variable is in idle state and we have already fetched options for it
+			 **/
+			enabled: isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
-			queryFn: ({ signal }) =>
-				dashboardVariablesQuery(
-					{
-						query: variableData.queryValue || '',
-						variables: variablePropsToPayloadVariables(existingVariables),
-					},
-					signal,
-				),
+			queryFn: () =>
+				dashboardVariablesQuery({
+					query: variableData.queryValue || '',
+					variables: variablePropsToPayloadVariables(existingVariables),
+				}),
			refetchOnWindowFocus: false,
			onSuccess: (response) => {
				getOptions(response.payload);
-				setVariablesToGetUpdated((prev) =>
-					prev.filter((v) => v !== variableData.name),
-				);
+				settleVariableFetch(variableData.name, 'complete');
			},
			onError: (error: {
				details: {
@@ -206,9 +203,7 @@ function QueryVariableInput({
				}
				setErrorMessage(message);
			}
-			setVariablesToGetUpdated((prev) =>
-				prev.filter((v) => v !== variableData.name),
-			);
+			settleVariableFetch(variableData.name, 'failure');
			},
		},
	);
@@ -242,6 +237,8 @@ function QueryVariableInput({
			loading={isLoading}
			errorMessage={errorMessage}
			onRetry={handleRetry}
+			waiting={isVariableWaitingForDependencies}
+			waitingMessage={variableDependencyWaitMessage}
		/>
	);
}
@@ -28,6 +28,8 @@ interface SelectVariableInputProps {
	showRetryButton?: boolean;
	showIncompleteDataMessage?: boolean;
	onSearch?: (searchTerm: string) => void;
+	waiting?: boolean;
+	waitingMessage?: string;
}

const MAX_TAG_DISPLAY_VALUES = 10;
@@ -65,6 +67,7 @@ function SelectVariableInput({
	showRetryButton,
	showIncompleteDataMessage,
	onSearch,
+	waitingMessage,
}: SelectVariableInputProps): JSX.Element {
	const commonProps = useMemo(
		() => ({
@@ -78,7 +81,6 @@ function SelectVariableInput({
			className: 'variable-select',
			popupClassName: 'dropdown-styles',
			getPopupContainer: popupContainer,
-			style: SelectItemStyle,
			showSearch: true,
			bordered: false,

@@ -86,6 +88,8 @@ function SelectVariableInput({
			'data-testid': 'variable-select',
			onChange,
			loading,
+			waitingMessage,
+			style: SelectItemStyle,
			options,
			errorMessage,
			onRetry,
@@ -101,6 +105,7 @@ function SelectVariableInput({
			defaultValue,
			onChange,
			loading,
+			waitingMessage,
			options,
			value,
			errorMessage,
@@ -47,14 +47,6 @@ describe('VariableItem', () => {
					variableData={mockVariableData}
					existingVariables={{}}
					onValueUpdate={mockOnValueUpdate}
-					variablesToGetUpdated={[]}
-					setVariablesToGetUpdated={(): void => {}}
-					dependencyData={{
-						order: [],
-						graph: {},
-						parentDependencyGraph: {},
-						hasCycle: false,
-					}}
				/>
			</MockQueryClientProvider>,
		);
@@ -69,14 +61,6 @@ describe('VariableItem', () => {
					variableData={mockVariableData}
					existingVariables={{}}
					onValueUpdate={mockOnValueUpdate}
-					variablesToGetUpdated={[]}
-					setVariablesToGetUpdated={(): void => {}}
-					dependencyData={{
-						order: [],
-						graph: {},
-						parentDependencyGraph: {},
-						hasCycle: false,
-					}}
				/>
			</MockQueryClientProvider>,
		);
@@ -92,14 +76,6 @@ describe('VariableItem', () => {
					variableData={mockVariableData}
					existingVariables={{}}
					onValueUpdate={mockOnValueUpdate}
-					variablesToGetUpdated={[]}
-					setVariablesToGetUpdated={(): void => {}}
-					dependencyData={{
-						order: [],
-						graph: {},
-						parentDependencyGraph: {},
-						hasCycle: false,
-					}}
				/>
			</MockQueryClientProvider>,
		);
@@ -133,14 +109,6 @@ describe('VariableItem', () => {
					variableData={mockCustomVariableData}
					existingVariables={{}}
					onValueUpdate={mockOnValueUpdate}
-					variablesToGetUpdated={[]}
-					setVariablesToGetUpdated={(): void => {}}
-					dependencyData={{
-						order: [],
-						graph: {},
-						parentDependencyGraph: {},
-						hasCycle: false,
-					}}
				/>
			</MockQueryClientProvider>,
		);
@@ -163,14 +131,6 @@ describe('VariableItem', () => {
					variableData={customVariableData}
					existingVariables={{}}
					onValueUpdate={mockOnValueUpdate}
-					variablesToGetUpdated={[]}
-					setVariablesToGetUpdated={(): void => {}}
-					dependencyData={{
-						order: [],
-						graph: {},
-						parentDependencyGraph: {},
-						hasCycle: false,
-					}}
				/>
			</MockQueryClientProvider>,
		);
@@ -185,14 +145,6 @@ describe('VariableItem', () => {
					variableData={mockCustomVariableData}
					existingVariables={{}}
					onValueUpdate={mockOnValueUpdate}
-					variablesToGetUpdated={[]}
-					setVariablesToGetUpdated={(): void => {}}
-					dependencyData={{
-						order: [],
-						graph: {},
-						parentDependencyGraph: {},
-						hasCycle: false,
-					}}
				/>
			</MockQueryClientProvider>,
		);
@@ -1,7 +1,6 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
-import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import CustomVariableInput from './CustomVariableInput';
@@ -21,18 +20,12 @@ export interface VariableItemProps {
		allSelected: boolean,
		haveCustomValuesSelected?: boolean,
	) => void;
-	variablesToGetUpdated: string[];
-	setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
-	dependencyData: IDependencyData | null;
}

function VariableItem({
	variableData,
	onValueUpdate,
	existingVariables,
-	variablesToGetUpdated,
-	setVariablesToGetUpdated,
-	dependencyData,
}: VariableItemProps): JSX.Element {
	const { name, description, type: variableType } = variableData;

@@ -65,9 +58,6 @@ function VariableItem({
			variableData={variableData}
			onValueUpdate={onValueUpdate}
			existingVariables={existingVariables}
-			variablesToGetUpdated={variablesToGetUpdated}
-			setVariablesToGetUpdated={setVariablesToGetUpdated}
-			dependencyData={dependencyData}
		/>
	)}
	{variableType === 'DYNAMIC' && (
@@ -7,6 +7,19 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';

import DynamicVariableInput from '../DynamicVariableInput';

+// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
+jest.mock('hooks/dashboard/useVariableFetchState', () => ({
+	useVariableFetchState: (): Record<string, unknown> => ({
+		variableFetchCycleId: 0,
+		variableFetchState: 'loading',
+		isVariableSettled: false,
+		isVariableFetching: true,
+		hasVariableFetchedOnce: false,
+		isVariableWaitingForDependencies: false,
+		variableDependencyWaitMessage: '',
+	}),
+}));
+
// Don't mock the components - use real ones

// Mock for useQuery
@@ -217,9 +230,10 @@ describe('DynamicVariableInput Component', () => {
				'',
				'Traces',
				'service.name',
+				0, // variableFetchCycleId
			],
			expect.objectContaining({
-				enabled: true, // Type is 'DYNAMIC'
+				enabled: true, // isVariableFetching is true from mock
				queryFn: expect.any(Function),
				onSuccess: expect.any(Function),
				onError: expect.any(Function),
@@ -8,14 +8,6 @@ import '@testing-library/jest-dom/extend-expect';
import VariableItem from '../VariableItem';

const mockOnValueUpdate = jest.fn();
-const mockSetVariablesToGetUpdated = jest.fn();
-
-const baseDependencyData = {
-	order: [],
-	graph: {},
-	parentDependencyGraph: {},
-	hasCycle: false,
-};

const TEST_VARIABLE_ID = 'test_variable';
const VARIABLE_SELECT_TESTID = 'variable-select';
@@ -31,9 +23,6 @@ const renderVariableItem = (
			variableData={variableData}
			existingVariables={{}}
			onValueUpdate={mockOnValueUpdate}
-			variablesToGetUpdated={[]}
-			setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
-			dependencyData={baseDependencyData}
		/>
	</MockQueryClientProvider>,
);
@@ -2,14 +2,12 @@ import {
	buildDependencies,
	buildDependencyGraph,
	buildParentDependencyGraph,
-	checkAPIInvocation,
	onUpdateVariableNode,
	VariableGraph,
} from '../util';
import {
	buildDependenciesMock,
	buildGraphMock,
-	checkAPIInvocationMock,
	onUpdateVariableNodeMock,
} from './mock';
@@ -72,97 +70,6 @@ describe('dashboardVariables - utilities and processors', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkAPIInvocation', () => {
|
||||
const {
|
||||
variablesToGetUpdated,
|
||||
variableData,
|
||||
parentDependencyGraph,
|
||||
} = checkAPIInvocationMock;
|
||||
|
||||
const mockRootElement = {
|
||||
name: 'deployment_environment',
|
||||
key: '036a47cd-9ffc-47de-9f27-0329198964a8',
|
||||
id: '036a47cd-9ffc-47de-9f27-0329198964a8',
|
||||
modificationUUID: '5f71b591-f583-497c-839d-6a1590c3f60f',
|
||||
selectedValue: 'production',
|
||||
type: 'QUERY',
|
||||
// ... other properties omitted for brevity
|
||||
} as any;
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should return false when variableData is empty', () => {
|
||||
expect(
|
||||
checkAPIInvocation(
|
||||
variablesToGetUpdated,
|
||||
variableData,
|
||||
parentDependencyGraph,
|
||||
),
|
||||
).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should return false when parentDependencyGraph is empty', () => {
expect(
checkAPIInvocation(variablesToGetUpdated, variableData, {}),
).toBeFalsy();
});
});

describe('variable sequences', () => {
it('should return true for valid sequence', () => {
expect(
checkAPIInvocation(
['k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeTruthy();
});

it('should return false for invalid sequence', () => {
expect(
checkAPIInvocation(
['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});

it('should return false when variableData is not in sequence', () => {
expect(
checkAPIInvocation(
['deployment_environment', 'service_name', 'endpoint'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
});

describe('root element behavior', () => {
it('should return true for valid root element sequence', () => {
expect(
checkAPIInvocation(
[
'deployment_environment',
'service_name',
'endpoint',
'http_status_code',
],
mockRootElement,
parentDependencyGraph,
),
).toBeTruthy();
});

it('should return true for empty variablesToGetUpdated array', () => {
expect(
checkAPIInvocation([], mockRootElement, parentDependencyGraph),
).toBeTruthy();
});
});
});

describe('Graph Building Utilities', () => {
const { graph } = buildGraphMock;
const { variables } = buildDependenciesMock;
@@ -223,10 +130,86 @@ describe('dashboardVariables - utilities and processors', () => {
},
hasCycle: false,
cycleNodes: undefined,
transitiveDescendants: {
deployment_environment: ['service_name', 'endpoint', 'http_status_code'],
endpoint: ['http_status_code'],
environment: [],
http_status_code: [],
k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
k8s_namespace_name: [],
k8s_node_name: ['k8s_namespace_name'],
service_name: ['endpoint', 'http_status_code'],
},
};

expect(buildDependencyGraph(graph)).toEqual(expected);
});

it('should return empty transitiveDescendants for an empty graph', () => {
const result = buildDependencyGraph({});
expect(result.transitiveDescendants).toEqual({});
expect(result.order).toEqual([]);
expect(result.hasCycle).toBe(false);
});

it('should compute transitiveDescendants for a linear chain (a -> b -> c)', () => {
const linearGraph: VariableGraph = {
a: ['b'],
b: ['c'],
c: [],
};
const result = buildDependencyGraph(linearGraph);
expect(result.transitiveDescendants).toEqual({
a: ['b', 'c'],
b: ['c'],
c: [],
});
});

it('should compute transitiveDescendants for a diamond dependency (a -> b, a -> c, b -> d, c -> d)', () => {
const diamondGraph: VariableGraph = {
a: ['b', 'c'],
b: ['d'],
c: ['d'],
d: [],
};
const result = buildDependencyGraph(diamondGraph);
expect(result.transitiveDescendants.a).toEqual(
expect.arrayContaining(['b', 'c', 'd']),
);
expect(result.transitiveDescendants.a).toHaveLength(3);
expect(result.transitiveDescendants.b).toEqual(['d']);
expect(result.transitiveDescendants.c).toEqual(['d']);
expect(result.transitiveDescendants.d).toEqual([]);
});

it('should handle disconnected components in transitiveDescendants', () => {
const disconnectedGraph: VariableGraph = {
a: ['b'],
b: [],
x: ['y'],
y: [],
};
const result = buildDependencyGraph(disconnectedGraph);
expect(result.transitiveDescendants.a).toEqual(['b']);
expect(result.transitiveDescendants.b).toEqual([]);
expect(result.transitiveDescendants.x).toEqual(['y']);
expect(result.transitiveDescendants.y).toEqual([]);
});

it('should return empty transitiveDescendants for all leaf nodes', () => {
const leafOnlyGraph: VariableGraph = {
a: [],
b: [],
c: [],
};
const result = buildDependencyGraph(leafOnlyGraph);
expect(result.transitiveDescendants).toEqual({
a: [],
b: [],
c: [],
});
});
});

describe('buildDependencies', () => {

@@ -1,36 +1,3 @@
/* eslint-disable sonarjs/no-duplicate-string */
export const checkAPIInvocationMock = {
variablesToGetUpdated: [],
variableData: {
name: 'k8s_node_name',
key: '4d71d385-beaf-4434-8dbf-c62be68049fc',
allSelected: false,
customValue: '',
description: '',
id: '4d71d385-beaf-4434-8dbf-c62be68049fc',
modificationUUID: '77233d3c-96d7-4ccb-aa9d-11b04d563068',
multiSelect: false,
order: 6,
queryValue:
"SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}\nGROUP BY k8s_node_name",
selectedValue: 'gke-signoz-saas-si-consumer-bsc-e2sd4-a6d430fa-gvm2',
showALLOption: false,
sort: 'DISABLED',
textboxValue: '',
type: 'QUERY',
},
parentDependencyGraph: {
deployment_environment: [],
service_name: ['deployment_environment'],
endpoint: ['deployment_environment', 'service_name'],
http_status_code: ['endpoint'],
k8s_cluster_name: [],
environment: [],
k8s_node_name: ['k8s_cluster_name'],
k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
},
} as any;

export const onUpdateVariableNodeMock = {
nodeToUpdate: 'deployment_environment',
graph: {

@@ -1,9 +1,16 @@
import { OptionData } from 'components/NewSelect/types';
import { isEmpty, isNull } from 'lodash-es';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isEmpty } from 'lodash-es';
import {
IDashboardVariables,
IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

export function areArraysEqual(
@@ -45,30 +52,16 @@ const getDependentVariablesBasedOnVariableName = (
}

return variables
?.map((variable: any) => {
.map((variable) => {
if (variable.type === 'QUERY') {
// Combined pattern for all formats
// {{.variable_name}} - original format
// $variable_name - dollar prefix format
// [[variable_name]] - square bracket format
// {{variable_name}} - without dot format
const patterns = [
`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
`\\$${variableName}\\b`, // $var
`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
];
const combinedRegex = new RegExp(patterns.join('|'));

const queryValue = variable.queryValue || '';
const dependVarReMatch = queryValue.match(combinedRegex);
if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
if (textContainsVariableReference(queryValue, variableName)) {
return variable.name;
}
}
return null;
})
.filter((val: string | null) => !isNull(val));
.filter((val): val is string => val !== null);
};
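
The inline regex above is replaced by the shared textContainsVariableReference helper from lib/dashboardVariables/variableReference, whose implementation is not shown in this diff. A minimal sketch consistent with the four formats the removed patterns supported (hypothetical; the real helper may differ):

// Hypothetical sketch of textContainsVariableReference; mirrors the removed inline patterns.
function textContainsVariableReference(text: string, variableName: string): boolean {
  // Escape regex metacharacters in the variable name before interpolating it.
  const escaped = variableName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const patterns = [
    `\\{\\{\\s*?\\.${escaped}\\s*?\\}\\}`, // {{.var}}
    `\\{\\{\\s*${escaped}\\s*\\}\\}`, // {{var}}
    `\\$${escaped}\\b`, // $var
    `\\[\\[\\s*${escaped}\\s*\\]\\]`, // [[var]]
  ];
  return new RegExp(patterns.join('|')).test(text);
}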

export type VariableGraph = Record<string, string[]>;

@@ -246,10 +239,26 @@ export const buildDependencyGraph = (

const hasCycle = topologicalOrder.length !== Object.keys(dependencies)?.length;

// Pre-compute transitive descendants by walking topological order in reverse.
// Each node's transitive descendants = direct children + their transitive descendants.
const transitiveDescendants: VariableGraph = {};
for (let i = topologicalOrder.length - 1; i >= 0; i--) {
const node = topologicalOrder[i];
const desc = new Set<string>();
for (const child of adjList[node] || []) {
desc.add(child);
for (const d of transitiveDescendants[child] || []) {
desc.add(d);
}
}
transitiveDescendants[node] = Array.from(desc);
}

return {
order: topologicalOrder,
graph: adjList,
parentDependencyGraph: buildParentDependencyGraph(adjList),
transitiveDescendants,
hasCycle,
cycleNodes,
};
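
Because the walk visits topologicalOrder in reverse, every child's descendant set is already complete when its parents consume it, so the whole computation is a single pass. A small worked example matching the linear-chain test above:

const { transitiveDescendants } = buildDependencyGraph({
  a: ['b'],
  b: ['c'],
  c: [],
});
// Reverse walk: c -> [], then b -> ['c'], then a -> ['b', 'c'].
// transitiveDescendants === { a: ['b', 'c'], b: ['c'], c: [] }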

@@ -284,33 +293,6 @@ export const onUpdateVariableNode = (
});
};

export const checkAPIInvocation = (
variablesToGetUpdated: string[],
variableData: IDashboardVariable,
parentDependencyGraph?: VariableGraph,
): boolean => {
if (isEmpty(variableData.name)) {
return false;
}

if (isEmpty(parentDependencyGraph)) {
return false;
}

// if no dependency then true
const haveDependency =
parentDependencyGraph?.[variableData.name || '']?.length > 0;
if (!haveDependency) {
return true;
}

// if the variable has dependencies, return true only when it is at the head of the update queue
return (
variablesToGetUpdated.length > 0 &&
variablesToGetUpdated[0] === variableData.name
);
};

export const getOptionsForDynamicVariable = (
normalizedValues: (string | number | boolean)[],
relatedValues: string[],
@@ -375,3 +357,130 @@ export const getSelectValue = (
}
return selectedValue?.toString();
};

/**
* Merges multiple arrays of values into a single deduplicated string array.
*/
export function mergeUniqueStrings(
...arrays: (string | number | boolean)[][]
): string[] {
return [...new Set(arrays.flatMap((arr) => arr.map((v) => v.toString())))];
}
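
Values are stringified before deduplication, so a number and its string form collapse into a single entry:

mergeUniqueStrings(['a', 'b'], ['b', 'c']); // ['a', 'b', 'c']
mergeUniqueStrings([42], ['42']); // ['42']
mergeUniqueStrings([1, true], [2, false]); // ['1', 'true', '2', 'false']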

function isEligibleFilterVariable(
variable: IDashboardVariable,
currentVariableId: string,
): boolean {
if (variable.id === currentVariableId) {
return false;
}
if (variable.type !== 'DYNAMIC') {
return false;
}
if (!variable.dynamicVariablesAttribute) {
return false;
}
if (!variable.selectedValue || isEmpty(variable.selectedValue)) {
return false;
}
return !(variable.showALLOption && variable.allSelected);
}

function formatQueryValue(val: string): string {
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val;
}
return `'${val.replace(/'/g, "\\'")}'`;
}

function buildQueryPart(attribute: string, values: string[]): string {
const formatted = values.map(formatQueryValue);
if (formatted.length === 1) {
return `${attribute} = ${formatted[0]}`;
}
return `${attribute} IN [${formatted.join(', ')}]`;
}
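
Taken together: numeric strings pass through unquoted, everything else is single-quoted with embedded quotes escaped, and a single value collapses the IN clause into an equality:

formatQueryValue('200'); // '200' (left unquoted in the output)
formatQueryValue("O'Brien"); // "'O\\'Brien'"
buildQueryPart('doc_op_type', ['test']); // "doc_op_type = 'test'"
buildQueryPart('k8s.namespace.name', ['zeus', 'gene']); // "k8s.namespace.name IN ['zeus', 'gene']"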

/**
* Builds a filter query string from sibling dynamic variables' selected values.
* e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
*/
export function buildExistingDynamicVariableQuery(
existingVariables: IDashboardVariables | null,
currentVariableId: string,
hasDynamicAttribute: boolean,
): string {
if (!existingVariables || !hasDynamicAttribute) {
return '';
}

const queryParts: string[] = [];

for (const variable of Object.values(existingVariables)) {
// Skip ineligible variables: the current one, non-dynamic ones, or those without an attribute or selected value
if (!isEligibleFilterVariable(variable, currentVariableId)) {
continue;
}

const rawValues = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];

// Filter out empty values and convert to strings
const validValues = rawValues
.filter(
(val): val is string | number | boolean =>
val !== null && val !== undefined && val !== '',
)
.map((val) => val.toString());

if (validValues.length > 0 && variable.dynamicVariablesAttribute) {
queryParts.push(
buildQueryPart(variable.dynamicVariablesAttribute, validValues),
);
}
}

return queryParts.join(' AND ');
}

function isVariableInActiveFetchState(state: string | undefined): boolean {
return state === 'loading' || state === 'revalidating';
}
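
The VariableFetchState union itself lives in providers/Dashboard/store/variableFetchStore and is not part of this diff; judging from the states exercised by the tests below, a sketch of it would be (the exact members are an assumption):

// Hypothetical sketch; the authoritative type is exported by variableFetchStore.
type VariableFetchState =
  | 'idle'
  | 'waiting'
  | 'loading'
  | 'revalidating'
  | 'error';

Only 'loading' and 'revalidating' count as active, so settleVariableFetch below ignores settle calls that arrive once a variable has already left its fetch cycle.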

/**
* Completes or fails a variable's fetch state machine transition.
* No-ops if the variable is not currently in an active fetch state.
*/
export function settleVariableFetch(
name: string | undefined,
outcome: 'complete' | 'failure',
): void {
if (!name) {
return;
}

const currentState = variableFetchStore.getSnapshot().states[name];
if (!isVariableInActiveFetchState(currentState)) {
return;
}

if (outcome === 'complete') {
onVariableFetchComplete(name);
} else {
onVariableFetchFailure(name);
}
}
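
A typical call site would settle the fetch from a query's callbacks. This usage is illustrative only; the query key and fetch function are assumed, not taken from this diff:

// Illustrative wiring with react-query; fetchVariableValues is hypothetical.
useQuery(['dashboardVariable', variableData.name], fetchVariableValues, {
  onSuccess: () => settleVariableFetch(variableData.name, 'complete'),
  onError: () => settleVariableFetch(variableData.name, 'failure'),
});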

export function extractErrorMessage(
error: { message?: string } | null,
): string {
if (!error) {
return SOMETHING_WENT_WRONG;
}
return (
error.message ||
'Please make sure configuration is valid and you have required setup and permissions'
);
}

@@ -1,4 +1,31 @@
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
variableFetchStore: {
getSnapshot: jest.fn(),
},
onVariableFetchComplete: jest.fn(),
onVariableFetchFailure: jest.fn(),
}));

import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import {
areArraysEqual,
buildExistingDynamicVariableQuery,
extractErrorMessage,
mergeUniqueStrings,
onUpdateVariableNode,
settleVariableFetch,
VariableGraph,
} from './util';

// ────────────────────────────────────────────────────────────────
// Existing tests
// ────────────────────────────────────────────────────────────────

describe('areArraysEqual', () => {
it('should return true for equal arrays with same order', () => {
@@ -149,3 +176,348 @@ describe('onUpdateVariableNode', () => {
expect(visited).toEqual(['namespace', 'service', 'pod']);
});
});

// ────────────────────────────────────────────────────────────────
// New tests for functions added in recent commits
// ────────────────────────────────────────────────────────────────

function makeDynamicVar(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'DYNAMIC',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
allSelected: false,
dynamicVariablesAttribute: 'attr',
selectedValue: 'some-value',
...overrides,
} as IDashboardVariable;
}

describe('mergeUniqueStrings', () => {
it('should merge two arrays and deduplicate', () => {
expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toEqual(['a', 'b', 'c']);
});

it('should convert numbers and booleans to strings', () => {
expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toEqual([
'1',
'true',
'hello',
'2',
'false',
]);
});

it('should deduplicate when number and its string form both appear', () => {
expect(mergeUniqueStrings([42], ['42'])).toEqual(['42']);
});

it('should handle a single array', () => {
expect(mergeUniqueStrings(['x', 'y', 'x'])).toEqual(['x', 'y']);
});

it('should handle three or more arrays', () => {
expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toEqual([
'a',
'b',
'c',
]);
});

it('should return empty array when no arrays are provided', () => {
expect(mergeUniqueStrings()).toEqual([]);
});

it('should return empty array when all input arrays are empty', () => {
expect(mergeUniqueStrings([], [], [])).toEqual([]);
});

it('should preserve order of first occurrence', () => {
expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toEqual(['c', 'a', 'b']);
});
});

describe('buildExistingDynamicVariableQuery', () => {
// --- Guard clauses ---
it('should return empty string when existingVariables is null', () => {
expect(buildExistingDynamicVariableQuery(null, 'v1', true)).toBe('');
});

it('should return empty string when hasDynamicAttribute is false', () => {
const variables = { v2: makeDynamicVar({ id: 'v2' }) };
expect(buildExistingDynamicVariableQuery(variables, 'v1', false)).toBe('');
});

// --- Eligibility filtering ---
it('should skip the current variable (same id)', () => {
const variables = {
v1: makeDynamicVar({
id: 'v1',
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should skip non-DYNAMIC variables', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should skip variables without dynamicVariablesAttribute', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: undefined,
selectedValue: 'val',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should skip variables with null selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: null }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should skip variables with empty string selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: '' }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should skip variables with empty array selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: [] }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should skip variables where showALLOption and allSelected are both true', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: true,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});

it('should include variable with showALLOption true but allSelected false', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: false,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"ns = 'prod'",
);
});

// --- Value formatting ---
it('should quote string values in the query', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: 'zeus',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name = 'zeus'",
);
});

it('should leave numeric values unquoted', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: '200',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
'http.status_code = 200',
);
});

it('should escape single quotes in string values', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'user.name',
selectedValue: "O'Brien",
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"user.name = 'O\\'Brien'",
);
});

it('should build an IN clause for array selectedValue with multiple items', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene']",
);
});

it('should handle mix of numeric and string values in IN clause', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: ['200', 'unknown'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"http.status_code IN [200, 'unknown']",
);
});

it('should filter out empty string values from array', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'region',
selectedValue: ['us-east', '', 'eu-west'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"region IN ['us-east', 'eu-west']",
);
});

// --- Multiple siblings ---
it('should join multiple sibling variables with AND', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
v3: makeDynamicVar({
id: 'v3',
dynamicVariablesAttribute: 'doc_op_type',
selectedValue: 'test',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'",
);
});

it('should return empty string when no variables are eligible', () => {
const variables = {
v1: makeDynamicVar({ id: 'v1' }), // same as current — skipped
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }), // not DYNAMIC
v3: makeDynamicVar({ id: 'v3', selectedValue: null }), // no value
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
});

describe('settleVariableFetch', () => {
const mockGetSnapshot = variableFetchStore.getSnapshot as jest.Mock;
const mockComplete = onVariableFetchComplete as jest.Mock;
const mockFailure = onVariableFetchFailure as jest.Mock;

beforeEach(() => {
jest.clearAllMocks();
});

it('should no-op when name is undefined', () => {
settleVariableFetch(undefined, 'complete');
expect(mockGetSnapshot).not.toHaveBeenCalled();
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
});

it.each(['idle', 'waiting', 'error', undefined] as const)(
'should no-op when variable state is %s',
(state) => {
mockGetSnapshot.mockReturnValue({ states: { myVar: state } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
},
);

it('should call onVariableFetchComplete when state is loading and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});

it('should call onVariableFetchComplete when state is revalidating and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});

it('should call onVariableFetchFailure when state is loading and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});

it('should call onVariableFetchFailure when state is revalidating and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
});

describe('extractErrorMessage', () => {
const FALLBACK_MESSAGE =
'Please make sure configuration is valid and you have required setup and permissions';

it('should return SOMETHING_WENT_WRONG when error is null', () => {
expect(extractErrorMessage(null)).toBe('Something went wrong');
});

it('should return the error message when it exists', () => {
expect(extractErrorMessage({ message: 'Query timeout' })).toBe(
'Query timeout',
);
});

it('should return fallback when error object has no message property', () => {
expect(extractErrorMessage({})).toBe(FALLBACK_MESSAGE);
});

it('should return fallback when error.message is empty string', () => {
expect(extractErrorMessage({ message: '' })).toBe(FALLBACK_MESSAGE);
});

it('should return fallback when error.message is undefined', () => {
expect(extractErrorMessage({ message: undefined })).toBe(FALLBACK_MESSAGE);
});
});

@@ -1,17 +1,11 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableItemProps } from '../VariableItem';

export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: IDashboardVariable;
variableData: VariableItemProps['variableData'];
onValueUpdate: VariableItemProps['onValueUpdate'];
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}

@@ -17,6 +17,19 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';

import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';

// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));

// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
getFieldValues: jest.fn(),
@@ -95,7 +108,7 @@ describe('Dynamic Variable Default Behavior', () => {
}
}
if (queryFn) {
queryFn();
queryFn({ signal: undefined });
}
}
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
@@ -234,6 +247,7 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});

@@ -487,6 +501,7 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});

@@ -49,15 +49,11 @@ const mockDashboard = {
// Mock the dashboard provider with stable functions to prevent infinite loops
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();

jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): any => ({
selectedDashboard: mockDashboard,
setSelectedDashboard: mockSetSelectedDashboard,
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
variablesToGetUpdated: ['env'], // Stable initial value
setVariablesToGetUpdated: mockSetVariablesToGetUpdated,
}),
}));

@@ -0,0 +1,117 @@
import { AlignedData } from 'uplot';

import { getInitialStackedBands, stack } from '../stackUtils';

describe('stackUtils', () => {
describe('stack', () => {
const neverOmit = (): boolean => false;

it('preserves time axis as first row', () => {
const data: AlignedData = [
[100, 200, 300],
[1, 2, 3],
[4, 5, 6],
];
const { data: result } = stack(data, neverOmit);
expect(result[0]).toEqual([100, 200, 300]);
});

it('stacks value series cumulatively (last = raw, first = total)', () => {
// Time, then 3 value series. Stack order: last series stays raw, then we add upward.
const data: AlignedData = [
[0, 1, 2],
[1, 2, 3], // series 1
[4, 5, 6], // series 2
[7, 8, 9], // series 3
];
const { data: result } = stack(data, neverOmit);
// result[1] = s1+s2+s3, result[2] = s2+s3, result[3] = s3
expect(result[1]).toEqual([12, 15, 18]); // 1+4+7, 2+5+8, 3+6+9
expect(result[2]).toEqual([11, 13, 15]); // 4+7, 5+8, 6+9
expect(result[3]).toEqual([7, 8, 9]);
});

it('treats null values as 0 when stacking', () => {
const data: AlignedData = [
[0, 1],
[1, null],
[null, 10],
];
const { data: result } = stack(data, neverOmit);
expect(result[1]).toEqual([1, 10]); // total
expect(result[2]).toEqual([0, 10]); // last series with null→0
});

it('copies omitted series as-is without accumulating', () => {
// Omit series 2 (index 2); series 1 and 3 are stacked.
const data: AlignedData = [
[0, 1],
[10, 20], // series 1
[100, 200], // series 2 - omitted
[1, 2], // series 3
];
const omitSeries2 = (i: number): boolean => i === 2;
const { data: result } = stack(data, omitSeries2);
// series 3 raw: [1, 2]; series 2 omitted: [100, 200] as-is; series 1 stacked with s3: [11, 22]
expect(result[1]).toEqual([11, 22]); // 10+1, 20+2
expect(result[2]).toEqual([100, 200]); // copied, not stacked
expect(result[3]).toEqual([1, 2]);
});

it('returns bands between consecutive visible series when none omitted', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
[3, 4],
[5, 6],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([{ series: [1, 2] }, { series: [2, 3] }]);
});

it('returns bands only between visible series when some are omitted', () => {
// 4 value series; omit index 2. Visible: 1, 3, 4. Bands: [1,3], [3,4]
const data: AlignedData = [[0], [1], [2], [3], [4]];
const omitSeries2 = (i: number): boolean => i === 2;
const { bands } = stack(data, omitSeries2);
expect(bands).toEqual([{ series: [1, 3] }, { series: [3, 4] }]);
});

it('returns empty bands when only one value series', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([]);
});
});

describe('getInitialStackedBands', () => {
it('returns one band between each consecutive pair for seriesCount 3', () => {
expect(getInitialStackedBands(3)).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
]);
});

it('returns empty array for seriesCount 0 or 1', () => {
expect(getInitialStackedBands(0)).toEqual([]);
expect(getInitialStackedBands(1)).toEqual([]);
});

it('returns single band for seriesCount 2', () => {
expect(getInitialStackedBands(2)).toEqual([{ series: [1, 2] }]);
});

it('returns bands [1,2], [2,3], ..., [n-1, n] for seriesCount n', () => {
const bands = getInitialStackedBands(5);
expect(bands).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
{ series: [3, 4] },
{ series: [4, 5] },
]);
});
});
});
@@ -0,0 +1,116 @@
import uPlot, { AlignedData } from 'uplot';

/**
* Stack data cumulatively (top-down: first series = top, last = bottom).
* When `omit(seriesIndex)` returns true, that series is excluded from stacking.
*/
export function stack(
data: AlignedData,
omit: (seriesIndex: number) => boolean,
): { data: AlignedData; bands: uPlot.Band[] } {
const timeAxis = data[0];
const pointCount = timeAxis.length;
const valueSeriesCount = data.length - 1; // exclude time axis

const stackedSeries = buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
});
const bands = buildFillBands(valueSeriesCount + 1, omit); // +1 for 1-based series indices

return {
data: [timeAxis, ...stackedSeries] as AlignedData,
bands,
};
}

interface BuildStackedSeriesParams {
data: AlignedData;
valueSeriesCount: number;
pointCount: number;
omit: (seriesIndex: number) => boolean;
}

/**
* Accumulate from last series upward: last series = raw values, first = total.
* Omitted series are copied as-is (no accumulation).
*/
function buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
}: BuildStackedSeriesParams): (number | null)[][] {
const stackedSeries: (number | null)[][] = Array(valueSeriesCount);
const cumulativeSums = Array(pointCount).fill(0) as number[];

for (let seriesIndex = valueSeriesCount; seriesIndex >= 1; seriesIndex--) {
const rawValues = data[seriesIndex] as (number | null)[];

if (omit(seriesIndex)) {
stackedSeries[seriesIndex - 1] = rawValues;
} else {
stackedSeries[seriesIndex - 1] = rawValues.map((rawValue, pointIndex) => {
const numericValue = rawValue == null ? 0 : Number(rawValue);
return (cumulativeSums[pointIndex] += numericValue);
});
}
}

return stackedSeries;
}

/**
* Bands define fill between consecutive visible series for stacked appearance.
* uPlot format: [upperSeriesIdx, lowerSeriesIdx].
*/
function buildFillBands(
seriesLength: number,
omit: (seriesIndex: number) => boolean,
): uPlot.Band[] {
const bands: uPlot.Band[] = [];

for (let seriesIndex = 1; seriesIndex < seriesLength; seriesIndex++) {
if (omit(seriesIndex)) {
continue;
}
const nextVisibleSeriesIndex = findNextVisibleSeriesIndex(
seriesLength,
seriesIndex,
omit,
);
if (nextVisibleSeriesIndex !== -1) {
bands.push({ series: [seriesIndex, nextVisibleSeriesIndex] });
}
}

return bands;
}

function findNextVisibleSeriesIndex(
seriesLength: number,
afterIndex: number,
omit: (seriesIndex: number) => boolean,
): number {
for (let i = afterIndex + 1; i < seriesLength; i++) {
if (!omit(i)) {
return i;
}
}
return -1;
}

/**
* Returns band indices for initial stacked state (no series omitted).
* Top-down: first series at top, band fills between consecutive series.
* uPlot band format: [upperSeriesIdx, lowerSeriesIdx].
*/
export function getInitialStackedBands(seriesCount: number): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesCount; seriesIndex++) {
bands.push({ series: [seriesIndex, seriesIndex + 1] });
}
return bands;
}
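
A worked example tying the two exports together, using the same numbers as the tests above: with three value series over a three-point time axis and nothing omitted, accumulation runs bottom-up and bands fill between consecutive series:

const { data, bands } = stack(
  [
    [0, 1, 2], // time axis
    [1, 2, 3], // series 1
    [4, 5, 6], // series 2
    [7, 8, 9], // series 3
  ],
  () => false, // no series omitted
);
// data[1] === [12, 15, 18] (s1+s2+s3), data[2] === [11, 13, 15] (s2+s3), data[3] === [7, 8, 9]
// bands === [{ series: [1, 2] }, { series: [2, 3] }], the same as getInitialStackedBands(3)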

@@ -0,0 +1,313 @@
import { renderHook } from '@testing-library/react';
import uPlot from 'uplot';

import type { UseBarChartStackingParams } from '../useBarChartStacking';
import { useBarChartStacking } from '../useBarChartStacking';

type MockConfig = { addHook: jest.Mock };

function asConfig(c: MockConfig): UseBarChartStackingParams['config'] {
return (c as unknown) as UseBarChartStackingParams['config'];
}

function createMockConfig(): {
config: MockConfig;
invokeSetData: (plot: uPlot) => void;
invokeSetSeries: (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
) => void;
removeSetData: jest.Mock;
removeSetSeries: jest.Mock;
} {
let setDataHandler: ((plot: uPlot) => void) | null = null;
let setSeriesHandler:
| ((plot: uPlot, seriesIndex: number | null, opts: uPlot.Series) => void)
| null = null;

const removeSetData = jest.fn();
const removeSetSeries = jest.fn();

const addHook = jest.fn(
(
hookName: string,
handler: (plot: uPlot, ...args: unknown[]) => void,
): (() => void) => {
if (hookName === 'setData') {
setDataHandler = handler as (plot: uPlot) => void;
return removeSetData;
}
if (hookName === 'setSeries') {
setSeriesHandler = handler as (
plot: uPlot,
seriesIndex: number | null,
opts: uPlot.Series,
) => void;
return removeSetSeries;
}
return jest.fn();
},
);

const config: MockConfig = { addHook };

const invokeSetData = (plot: uPlot): void => {
setDataHandler?.(plot);
};

const invokeSetSeries = (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
): void => {
setSeriesHandler?.(plot, seriesIndex, opts as uPlot.Series);
};

return {
config,
invokeSetData,
invokeSetSeries,
removeSetData,
removeSetSeries,
};
}

function createMockPlot(overrides: Partial<uPlot> = {}): uPlot {
return ({
data: [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
],
series: [{ show: true }, { show: true }, { show: true }],
delBand: jest.fn(),
addBand: jest.fn(),
setData: jest.fn(),
...overrides,
} as unknown) as uPlot;
}

describe('useBarChartStacking', () => {
it('returns data as-is when isStackedBarChart is false', () => {
const data: uPlot.AlignedData = [
[100, 200],
[1, 2],
[3, 4],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: null,
}),
);
expect(result.current).toBe(data);
});

it('returns data as-is when config is null and isStackedBarChart is true', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[4, 5],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
// Still returns stacked data (computed in useMemo); no hooks registered
expect(result.current[0]).toEqual([0, 1]);
expect(result.current[1]).toEqual([5, 7]); // stacked
expect(result.current[2]).toEqual([4, 5]);
});

it('returns stacked data when isStackedBarChart is true and multiple value series', () => {
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current[0]).toEqual([0, 1, 2]);
expect(result.current[1]).toEqual([12, 15, 18]); // s1+s2+s3
expect(result.current[2]).toEqual([11, 13, 15]); // s2+s3
expect(result.current[3]).toEqual([7, 8, 9]);
});

it('returns data as-is when only one value series (no stacking needed)', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current).toEqual(data);
});

it('registers setData and setSeries hooks when isStackedBarChart and config provided', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];

renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);

expect(config.addHook).toHaveBeenCalledWith('setData', expect.any(Function));
expect(config.addHook).toHaveBeenCalledWith(
'setSeries',
expect.any(Function),
);
});

it('does not register hooks when isStackedBarChart is false', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];

renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: asConfig(config),
}),
);

expect(config.addHook).not.toHaveBeenCalled();
});

it('calls cleanup when unmounted', () => {
const { config, removeSetData, removeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];

const { unmount } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);

unmount();

expect(removeSetData).toHaveBeenCalled();
expect(removeSetSeries).toHaveBeenCalled();
});

it('re-stacks and updates plot when setData hook is invoked', () => {
const { config, invokeSetData } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
];
const plot = createMockPlot({
data: [
[0, 1, 2],
[5, 7, 9],
[4, 5, 6],
],
});

renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);

invokeSetData(plot);

expect(plot.delBand).toHaveBeenCalledWith(null);
expect(plot.addBand).toHaveBeenCalled();
expect(plot.setData).toHaveBeenCalledWith(
expect.arrayContaining([
[0, 1, 2],
expect.any(Array), // stacked row 1
expect.any(Array), // stacked row 2
]),
);
});

it('re-stacks when setSeries hook is invoked (e.g. legend toggle)', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[10, 20],
[5, 10],
];
// Plot data must match unstacked length so canApplyStacking passes
const plot = createMockPlot({
data: [
[0, 1],
[15, 30],
[5, 10],
],
});

renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);

invokeSetSeries(plot, 1, { show: false });

expect(plot.setData).toHaveBeenCalled();
});

it('does not re-stack when setSeries is called with focus option', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const plot = createMockPlot();

renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);

(plot.setData as jest.Mock).mockClear();
invokeSetSeries(plot, 1, { focus: true } as uPlot.Series);

expect(plot.setData).not.toHaveBeenCalled();
});
});
@@ -11,6 +11,7 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { get } from 'lodash-es';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -77,6 +78,12 @@ export function prepareBarPanelConfig({
builder.setBands(getInitialStackedBands(seriesCount));
}

const stepIntervals: Record<string, number> = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);

const seriesList: QueryData[] = apiResponse?.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(
@@ -89,6 +96,8 @@ export function prepareBarPanelConfig({
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;

const currentStepInterval = get(stepIntervals, series.queryName, undefined);

builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
@@ -101,6 +110,7 @@ export function prepareBarPanelConfig({
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
stepInterval: currentStepInterval,
});
});
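
The lodash get calls fall back when the meta path or a queryName entry is missing, so series without a configured step simply keep stepInterval undefined. A small sketch of the lookup behavior (the response shape here is assumed for illustration):

// Assumed response shape, for illustration only.
const apiResponse = { data: { newResult: { meta: { stepIntervals: { A: 60 } } } } };
const stepIntervals = get(apiResponse, 'data.newResult.meta.stepIntervals', {});
get(stepIntervals, 'A', undefined); // 60
get(stepIntervals, 'B', undefined); // undefined, so series B gets no step interval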

@@ -6,7 +6,7 @@ import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
@@ -63,8 +63,12 @@ function QuerySection({
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions
version={ENTITY_VERSION_V5}
showFunctions={
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>

@@ -6,10 +6,12 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
@@ -53,7 +55,6 @@ function GridCardGraph({
customOnRowClick,
customTimeRangeWindowForCoRelation,
enableDrillDown,
widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
@@ -64,8 +65,8 @@ function GridCardGraph({
toScrollWidgetId,
setToScrollWidgetId,
setDashboardQueryRangeCalled,
variablesToGetUpdated,
} = useDashboard();

const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -117,10 +118,25 @@ function GridCardGraph({

const updatedQuery = widget?.query;

const referencedVariableNames = useMemo(() => {
if (!variables || !updatedQuery) {
return [];
}
const allNames = Object.values(variables)
.map((v) => v.name)
.filter((name): name is string => !!name);
return getVariableReferencesInQuery(updatedQuery, allNames);
}, [updatedQuery, variables]);

const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);

const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;
const isPanelWaitingOnAnyVariable = useIsPanelWaitingOnVariable(
referencedVariableNames,
);

const queryEnabledCondition =
isVisible && !isEmptyWidget && isQueryEnabled && !isPanelWaitingOnAnyVariable;

const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (widget.panelTypes !== PANEL_TYPES.LIST) {
@@ -177,27 +193,6 @@ function GridCardGraph({
[requestData.query],
);

// Bring back dependency on variable chaining for panels to refetch,
// but only for non-dynamic variables. We derive a stable token from
// the head of the variablesToGetUpdated queue when it's non-dynamic.
const nonDynamicVariableChainToken = useMemo(() => {
if (!variablesToGetUpdated || variablesToGetUpdated.length === 0) {
return undefined;
}
if (!variables) {
return undefined;
}
const headName = variablesToGetUpdated[0];
const variableObj = Object.values(variables).find(
(variable) => variable?.name === headName,
);
if (variableObj && variableObj.type !== 'DYNAMIC') {
return headName;
}
return undefined;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variablesToGetUpdated, variables]);

const queryResponse = useGetQueryRange(
{
...requestData,
@@ -224,11 +219,7 @@ function GridCardGraph({
requestData,
variables
? Object.entries(variables).reduce((acc, [id, variable]) => {
if (
variable.type !== 'DYNAMIC' ||
(widgetsByDynamicVariableId?.[variable.id] &&
widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
) {
if (variable.name && referencedVariableNames.includes(variable.name)) {
return { ...acc, [id]: variable.selectedValue };
}
return acc;
@@ -237,9 +228,6 @@ function GridCardGraph({
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
? [customTimeRange.startTime, customTimeRange.endTime]
: []),
// Include non-dynamic variable chaining token to drive refetches
// only when a non-dynamic variable is at the head of the queue
...(nonDynamicVariableChainToken ? [nonDynamicVariableChainToken] : []),
],
retry(failureCount, error): boolean {
if (
@@ -252,7 +240,7 @@ function GridCardGraph({
return failureCount < 2;
},
keepPreviousData: true,
enabled: queryEnabledCondition && !nonDynamicVariableChainToken,
enabled: queryEnabledCondition,
refetchOnMount: false,
onError: (error) => {
const errorMessage =
@@ -319,7 +307,7 @@ function GridCardGraph({
threshold={threshold}
headerMenuList={menuList}
isFetchingResponse={
queryResponse.isFetching || variablesToGetUpdated.length > 0
queryResponse.isFetching || isPanelWaitingOnAnyVariable
}
setRequestData={setRequestData}
onClickHandler={onClickHandler}

@@ -72,7 +72,6 @@ export interface GridCardGraphProps {
customOnRowClick?: (record: RowData) => void;
customTimeRangeWindowForCoRelation?: string | undefined;
enableDrillDown?: boolean;
widgetsByDynamicVariableId?: Record<string, string[]>;
}

export interface GetGraphVisibilityStateOnLegendClickProps {

@@ -16,7 +16,6 @@ import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -102,8 +101,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});

const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();

useEffect(() => {
setCurrentPanelMap(panelMap);
}, [panelMap]);
@@ -617,7 +614,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
onDragSelect={onDragSelect}
dataAvailable={checkIfDataExists}
enableDrillDown={enableDrillDown}
widgetsByDynamicVariableId={widgetsByDynamicVariableId}
/>
</Card>
</CardContainer>

@@ -5,16 +5,12 @@ import { PanelMode } from 'container/DashboardContainer/visualization/panels/typ
import { WidgetGraphComponentProps } from 'container/GridCardLayout/GridCard/types';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';

export type PanelWrapperProps = {
  queryResponse: UseQueryResult<
    SuccessResponse<MetricRangePayloadProps, unknown>,
    Error
  >;
  queryResponse: UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
  widget: Widgets;
  setRequestData?: WidgetGraphComponentProps['setRequestData'];
  isFullViewMode?: boolean;

@@ -0,0 +1,316 @@
import { act, renderHook } from '@testing-library/react';
import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
  VariableFetchState,
  variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { useIsPanelWaitingOnVariable } from '../useVariableFetchState';

function makeVariable(
  overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
  return {
    name: overrides.id,
    description: '',
    type: 'QUERY',
    sort: 'DISABLED',
    multiSelect: false,
    showALLOption: false,
    ...overrides,
  };
}

function resetStores(): void {
  variableFetchStore.set(() => ({
    states: {},
    lastUpdated: {},
    cycleIds: {},
  }));
  dashboardVariablesStore.set(() => ({
    dashboardId: '',
    variables: {},
    sortedVariablesArray: [],
    dependencyData: null,
    variableTypes: {},
    dynamicVariableOrder: [],
  }));
}

function setFetchStates(states: Record<string, VariableFetchState>): void {
  variableFetchStore.set(() => ({
    states,
    lastUpdated: {},
    cycleIds: {},
  }));
}

function setDashboardVariables(
  overrides: Partial<IDashboardVariablesStoreState>,
): void {
  dashboardVariablesStore.set(() => ({
    dashboardId: '',
    variables: {},
    sortedVariablesArray: [],
    dependencyData: null,
    variableTypes: {},
    dynamicVariableOrder: [],
    ...overrides,
  }));
}

describe('useIsPanelWaitingOnVariable', () => {
  beforeEach(() => {
    resetStores();
  });

  it('should return false when variableNames is empty', () => {
    const { result } = renderHook(() => useIsPanelWaitingOnVariable([]));
    expect(result.current).toBe(false);
  });

  it('should return false when all referenced variables are idle', () => {
    setFetchStates({ a: 'idle', b: 'idle' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'val1' }),
        b: makeVariable({ id: 'b', selectedValue: 'val2' }),
      },
      variableTypes: { a: 'QUERY', b: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
    expect(result.current).toBe(false);
  });

  it('should return true when a variable is loading with empty selectedValue', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });

  it('should return true when a variable is waiting with empty selectedValue', () => {
    setFetchStates({ a: 'waiting' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: '' }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });

  it('should return true when a variable is revalidating with empty selectedValue', () => {
    setFetchStates({ a: 'revalidating' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });

  it('should return false when a variable is loading but has a selectedValue', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'some-value' }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should return true for DYNAMIC variable with allSelected=true that is loading', () => {
    setFetchStates({ dyn: 'loading' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: 'some-val',
          allSelected: true,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(true);
  });

  it('should return true for DYNAMIC variable with allSelected=true that is waiting', () => {
    setFetchStates({ dyn: 'waiting' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: 'val',
          allSelected: true,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(true);
  });

  it('should return false for DYNAMIC variable with allSelected=true that is idle', () => {
    setFetchStates({ dyn: 'idle' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: 'val',
          allSelected: true,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(false);
  });

  it('should return false for non-DYNAMIC variable with allSelected=false and non-empty value that is loading', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({
          id: 'a',
          selectedValue: 'val',
          allSelected: false,
        }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should return true if any one of multiple variables is blocking', () => {
    setFetchStates({ a: 'idle', b: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'val' }),
        b: makeVariable({ id: 'b', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY', b: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
    expect(result.current).toBe(true);
  });

  it('should return false when variable has no entry in fetch store (treated as idle)', () => {
    setFetchStates({}); // no state entry for 'a'
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'val' }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should return false when variable is in error state with empty selectedValue', () => {
    setFetchStates({ a: 'error' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should react to store updates', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);

    // Simulate variable fetch completing
    act(() => {
      variableFetchStore.update((d) => {
        d.states.a = 'idle';
      });
    });

    expect(result.current).toBe(false);
  });

  it('should handle DYNAMIC variable with allSelected=false and empty selectedValue as blocking', () => {
    setFetchStates({ dyn: 'loading' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: undefined,
          allSelected: false,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(true);
  });

  it('should handle variable with array selectedValue as non-blocking when loading', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: ['val1', 'val2'] }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should handle variable with empty array selectedValue as blocking when loading', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: [] }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });
});
153
frontend/src/hooks/dashboard/useVariableFetchState.ts
Normal file
@@ -0,0 +1,153 @@
import { useCallback, useMemo, useRef, useSyncExternalStore } from 'react';
import isEmpty from 'lodash-es/isEmpty';
import {
  IVariableFetchStoreState,
  VariableFetchState,
  variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';

import { useDashboardVariablesSelector } from './useDashboardVariables';

/**
 * Generic selector hook for the variable fetch store.
 * Same pattern as useDashboardVariablesSelector.
 */
const useVariableFetchSelector = <T>(
  selector: (state: IVariableFetchStoreState) => T,
): T => {
  const selectorRef = useRef(selector);
  selectorRef.current = selector;

  const getSnapshot = useCallback(
    () => selectorRef.current(variableFetchStore.getSnapshot()),
    [],
  );

  return useSyncExternalStore(variableFetchStore.subscribe, getSnapshot);
};

interface UseVariableFetchStateReturn {
  /** The current fetch state for this variable */
  variableFetchState: VariableFetchState;
  /** Current fetch cycle — include in react-query keys to auto-cancel stale requests */
  variableFetchCycleId: number;
  /** True if this variable is idle (not waiting and not fetching) */
  isVariableSettled: boolean;
  /** True if this variable is actively fetching (loading or revalidating) */
  isVariableFetching: boolean;
  /** True if this variable has completed at least one fetch cycle */
  hasVariableFetchedOnce: boolean;
  /** True if any parent variable hasn't settled yet */
  isVariableWaitingForDependencies: boolean;
  /** Message describing what this variable is waiting on, or undefined if not waiting */
  variableDependencyWaitMessage?: string;
}

/**
 * Per-variable hook that exposes the fetch state of a single variable.
 * Reusable by both variable input components and panel components.
 *
 * Subscribes to both variableFetchStore (for states) and
 * dashboardVariablesStore (for parent graph) to compute derived values.
 */
export function useVariableFetchState(
  variableName: string,
): UseVariableFetchStateReturn {
  // This variable's fetch state (loading, waiting, idle, etc.)
  const variableFetchState = useVariableFetchSelector(
    (s) => s.states[variableName] || 'idle',
  ) as VariableFetchState;

  // All variable states — needed to check if parent variables are still in-flight
  const allStates = useVariableFetchSelector((s) => s.states);

  // Parent dependency graph — maps each variable to its direct parents
  // e.g. { "childVariable": ["parentVariable"] } means "childVariable" depends on "parentVariable"
  const parentGraph = useDashboardVariablesSelector(
    (s) => s.dependencyData?.parentDependencyGraph,
  );

  // Timestamp of last successful fetch — 0 means never fetched
  const lastUpdated = useVariableFetchSelector(
    (s) => s.lastUpdated[variableName] || 0,
  );

  // Per-variable cycle counter — used as part of react-query keys
  // so changing it auto-cancels stale requests for this variable only
  const variableFetchCycleId = useVariableFetchSelector(
    (s) => s.cycleIds[variableName] || 0,
  );

  const isVariableSettled = variableFetchState === 'idle';

  const isVariableFetching =
    variableFetchState === 'loading' || variableFetchState === 'revalidating';
  // True after at least one successful fetch — used to show stale data while revalidating
  const hasVariableFetchedOnce = lastUpdated > 0;

  // Variable type — needed to differentiate waiting messages
  const variableType = useDashboardVariablesSelector(
    (s) => s.variableTypes[variableName],
  );

  // Parent variable names that haven't settled yet
  const unsettledParents = useMemo(() => {
    const parents = parentGraph?.[variableName] || [];
    return parents.filter((p) => (allStates[p] || 'idle') !== 'idle');
  }, [parentGraph, variableName, allStates]);

  const isVariableWaitingForDependencies = unsettledParents.length > 0;

  const variableDependencyWaitMessage = useMemo(() => {
    if (variableFetchState !== 'waiting') {
      return;
    }

    if (variableType === 'DYNAMIC') {
      return 'Waiting for all query variable options to load.';
    }

    if (unsettledParents.length === 0) {
      return;
    }

    const quoted = unsettledParents.map((p) => `"${p}"`);
    const names =
      quoted.length > 1
        ? `${quoted.slice(0, -1).join(', ')} and ${quoted[quoted.length - 1]}`
        : quoted[0];
    return `Waiting for options of ${names} to load.`;
  }, [variableFetchState, variableType, unsettledParents]);

  return {
    variableFetchState,
    isVariableSettled,
    isVariableWaitingForDependencies,
    variableDependencyWaitMessage,
    isVariableFetching,
    hasVariableFetchedOnce,
    variableFetchCycleId,
  };
}
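
The cycle id above is meant to be folded into a react-query key, so that bumping it on a new fetch cycle invalidates any in-flight request for that variable. A minimal sketch of that wiring (useQuery from react-query; the variable name, query-key shape, and fetchVariableOptions fetcher are illustrative assumptions, not the app's actual wiring):

const { variableFetchCycleId } = useVariableFetchState('env');
useQuery({
  // Bumping variableFetchCycleId changes the key, so react-query cancels the
  // stale request and issues a fresh one instead of delivering old data.
  queryKey: ['variable-options', 'env', variableFetchCycleId],
  queryFn: () => fetchVariableOptions('env'), // hypothetical fetcher
});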

export function useIsPanelWaitingOnVariable(variableNames: string[]): boolean {
  const states = useVariableFetchSelector((s) => s.states);
  const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
  const variableTypesMap = useDashboardVariablesSelector((s) => s.variableTypes);

  return variableNames.some((name) => {
    const variableFetchState = states[name];
    const { selectedValue, allSelected } = dashboardVariables?.[name] || {};

    const isVariableInFetchingOrWaitingState =
      variableFetchState === 'loading' ||
      variableFetchState === 'revalidating' ||
      variableFetchState === 'waiting';

    if (variableTypesMap[name] === 'DYNAMIC' && allSelected) {
      return isVariableInFetchingOrWaitingState;
    }

    return isEmpty(selectedValue) ? isVariableInFetchingOrWaitingState : false;
  });
}
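
The blocking rule can be restated as a pure function: a referenced variable blocks a panel only while it is in flight ('loading', 'revalidating', or 'waiting') and the panel has no usable value to render with, either because no value is selected yet, or because it is a DYNAMIC variable in ALL mode whose effective value set is about to change. A self-contained sketch (not the shipped code; the real hook reads the same inputs from the two stores and uses lodash isEmpty):

type FetchState = 'idle' | 'waiting' | 'loading' | 'revalidating' | 'error';

function isBlocking(
  state: FetchState | undefined,
  selectedValue: unknown,
  isDynamicAllSelected: boolean,
): boolean {
  const inFlight =
    state === 'loading' || state === 'revalidating' || state === 'waiting';
  if (isDynamicAllSelected) {
    return inFlight;
  }
  // Approximates lodash isEmpty for the value shapes used by variables.
  const hasValue =
    selectedValue != null &&
    selectedValue !== '' &&
    !(Array.isArray(selectedValue) && selectedValue.length === 0);
  return hasValue ? false : inFlight;
}

// isBlocking('loading', undefined, false)   → true  (nothing to render with)
// isBlocking('revalidating', 'prod', false) → false (stale value is usable)
// isBlocking('waiting', 'val', true)        → true  (DYNAMIC with ALL selected)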

@@ -3,6 +3,7 @@ import { TelemetryFieldKey } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { placeWidgetAtBottom } from 'container/NewWidget/utils';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isArray } from 'lodash-es';
import {
  Dashboard,
@@ -116,10 +117,17 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
  if (
    variable.dynamicVariablesAttribute &&
    variable.name &&
    filter.key?.key === variable.dynamicVariablesAttribute &&
    ((isArray(filter.value) &&
      filter.value.includes(`$${variable.name}`)) ||
      filter.value === `$${variable.name}`) &&
    (isArray(filter.value)
      ? filter.value.some(
          (v) =>
            typeof v === 'string' &&
            variable.name &&
            textContainsVariableReference(v, variable.name),
        )
      : typeof filter.value === 'string' &&
        textContainsVariableReference(filter.value, variable.name)) &&
    !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
  ) {
    dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -132,7 +140,12 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
  if (
    variable.dynamicVariablesAttribute &&
    queryData.filter?.expression?.includes(`$${variable.name}`) &&
    variable.name &&
    queryData.filter?.expression &&
    textContainsVariableReference(
      queryData.filter.expression,
      variable.name,
    ) &&
    !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
  ) {
    dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -149,7 +162,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
  if (
    variable.dynamicVariablesAttribute &&
    promqlQuery.query?.includes(`$${variable.name}`) &&
    variable.name &&
    promqlQuery.query &&
    textContainsVariableReference(promqlQuery.query, variable.name) &&
    !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
  ) {
    dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -165,7 +180,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
  if (
    variable.dynamicVariablesAttribute &&
    clickhouseQuery.query?.includes(`$${variable.name}`) &&
    variable.name &&
    clickhouseQuery.query &&
    textContainsVariableReference(clickhouseQuery.query, variable.name) &&
    !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
  ) {
    dynamicVariableToWidgetsMap[variable.id].push(widget.id);
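
Each of these hunks swaps a bare includes(`$${variable.name}`) check for textContainsVariableReference, which avoids false positives when one variable name is a prefix of another (a naive substring check matches $env inside $environment). A minimal sketch of what such a reference check has to do; the real implementation lives in lib/dashboardVariables/variableReference and may differ, e.g. it may also recognize {{.var}} and [[var]] syntaxes:

// Assumes `name` is a plain identifier (no regex metacharacters).
function containsVariableReference(text: string, name: string): boolean {
  // Match $name only when not followed by another identifier character.
  const pattern = new RegExp(`\\$${name}(?![\\w.])`);
  return pattern.test(text);
}

// containsVariableReference('service = $environment', 'env') → false
// containsVariableReference('service = $env', 'env')         → true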

@@ -10,13 +10,12 @@ import {
  GetQueryResultsProps,
} from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { SuccessResponse, Warning } from 'types/api';
import APIError from 'types/api/error';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';

type UseGetQueryRangeOptions = UseQueryOptions<
  SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
  MetricQueryRangeSuccessResponse,
  APIError | Error
>;

@@ -30,10 +29,7 @@ type UseGetQueryRange = (
    widgetIndex: number;
    publicDashboardId: string;
  },
) => UseQueryResult<
  SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
  Error
>;
) => UseQueryResult<MetricQueryRangeSuccessResponse, Error>;

export const useGetQueryRange: UseGetQueryRange = (
  requestData,
@@ -145,10 +141,7 @@ export const useGetQueryRange: UseGetQueryRange = (
    };
  }, [options?.retry]);

  return useQuery<
    SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
    APIError | Error
  >({
  return useQuery<MetricQueryRangeSuccessResponse, APIError | Error>({
    queryFn: async ({ signal }) =>
      GetMetricQueryRange(
        modifiedRequestData,

@@ -19,7 +19,11 @@ import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { SuccessResponse, SuccessResponseV2, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import {
  MetricQueryRangeSuccessResponse,
  MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';
import { ExecStats, MetricRangePayloadV5 } from 'types/api/v5/queryRange';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

@@ -205,13 +209,13 @@ export async function GetMetricQueryRange(
    widgetIndex: number;
    publicDashboardId: string;
  },
): Promise<SuccessResponse<MetricRangePayloadProps> & { warning?: Warning }> {
): Promise<MetricQueryRangeSuccessResponse> {
  let legendMap: Record<string, string>;
  let response:
    | SuccessResponse<MetricRangePayloadProps>
    | SuccessResponseV2<MetricRangePayloadV5>
    | (SuccessResponse<MetricRangePayloadProps> & { warning?: Warning });
    | MetricQueryRangeSuccessResponse
    | SuccessResponseV2<MetricRangePayloadV5>;
  let warning: Warning | undefined;
  let meta: ExecStats | undefined;

  const panelType = props.originalGraphType || props.graphType;

@@ -299,6 +303,7 @@ export async function GetMetricQueryRange(
      );

      warning = response.payload.warning || undefined;
      meta = response.payload.meta || undefined;
    } else {
      const v5Response = await getQueryRangeV5(
        v5Result.queryPayload,
@@ -318,6 +323,7 @@ export async function GetMetricQueryRange(
      );

      warning = response.payload.warning || undefined;
      meta = response.payload.meta || undefined;
    }
  } else {
    const legacyResult = prepareQueryRangePayload(props);
@@ -384,6 +390,7 @@ export async function GetMetricQueryRange(
  return {
    ...response,
    warning,
    meta,
  };
}
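
These hunks collapse the repeated SuccessResponse<MetricRangePayloadProps> & { warning?: Warning } shape into the named MetricQueryRangeSuccessResponse alias and thread meta through alongside warning. Judging from the old inline type and the new return statement, the alias presumably looks roughly like this (a sketch; the actual definition sits in types/api/metrics/getQueryRange):

type MetricQueryRangeSuccessResponse = SuccessResponse<MetricRangePayloadProps> & {
  warning?: Warning;
  meta?: ExecStats;
};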


@@ -81,6 +81,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
      barAlignment,
      barMaxWidth,
      barWidthFactor,
      stepInterval,
    } = this.props;
    if (pathBuilder) {
      return { paths: pathBuilder };
@@ -104,6 +105,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
        barAlignment,
        barMaxWidth,
        barWidthFactor,
        stepInterval,
      });

      return pathsBuilder(self, seriesIdx, idx0, idx1);
@@ -209,12 +211,14 @@ function getPathBuilder({
  barAlignment = BarAlignment.Center,
  barWidthFactor = 0.6,
  barMaxWidth = 200,
  stepInterval,
}: {
  drawStyle: DrawStyle;
  lineInterpolation?: LineInterpolation;
  barAlignment?: BarAlignment;
  barMaxWidth?: number;
  barWidthFactor?: number;
  stepInterval?: number;
}): Series.PathBuilder {
  if (!builders) {
    throw new Error('Required uPlot path builders are not available');
@@ -222,14 +226,13 @@ function getPathBuilder({

  if (drawStyle === DrawStyle.Bar) {
    const pathBuilders = uPlot.paths;
    const barsConfigKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
    if (!builders[barsConfigKey] && pathBuilders.bars) {
      builders[barsConfigKey] = pathBuilders.bars({
        size: [barWidthFactor, barMaxWidth],
        align: barAlignment,
      });
    }
    return builders[barsConfigKey];
    return getBarPathBuilder({
      pathBuilders,
      barAlignment,
      barWidthFactor,
      barMaxWidth,
      stepInterval,
    });
  }

  if (drawStyle === DrawStyle.Line) {
@@ -247,4 +250,81 @@ function getPathBuilder({
  return builders.spline;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
function getBarPathBuilder({
  pathBuilders,
  barAlignment,
  barWidthFactor,
  barMaxWidth,
  stepInterval,
}: {
  pathBuilders: typeof uPlot.paths;
  barAlignment: BarAlignment;
  barWidthFactor: number;
  barMaxWidth: number;
  stepInterval?: number;
}): Series.PathBuilder {
  if (!builders) {
    throw new Error('Required uPlot path builders are not available');
  }

  const barsPathBuilderFactory = pathBuilders.bars;

  // When a stepInterval is provided (in seconds), cap the maximum bar width
  // so that a single bar never visually spans more than stepInterval worth
  // of time on the x-scale.
  if (
    typeof stepInterval === 'number' &&
    stepInterval > 0 &&
    barsPathBuilderFactory
  ) {
    return (
      self: uPlot,
      seriesIdx: number,
      idx0: number,
      idx1: number,
    ): Series.Paths | null => {
      let effectiveBarMaxWidth = barMaxWidth;

      const xScale = self.scales.x as uPlot.Scale | undefined;
      if (xScale && typeof xScale.min === 'number') {
        const start = xScale.min as number;
        const end = start + stepInterval;
        const startPx = self.valToPos(start, 'x');
        const endPx = self.valToPos(end, 'x');
        const intervalPx = Math.abs(endPx - startPx);

        if (intervalPx > 0) {
          effectiveBarMaxWidth =
            typeof barMaxWidth === 'number'
              ? Math.min(barMaxWidth, intervalPx)
              : intervalPx;
        }
      }

      const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
      if (builders && !builders[barsCfgKey]) {
        builders[barsCfgKey] = barsPathBuilderFactory({
          size: [barWidthFactor, effectiveBarMaxWidth],
          align: barAlignment,
        });
      }

      return builders && builders[barsCfgKey]
        ? builders[barsCfgKey](self, seriesIdx, idx0, idx1)
        : null;
    };
  }

  const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
  if (!builders[barsCfgKey] && barsPathBuilderFactory) {
    builders[barsCfgKey] = barsPathBuilderFactory({
      size: [barWidthFactor, barMaxWidth],
      align: barAlignment,
    });
  }

  return builders[barsCfgKey];
}
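
The capping branch converts stepInterval (seconds on the x-scale) into pixels by projecting two points one interval apart through valToPos and taking the absolute difference; the effective bar cap is then the smaller of the configured barMaxWidth and that pixel span. The same arithmetic stand-alone, with assumed numbers for illustration:

// Suppose the visible x-range spans 3600 s drawn over 900 px → 0.25 px/s.
const pxPerSecond = 900 / 3600;
const stepInterval = 60; // seconds between data points
const intervalPx = stepInterval * pxPerSecond; // 15 px
const barMaxWidth = 200;
const effectiveBarMaxWidth = Math.min(barMaxWidth, intervalPx); // 15 px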

export type { SeriesProps };

@@ -176,6 +176,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
  show?: boolean;
  spanGaps?: boolean;
  isDarkMode?: boolean;
  stepInterval?: number;
}

export interface LegendItem {

@@ -84,8 +84,6 @@ const DashboardContext = createContext<IDashboardContext>({
  toScrollWidgetId: '',
  setToScrollWidgetId: () => {},
  updateLocalStorageDashboardVariables: () => {},
  variablesToGetUpdated: [],
  setVariablesToGetUpdated: () => {},
  dashboardQueryRangeCalled: false,
  setDashboardQueryRangeCalled: () => {},
  selectedRowWidgetId: '',
@@ -183,10 +181,6 @@ export function DashboardProvider({
    exact: true,
  });

  const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
    [],
  );

  const [layouts, setLayouts] = useState<Layout[]>([]);

  const [panelMap, setPanelMap] = useState<
@@ -517,8 +511,6 @@ export function DashboardProvider({
      updatedTimeRef,
      setToScrollWidgetId,
      updateLocalStorageDashboardVariables,
      variablesToGetUpdated,
      setVariablesToGetUpdated,
      dashboardQueryRangeCalled,
      setDashboardQueryRangeCalled,
      selectedRowWidgetId,
@@ -541,8 +533,6 @@ export function DashboardProvider({
      toScrollWidgetId,
      updateLocalStorageDashboardVariables,
      currentDashboard,
      variablesToGetUpdated,
      setVariablesToGetUpdated,
      dashboardQueryRangeCalled,
      setDashboardQueryRangeCalled,
      selectedRowWidgetId,

@@ -0,0 +1,527 @@
import * as dashboardVariablesStore from '../dashboardVariables/dashboardVariablesStore';
import { IDependencyData } from '../dashboardVariables/dashboardVariablesStoreTypes';
import {
  enqueueDescendantsOfVariable,
  enqueueFetchOfAllVariables,
  initializeVariableFetchStore,
  onVariableFetchComplete,
  onVariableFetchFailure,
  VariableFetchContext,
  variableFetchStore,
} from '../variableFetchStore';

const getVariableDependencyContextSpy = jest.spyOn(
  dashboardVariablesStore,
  'getVariableDependencyContext',
);

function resetStore(): void {
  variableFetchStore.set(() => ({
    states: {},
    lastUpdated: {},
    cycleIds: {},
  }));
}

function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
  getVariableDependencyContextSpy.mockReturnValue({
    doAllVariablesHaveValuesSelected: false,
    variableTypes: {},
    dynamicVariableOrder: [],
    dependencyData: null,
    ...overrides,
  });
}

/**
 * Helper to build a dependency data object for tests.
 * Only the fields used by the store actions are required.
 */
function buildDependencyData(
  overrides: Partial<IDependencyData> = {},
): IDependencyData {
  return {
    order: [],
    graph: {},
    parentDependencyGraph: {},
    transitiveDescendants: {},
    hasCycle: false,
    ...overrides,
  };
}

describe('variableFetchStore', () => {
  beforeEach(() => {
    resetStore();
    jest.clearAllMocks();
  });

  // ==================== initializeVariableFetchStore ====================

  describe('initializeVariableFetchStore', () => {
    it('should initialize new variables to idle', () => {
      initializeVariableFetchStore(['a', 'b', 'c']);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' });
    });

    it('should preserve existing states for known variables', () => {
      // Pre-set a state
      variableFetchStore.update((d) => {
        d.states.a = 'loading';
      });

      initializeVariableFetchStore(['a', 'b']);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('loading');
      expect(storeSnapshot.states.b).toBe('idle');
    });

    it('should clean up stale variables that no longer exist', () => {
      variableFetchStore.update((d) => {
        d.states.old = 'idle';
        d.lastUpdated.old = 100;
        d.cycleIds.old = 3;
      });

      initializeVariableFetchStore(['a']);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.old).toBeUndefined();
      expect(storeSnapshot.lastUpdated.old).toBeUndefined();
      expect(storeSnapshot.cycleIds.old).toBeUndefined();
      expect(storeSnapshot.states.a).toBe('idle');
    });

    it('should handle empty variable names array', () => {
      variableFetchStore.update((d) => {
        d.states.a = 'idle';
      });

      initializeVariableFetchStore([]);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states).toEqual({});
    });
  });

  // ==================== enqueueFetchOfAllVariables ====================

  describe('enqueueFetchOfAllVariables', () => {
    it('should no-op when dependencyData is null', () => {
      mockContext({ dependencyData: null });

      initializeVariableFetchStore(['a']);
      enqueueFetchOfAllVariables();

      expect(variableFetchStore.getSnapshot().states.a).toBe('idle');
    });

    it('should set root query variables to loading and dependent ones to waiting', () => {
      // a is root (no parents), b depends on a
      mockContext({
        dependencyData: buildDependencyData({
          order: ['a', 'b'],
          parentDependencyGraph: { a: [], b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      initializeVariableFetchStore(['a', 'b']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('loading');
      expect(storeSnapshot.states.b).toBe('waiting');
    });

    it('should set root query variables to revalidating when previously fetched', () => {
      mockContext({
        dependencyData: buildDependencyData({
          order: ['a'],
          parentDependencyGraph: { a: [] },
        }),
        variableTypes: { a: 'QUERY' },
      });

      // Pre-set lastUpdated so it appears previously fetched
      variableFetchStore.update((d) => {
        d.lastUpdated.a = 1000;
      });

      initializeVariableFetchStore(['a']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('revalidating');
    });

    it('should bump cycle IDs for all enqueued variables', () => {
      mockContext({
        dependencyData: buildDependencyData({
          order: ['a', 'b'],
          parentDependencyGraph: { a: [], b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      initializeVariableFetchStore(['a', 'b']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.cycleIds.a).toBe(1);
      expect(storeSnapshot.cycleIds.b).toBe(1);
    });

    it('should set dynamic variables to waiting when not all variables have values', () => {
      mockContext({
        doAllVariablesHaveValuesSelected: false,
        dependencyData: buildDependencyData({ order: [] }),
        variableTypes: { dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      initializeVariableFetchStore(['dyn1']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('waiting');
    });

    it('should set dynamic variables to loading when all variables have values', () => {
      mockContext({
        doAllVariablesHaveValuesSelected: true,
        dependencyData: buildDependencyData({ order: [] }),
        variableTypes: { dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      initializeVariableFetchStore(['dyn1']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('loading');
    });

    it('should not treat non-QUERY parents as query parents', () => {
      // b has a CUSTOM parent — shouldn't cause waiting
      mockContext({
        dependencyData: buildDependencyData({
          order: ['b'],
          parentDependencyGraph: { b: ['customVar'] },
        }),
        variableTypes: { b: 'QUERY', customVar: 'CUSTOM' },
      });

      initializeVariableFetchStore(['b', 'customVar']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.b).toBe('loading');
    });
  });

  // ==================== onVariableFetchComplete ====================

  describe('onVariableFetchComplete', () => {
    it('should set the completed variable to idle with a lastUpdated timestamp', () => {
      mockContext();

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
      });

      const before = Date.now();
      onVariableFetchComplete('a');
      const after = Date.now();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('idle');
      expect(storeSnapshot.lastUpdated.a).toBeGreaterThanOrEqual(before);
      expect(storeSnapshot.lastUpdated.a).toBeLessThanOrEqual(after);
    });

    it('should unblock waiting query-type children', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: ['b'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.b = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('idle');
      expect(storeSnapshot.states.b).toBe('loading');
    });

    it('should not unblock non-QUERY children', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: ['dyn1'] },
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      // dyn1 is DYNAMIC, not QUERY, so it should remain waiting
      expect(storeSnapshot.states.dyn1).toBe('waiting');
    });

    it('should unlock waiting dynamic variables when all query variables are settled', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: [] },
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('loading');
    });

    it('should NOT unlock dynamic variables if a query variable is still in-flight', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: ['b'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.b = 'waiting';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('waiting');
    });
  });

  // ==================== onVariableFetchFailure ====================

  describe('onVariableFetchFailure', () => {
    it('should set the failed variable to error', () => {
      mockContext();

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
      });

      onVariableFetchFailure('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('error');
    });

    it('should set query-type transitive descendants to idle', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b', 'c'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.b = 'waiting';
        d.states.c = 'waiting';
      });

      onVariableFetchFailure('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('error');
      expect(storeSnapshot.states.b).toBe('idle');
      expect(storeSnapshot.states.c).toBe('idle');
    });

    it('should not touch non-QUERY descendants', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['dyn1'] },
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchFailure('a');

      expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
    });

    it('should unlock waiting dynamic variables when all query variables settle via error', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: {},
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchFailure('a');

      expect(variableFetchStore.getSnapshot().states.dyn1).toBe('loading');
    });
  });

  // ==================== enqueueDescendantsOfVariable ====================

  describe('enqueueDescendantsOfVariable', () => {
    it('should no-op when dependencyData is null', () => {
      mockContext({ dependencyData: null });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      expect(variableFetchStore.getSnapshot().states.b).toBe('idle');
    });

    it('should enqueue query-type descendants with all parents settled', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b'] },
          parentDependencyGraph: { b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.b).toBe('loading');
      expect(storeSnapshot.cycleIds.b).toBe(1);
    });

    it('should set descendants to waiting when some parents are not settled', () => {
      // b depends on both a and c; c is still loading
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b'] },
          parentDependencyGraph: { b: ['a', 'c'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
        d.states.c = 'loading';
      });

      enqueueDescendantsOfVariable('a');

      expect(variableFetchStore.getSnapshot().states.b).toBe('waiting');
    });

    it('should skip non-QUERY descendants', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['dyn1'] },
          parentDependencyGraph: {},
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.dyn1 = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      // dyn1 is DYNAMIC, so it should not be touched
      expect(variableFetchStore.getSnapshot().states.dyn1).toBe('idle');
    });

    it('should handle chain of descendants: a -> b -> c', () => {
      // a -> b -> c, all QUERY
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b', 'c'] },
          parentDependencyGraph: { b: ['a'], c: ['b'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
        d.states.c = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      // b's parent (a) is idle/settled → loading
      expect(storeSnapshot.states.b).toBe('loading');
      // c's parent (b) just moved to loading (not settled) → waiting
      expect(storeSnapshot.states.c).toBe('waiting');
    });

    it('should set descendants to revalidating when previously fetched', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b'] },
          parentDependencyGraph: { b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
        d.lastUpdated.b = 1000;
      });

      enqueueDescendantsOfVariable('a');

      expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
    });
  });
});
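
Read together, these tests pin down the per-variable state machine: enqueueing moves a variable to 'loading' (or 'revalidating' when lastUpdated > 0) if all its QUERY parents are settled, otherwise to 'waiting'; a completed fetch settles it to 'idle' and promotes its waiting QUERY children; a failure settles it to 'error' and resets QUERY descendants to 'idle'; DYNAMIC variables stay in 'waiting' until every QUERY variable has settled. The enqueue decision restated as a pure function (a sketch inferred from the tests, not the shipped store code):

type FetchState = 'idle' | 'waiting' | 'loading' | 'revalidating' | 'error';

function nextStateOnEnqueue(
  allParentsSettled: boolean,
  hasFetchedBefore: boolean, // i.e. lastUpdated > 0
): FetchState {
  if (!allParentsSettled) {
    return 'waiting';
  }
  return hasFetchedBefore ? 'revalidating' : 'loading';
}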

@@ -0,0 +1,196 @@
import {
  IVariableFetchStoreState,
  VariableFetchState,
} from '../variableFetchStore';
import {
  areAllQueryVariablesSettled,
  isSettled,
  resolveFetchState,
  unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';

describe('variableFetchStoreUtils', () => {
  describe('isSettled', () => {
    it('should return true for idle state', () => {
      expect(isSettled('idle')).toBe(true);
    });

    it('should return true for error state', () => {
      expect(isSettled('error')).toBe(true);
    });

    it('should return false for loading state', () => {
      expect(isSettled('loading')).toBe(false);
    });

    it('should return false for revalidating state', () => {
      expect(isSettled('revalidating')).toBe(false);
    });

    it('should return false for waiting state', () => {
      expect(isSettled('waiting')).toBe(false);
    });

    it('should return false for undefined', () => {
      expect(isSettled(undefined)).toBe(false);
    });
  });

  describe('resolveFetchState', () => {
    it('should return "loading" when variable has never been fetched', () => {
      const draft: IVariableFetchStoreState = {
        states: {},
        lastUpdated: {},
        cycleIds: {},
      };

      expect(resolveFetchState(draft, 'myVar')).toBe('loading');
    });

    it('should return "loading" when lastUpdated is 0', () => {
      const draft: IVariableFetchStoreState = {
        states: {},
        lastUpdated: { myVar: 0 },
        cycleIds: {},
      };

      expect(resolveFetchState(draft, 'myVar')).toBe('loading');
    });

    it('should return "revalidating" when variable has been fetched before', () => {
      const draft: IVariableFetchStoreState = {
        states: {},
        lastUpdated: { myVar: 1000 },
        cycleIds: {},
      };

      expect(resolveFetchState(draft, 'myVar')).toBe('revalidating');
    });
  });

  describe('areAllQueryVariablesSettled', () => {
    it('should return true when all query variables are idle', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'idle',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return true when all query variables are in error', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'error',
        b: 'error',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return true with a mix of idle and error query variables', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'error',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return false when any query variable is loading', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'loading',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
    });

    it('should return false when any query variable is waiting', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'waiting',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
    });

    it('should ignore non-QUERY variable types', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        dynVar: 'loading',
      };
      const variableTypes = {
        a: 'QUERY' as const,
        dynVar: 'DYNAMIC' as const,
      };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return true when there are no QUERY variables', () => {
      const states: Record<string, VariableFetchState> = {
        dynVar: 'loading',
      };
      const variableTypes = { dynVar: 'DYNAMIC' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });
  });

  describe('unlockWaitingDynamicVariables', () => {
    it('should transition waiting dynamic variables to loading when never fetched', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'waiting', dyn2: 'waiting' },
        lastUpdated: {},
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);

      expect(draft.states.dyn1).toBe('loading');
      expect(draft.states.dyn2).toBe('loading');
    });

    it('should transition waiting dynamic variables to revalidating when previously fetched', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'waiting' },
        lastUpdated: { dyn1: 1000 },
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, ['dyn1']);

      expect(draft.states.dyn1).toBe('revalidating');
    });

    it('should not touch dynamic variables that are not in waiting state', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'idle', dyn2: 'loading' },
        lastUpdated: {},
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);

      expect(draft.states.dyn1).toBe('idle');
      expect(draft.states.dyn2).toBe('loading');
    });

    it('should handle empty dynamic variable order', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'waiting' },
        lastUpdated: {},
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, []);

      expect(draft.states.dyn1).toBe('waiting');
    });
  });
});
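
The tests above fully determine the two predicates, so implementations consistent with them would be about this small (a sketch; the shipped versions live in variableFetchStoreUtils and may be written differently):

type FetchState = 'idle' | 'waiting' | 'loading' | 'revalidating' | 'error';

function isSettled(state?: FetchState): boolean {
  return state === 'idle' || state === 'error';
}

function areAllQueryVariablesSettled(
  states: Record<string, FetchState>,
  variableTypes: Record<string, string>,
): boolean {
  return Object.entries(variableTypes)
    .filter(([, type]) => type === 'QUERY')
    .every(([name]) => isSettled(states[name]));
}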
|
||||
@@ -0,0 +1,225 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import {
	dashboardVariablesStore,
	getVariableDependencyContext,
	setDashboardVariablesStore,
	updateDashboardVariablesStore,
} from '../dashboardVariablesStore';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';

function createVariable(
	overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
	return {
		id: 'test-id',
		name: 'test-var',
		description: '',
		type: 'QUERY',
		sort: 'DISABLED',
		showALLOption: false,
		multiSelect: false,
		order: 0,
		...overrides,
	};
}

function resetStore(): void {
	dashboardVariablesStore.set(() => ({
		dashboardId: '',
		variables: {},
		sortedVariablesArray: [],
		dependencyData: null,
		variableTypes: {},
		dynamicVariableOrder: [],
	}));
}

describe('dashboardVariablesStore', () => {
	beforeEach(() => {
		resetStore();
	});

	describe('setDashboardVariablesStore', () => {
		it('should set the dashboard variables and compute derived values', () => {
			const variables: IDashboardVariables = {
				env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
			};

			setDashboardVariablesStore({ dashboardId: 'dash-1', variables });

			const storeSnapshot = dashboardVariablesStore.getSnapshot();
			expect(storeSnapshot.dashboardId).toBe('dash-1');
			expect(storeSnapshot.variables).toEqual(variables);
			expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' });
			expect(storeSnapshot.sortedVariablesArray).toHaveLength(1);
		});
	});

	describe('updateDashboardVariablesStore', () => {
		it('should update variables and recompute derived values', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
				},
			});

			const updatedVariables: IDashboardVariables = {
				env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
				dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
			};

			updateDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: updatedVariables,
			});

			const storeSnapshot = dashboardVariablesStore.getSnapshot();
			expect(storeSnapshot.variableTypes).toEqual({
				env: 'QUERY',
				dyn1: 'DYNAMIC',
			});
			expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']);
		});

		it('should replace dashboardId when it does not match', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					'not-there': createVariable({ name: 'not-there', order: 0 }),
				},
			});

			updateDashboardVariablesStore({
				dashboardId: 'dash-2',
				variables: {
					a: createVariable({ name: 'a', order: 0 }),
				},
			});

			const storeSnapshot = dashboardVariablesStore.getSnapshot();
			expect(storeSnapshot.dashboardId).toBe('dash-2');
			expect(storeSnapshot.variableTypes).toEqual({
				a: 'QUERY',
			});
			expect(storeSnapshot.variableTypes).not.toEqual({
				'not-there': 'QUERY',
			});
		});
	});

	describe('getVariableDependencyContext', () => {
		it('should return context with all fields', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					env: createVariable({
						name: 'env',
						type: 'QUERY',
						order: 0,
						selectedValue: 'prod',
					}),
				},
			});

			const {
				variableTypes,
				dynamicVariableOrder,
				dependencyData,
			} = getVariableDependencyContext();

			expect(variableTypes).toEqual({ env: 'QUERY' });
			expect(dynamicVariableOrder).toEqual([]);
			expect(dependencyData).not.toBeNull();
		});

		it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					env: createVariable({
						name: 'env',
						type: 'QUERY',
						order: 0,
						selectedValue: 'prod',
					}),
					region: createVariable({
						name: 'region',
						type: 'CUSTOM',
						order: 1,
						selectedValue: 'us-east',
					}),
				},
			});

			const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
			expect(doAllVariablesHaveValuesSelected).toBe(true);
		});

		it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					env: createVariable({
						name: 'env',
						type: 'QUERY',
						order: 0,
						selectedValue: 'prod',
					}),
					region: createVariable({
						name: 'region',
						type: 'CUSTOM',
						order: 1,
						selectedValue: undefined,
					}),
				},
			});

			const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
			expect(doAllVariablesHaveValuesSelected).toBe(false);
		});

		it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					dyn1: createVariable({
						name: 'dyn1',
						type: 'DYNAMIC',
						order: 0,
						selectedValue: null as any,
						allSelected: true,
					}),
					env: createVariable({
						name: 'env',
						type: 'QUERY',
						order: 1,
						selectedValue: 'prod',
					}),
				},
			});

			const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
			expect(doAllVariablesHaveValuesSelected).toBe(true);
		});

		it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
			setDashboardVariablesStore({
				dashboardId: 'dash-1',
				variables: {
					dyn1: createVariable({
						name: 'dyn1',
						type: 'DYNAMIC',
						order: 0,
						selectedValue: '',
						allSelected: false,
					}),
				},
			});

			const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
			expect(doAllVariablesHaveValuesSelected).toBe(false);
		});
	});
});

@@ -0,0 +1,369 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
import {
	buildDynamicVariableOrder,
	buildSortedVariablesArray,
	buildVariableTypesMap,
	computeDerivedValues,
} from '../dashboardVariablesStoreUtils';

const createVariable = (
	overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
	id: 'test-id',
	name: 'test-var',
	description: '',
	type: 'QUERY',
	sort: 'DISABLED',
	showALLOption: false,
	multiSelect: false,
	order: 0,
	...overrides,
});

describe('dashboardVariablesStoreUtils', () => {
	describe('buildSortedVariablesArray', () => {
		it('should sort variables by order property', () => {
			const variables: IDashboardVariables = {
				c: createVariable({ name: 'c', order: 3 }),
				a: createVariable({ name: 'a', order: 1 }),
				b: createVariable({ name: 'b', order: 2 }),
			};

			const result = buildSortedVariablesArray(variables);

			expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']);
		});

		it('should return empty array for empty variables', () => {
			const result = buildSortedVariablesArray({});
			expect(result).toEqual([]);
		});

		it('should create copies of variables (not references)', () => {
			const original = createVariable({ name: 'a', order: 0 });
			const variables: IDashboardVariables = { a: original };

			const result = buildSortedVariablesArray(variables);

			expect(result[0]).not.toBe(original);
			expect(result[0]).toEqual(original);
		});
	});

	describe('buildVariableTypesMap', () => {
		it('should create a name-to-type mapping', () => {
			const sorted = [
				createVariable({ name: 'env', type: 'QUERY' }),
				createVariable({ name: 'region', type: 'CUSTOM' }),
				createVariable({ name: 'dynVar', type: 'DYNAMIC' }),
				createVariable({ name: 'text', type: 'TEXTBOX' }),
			];

			const result = buildVariableTypesMap(sorted);

			expect(result).toEqual({
				env: 'QUERY',
				region: 'CUSTOM',
				dynVar: 'DYNAMIC',
				text: 'TEXTBOX',
			});
		});

		it('should return empty object for empty array', () => {
			expect(buildVariableTypesMap([])).toEqual({});
		});
	});

	describe('buildDynamicVariableOrder', () => {
		it('should return only DYNAMIC variable names in order', () => {
			const sorted = [
				createVariable({ name: 'queryVar', type: 'QUERY', order: 0 }),
				createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
				createVariable({ name: 'customVar', type: 'CUSTOM', order: 2 }),
				createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
			];

			const result = buildDynamicVariableOrder(sorted);

			expect(result).toEqual(['dyn1', 'dyn2']);
		});

		it('should return empty array when no DYNAMIC variables exist', () => {
			const sorted = [
				createVariable({ name: 'a', type: 'QUERY' }),
				createVariable({ name: 'b', type: 'CUSTOM' }),
			];

			expect(buildDynamicVariableOrder(sorted)).toEqual([]);
		});

		it('should return empty array for empty input', () => {
			expect(buildDynamicVariableOrder([])).toEqual([]);
		});
	});

	describe('computeDerivedValues', () => {
		it('should compute all derived values from variables', () => {
			const variables: IDashboardVariables = {
				env: createVariable({
					name: 'env',
					type: 'QUERY',
					order: 0,
				}),
				dyn1: createVariable({
					name: 'dyn1',
					type: 'DYNAMIC',
					order: 1,
				}),
			};

			const result = computeDerivedValues(variables);

			expect(result.sortedVariablesArray).toHaveLength(2);
			expect(result.sortedVariablesArray[0].name).toBe('env');
			expect(result.sortedVariablesArray[1].name).toBe('dyn1');

			expect(result.variableTypes).toEqual({
				env: 'QUERY',
				dyn1: 'DYNAMIC',
			});

			expect(result.dynamicVariableOrder).toEqual(['dyn1']);

			// dependencyData should exist since there are variables
			expect(result.dependencyData).not.toBeNull();
		});

		it('should return null dependencyData for empty variables', () => {
			const result = computeDerivedValues({});

			expect(result.sortedVariablesArray).toEqual([]);
			expect(result.dependencyData).toBeNull();
			expect(result.variableTypes).toEqual({});
			expect(result.dynamicVariableOrder).toEqual([]);
		});

		it('should handle all four variable types together', () => {
			const variables: IDashboardVariables = {
				queryVar: createVariable({
					name: 'queryVar',
					type: 'QUERY',
					order: 0,
				}),
				customVar: createVariable({
					name: 'customVar',
					type: 'CUSTOM',
					order: 1,
				}),
				dynVar: createVariable({
					name: 'dynVar',
					type: 'DYNAMIC',
					order: 2,
				}),
				textVar: createVariable({
					name: 'textVar',
					type: 'TEXTBOX',
					order: 3,
				}),
			};

			const result = computeDerivedValues(variables);

			expect(result.sortedVariablesArray).toHaveLength(4);
			expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
				'queryVar',
				'customVar',
				'dynVar',
				'textVar',
			]);

			expect(result.variableTypes).toEqual({
				queryVar: 'QUERY',
				customVar: 'CUSTOM',
				dynVar: 'DYNAMIC',
				textVar: 'TEXTBOX',
			});

			expect(result.dynamicVariableOrder).toEqual(['dynVar']);
			expect(result.dependencyData).not.toBeNull();
		});

		it('should sort variables by order regardless of insertion order', () => {
			const variables: IDashboardVariables = {
				z: createVariable({ name: 'z', type: 'QUERY', order: 4 }),
				a: createVariable({ name: 'a', type: 'CUSTOM', order: 0 }),
				m: createVariable({ name: 'm', type: 'DYNAMIC', order: 2 }),
				b: createVariable({ name: 'b', type: 'TEXTBOX', order: 1 }),
				x: createVariable({ name: 'x', type: 'QUERY', order: 3 }),
			};

			const result = computeDerivedValues(variables);

			expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
				'a',
				'b',
				'm',
				'x',
				'z',
			]);
		});

		it('should include multiple dynamic variables in order', () => {
			const variables: IDashboardVariables = {
				dyn3: createVariable({ name: 'dyn3', type: 'DYNAMIC', order: 5 }),
				query1: createVariable({ name: 'query1', type: 'QUERY', order: 0 }),
				dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
				custom1: createVariable({ name: 'custom1', type: 'CUSTOM', order: 2 }),
				dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
			};

			const result = computeDerivedValues(variables);

			expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']);
		});

		it('should build dependency data with query variable order for dependent queries', () => {
			const variables: IDashboardVariables = {
				env: createVariable({
					name: 'env',
					type: 'QUERY',
					order: 0,
					queryValue: 'SELECT DISTINCT env FROM table',
				}),
				service: createVariable({
					name: 'service',
					type: 'QUERY',
					order: 1,
					queryValue: 'SELECT DISTINCT service FROM table WHERE env={{.env}}',
				}),
			};

			const result = computeDerivedValues(variables);

			const { dependencyData } = result;
			expect(dependencyData).not.toBeNull();
			// env should appear in the dependency order (it's a root QUERY variable)
			expect(dependencyData?.order).toContain('env');
			// service depends on env, so it should also be in the order
			expect(dependencyData?.order).toContain('service');
			// env comes before service in topological order
			const envIdx = dependencyData?.order.indexOf('env') ?? -1;
			const svcIdx = dependencyData?.order.indexOf('service') ?? -1;
			expect(envIdx).toBeLessThan(svcIdx);
		});

		it('should not include non-QUERY variables in dependency order', () => {
			const variables: IDashboardVariables = {
				env: createVariable({
					name: 'env',
					type: 'QUERY',
					order: 0,
					queryValue: 'SELECT DISTINCT env FROM table',
				}),
				customVar: createVariable({
					name: 'customVar',
					type: 'CUSTOM',
					order: 1,
				}),
				dynVar: createVariable({
					name: 'dynVar',
					type: 'DYNAMIC',
					order: 2,
				}),
				textVar: createVariable({
					name: 'textVar',
					type: 'TEXTBOX',
					order: 3,
				}),
			};

			const result = computeDerivedValues(variables);

			expect(result.dependencyData).not.toBeNull();
			// Only QUERY variables should be in the dependency order
			result.dependencyData?.order.forEach((name) => {
				expect(result.variableTypes[name]).toBe('QUERY');
			});
		});

		it('should produce transitive descendants in dependency data', () => {
			const variables: IDashboardVariables = {
				region: createVariable({
					name: 'region',
					type: 'QUERY',
					order: 0,
					queryValue: 'SELECT region FROM table',
				}),
				cluster: createVariable({
					name: 'cluster',
					type: 'QUERY',
					order: 1,
					queryValue: 'SELECT cluster FROM table WHERE region={{.region}}',
				}),
				host: createVariable({
					name: 'host',
					type: 'QUERY',
					order: 2,
					queryValue: 'SELECT host FROM table WHERE cluster={{.cluster}}',
				}),
			};

			const result = computeDerivedValues(variables);

			const { dependencyData: depData } = result;
			expect(depData).not.toBeNull();
			expect(depData?.transitiveDescendants).toBeDefined();
			// region's transitive descendants should include cluster and host
			expect(depData?.transitiveDescendants['region']).toEqual(
				expect.arrayContaining(['cluster', 'host']),
			);
		});

		it('should handle a single variable', () => {
			const variables: IDashboardVariables = {
				solo: createVariable({
					name: 'solo',
					type: 'QUERY',
					order: 0,
				}),
			};

			const result = computeDerivedValues(variables);

			expect(result.sortedVariablesArray).toHaveLength(1);
			expect(result.variableTypes).toEqual({ solo: 'QUERY' });
			expect(result.dynamicVariableOrder).toEqual([]);
			expect(result.dependencyData).not.toBeNull();
			expect(result.dependencyData?.order).toEqual(['solo']);
		});

		it('should handle only non-QUERY variables', () => {
			const variables: IDashboardVariables = {
				custom1: createVariable({
					name: 'custom1',
					type: 'CUSTOM',
					order: 0,
				}),
				text1: createVariable({
					name: 'text1',
					type: 'TEXTBOX',
					order: 1,
				}),
				dyn1: createVariable({
					name: 'dyn1',
					type: 'DYNAMIC',
					order: 2,
				}),
			};

			const result = computeDerivedValues(variables);

			expect(result.sortedVariablesArray).toHaveLength(3);
			// No QUERY variables, so dependency order should be empty
			expect(result.dependencyData?.order).toEqual([]);
			expect(result.dynamicVariableOrder).toEqual(['dyn1']);
		});
	});
});

@@ -1,4 +1,7 @@
import { isEmpty, isUndefined } from 'lodash-es';

import createStore from '../store';
import { VariableFetchContext } from '../variableFetchStore';
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
import {
	computeDerivedValues,
@@ -10,6 +13,8 @@ const initialState: IDashboardVariablesStoreState = {
	variables: {},
	sortedVariablesArray: [],
	dependencyData: null,
	variableTypes: {},
	dynamicVariableOrder: [],
};

export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
@@ -55,3 +60,38 @@ export function updateDashboardVariablesStore({
		updateDerivedValues(draft);
	});
}

/**
 * Read current store snapshot as VariableFetchContext.
 * Used by components to pass context to variableFetchStore actions
 * without creating a circular import.
 */
export function getVariableDependencyContext(): VariableFetchContext {
	const state = dashboardVariablesStore.getSnapshot();

	// If every variable already has a selectedValue (e.g. persisted from
	// localStorage/URL), dynamic variables can start in parallel.
	// Otherwise they wait for query vars to settle first.
	const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
		(variable) => {
			if (
				variable.type === 'DYNAMIC' &&
				variable.selectedValue === null &&
				variable.allSelected === true
			) {
				return true;
			}

			return (
				!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
			);
		},
	);

	return {
		doAllVariablesHaveValuesSelected,
		variableTypes: state.variableTypes,
		dynamicVariableOrder: state.dynamicVariableOrder,
		dependencyData: state.dependencyData,
	};
}

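The snapshot-to-context hand-off above is what lets the fetch store consume dependency data without importing the variables store directly. A minimal consumer sketch (the relative import path is assumed to be resolved from the caller's location; the cycle check is an invented example):

import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';

const ctx = getVariableDependencyContext();
if (ctx.dependencyData && !ctx.dependencyData.hasCycle) {
	// Dynamic variables may start in parallel only when every variable
	// already carries a persisted selection (URL/localStorage).
	console.log(ctx.doAllVariablesHaveValuesSelected, ctx.dynamicVariableOrder);
}
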
@@ -1,11 +1,18 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
	IDashboardVariable,
	TVariableQueryType,
} from 'types/api/dashboard/getAll';

export type VariableGraph = Record<string, string[]>;

export interface IDependencyData {
	order: string[];
	// Direct children for each variable
	graph: VariableGraph;
	// Direct parents for each variable
	parentDependencyGraph: VariableGraph;
	// Pre-computed transitive descendants for each node (all reachable nodes, not just direct children)
	transitiveDescendants: VariableGraph;
	hasCycle: boolean;
	cycleNodes?: string[];
}
@@ -24,6 +31,12 @@ export interface IDashboardVariablesStoreState {

	// Derived: dependency data for QUERY variables
	dependencyData: IDependencyData | null;

	// Derived: variable name → type mapping
	variableTypes: Record<string, TVariableQueryType>;

	// Derived: display-ordered list of dynamic variable names
	dynamicVariableOrder: string[];
}

export interface IUseDashboardVariablesReturn {

@@ -2,9 +2,11 @@ import {
	buildDependencies,
	buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
	IDashboardVariable,
	TVariableQueryType,
} from 'types/api/dashboard/getAll';

import { initializeVariableFetchStore } from '../variableFetchStore';
import {
	IDashboardVariables,
	IDashboardVariablesStoreState,
@@ -44,6 +46,7 @@ export function buildDependencyData(
		order,
		graph,
		parentDependencyGraph,
		transitiveDescendants,
		hasCycle,
		cycleNodes,
	} = buildDependencyGraph(dependencies);
@@ -58,49 +61,62 @@
		order: queryVariableOrder,
		graph,
		parentDependencyGraph,
		transitiveDescendants,
		hasCycle,
		cycleNodes,
	};
}

/**
 * Initialize the variable fetch store with the computed dependency data
 * Build a variable name → type mapping from sorted variables array
 */
function initializeFetchStore(
export function buildVariableTypesMap(
	sortedVariablesArray: IDashboardVariable[],
	dependencyData: IDependencyData | null,
): void {
	if (dependencyData) {
		const allVariableNames = sortedVariablesArray
			.map((v) => v.name)
			.filter((name): name is string => !!name);
): Record<string, TVariableQueryType> {
	const types: Record<string, TVariableQueryType> = {};
	sortedVariablesArray.forEach((v) => {
		if (v.name) {
			types[v.name] = v.type;
		}
	});
	return types;
}

		initializeVariableFetchStore(
			allVariableNames,
			dependencyData.graph,
			dependencyData.parentDependencyGraph,
		);
	}
/**
 * Build display-ordered list of dynamic variable names
 */
export function buildDynamicVariableOrder(
	sortedVariablesArray: IDashboardVariable[],
): string[] {
	return sortedVariablesArray
		.filter((v) => v.type === 'DYNAMIC' && v.name)
		.map((v) => v.name as string);
}

/**
 * Compute derived values from variables
 * This is a composition of buildSortedVariablesArray and buildDependencyData
 * Also initializes the variable fetch store with the new dependency data
 */
export function computeDerivedValues(
	variables: IDashboardVariablesStoreState['variables'],
): Pick<
	IDashboardVariablesStoreState,
	'sortedVariablesArray' | 'dependencyData'
	| 'sortedVariablesArray'
	| 'dependencyData'
	| 'variableTypes'
	| 'dynamicVariableOrder'
> {
	const sortedVariablesArray = buildSortedVariablesArray(variables);
	const dependencyData = buildDependencyData(sortedVariablesArray);
	const variableTypes = buildVariableTypesMap(sortedVariablesArray);
	const dynamicVariableOrder = buildDynamicVariableOrder(sortedVariablesArray);

	// Initialize the variable fetch store when dependency data is computed
	initializeFetchStore(sortedVariablesArray, dependencyData);

	return { sortedVariablesArray, dependencyData };
	return {
		sortedVariablesArray,
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	};
}

/**
@@ -112,7 +128,8 @@ export function updateDerivedValues(
): void {
	draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
	draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);

	// Initialize the variable fetch store when dependency data is updated
	initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
	draft.variableTypes = buildVariableTypesMap(draft.sortedVariablesArray);
	draft.dynamicVariableOrder = buildDynamicVariableOrder(
		draft.sortedVariablesArray,
	);
}

@@ -1,6 +1,12 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';

import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from './dashboardVariables/dashboardVariablesStoreTypes';
import createStore from './store';
import {
	areAllQueryVariablesSettled,
	isSettled,
	resolveFetchState,
	unlockWaitingDynamicVariables,
} from './variableFetchStoreUtils';

// Fetch state for each variable
export type VariableFetchState =
@@ -14,19 +20,29 @@ export interface IVariableFetchStoreState {
	// Per-variable fetch state
	states: Record<string, VariableFetchState>;

	// Dependency graphs (set once when variables change)
	dependencyGraph: VariableGraph; // variable -> children that depend on it
	parentGraph: VariableGraph; // variable -> parents it depends on

	// Track last update timestamp per variable to trigger re-fetches
	// Track last update timestamp per variable
	lastUpdated: Record<string, number>;

	// Per-variable cycle counter — bumped when a variable needs to refetch.
	// Used in react-query keys to auto-cancel stale requests for that variable only.
	cycleIds: Record<string, number>;
}

/**
 * Context from dashboardVariablesStore needed by fetch actions.
 * Passed as parameter to avoid circular imports.
 */
export type VariableFetchContext = Pick<
	IDashboardVariablesStoreState,
	'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
	doAllVariablesHaveValuesSelected: boolean;
};

const initialState: IVariableFetchStoreState = {
	states: {},
	dependencyGraph: {},
	parentGraph: {},
	lastUpdated: {},
	cycleIds: {},
};

export const variableFetchStore = createStore<IVariableFetchStoreState>(
@@ -36,22 +52,183 @@ export const variableFetchStore = createStore<IVariableFetchStoreState>(
// ============== Actions ==============

/**
 * Initialize the store with dependency graphs and set initial states
 * Initialize the store with variable names.
 * Called when dashboard variables change — sets up state entries.
 */
export function initializeVariableFetchStore(
	variableNames: string[],
	dependencyGraph: VariableGraph,
	parentGraph: VariableGraph,
): void {
export function initializeVariableFetchStore(variableNames: string[]): void {
	variableFetchStore.update((draft) => {
		draft.dependencyGraph = dependencyGraph;
		draft.parentGraph = parentGraph;

		// Initialize all variables to idle, preserving existing ready states
		// Initialize all variables to idle, preserving existing states
		variableNames.forEach((name) => {
			if (!draft.states[name]) {
				draft.states[name] = 'idle';
			}
		});

		// Clean up stale entries for variables that no longer exist
		const nameSet = new Set(variableNames);
		Object.keys(draft.states).forEach((name) => {
			if (!nameSet.has(name)) {
				delete draft.states[name];
				delete draft.lastUpdated[name];
				delete draft.cycleIds[name];
			}
		});
	});
}

/**
 * Start a full fetch cycle for all fetchable variables.
 * Called on: initial load, time range change, or dependency graph change.
 *
 * Query variables with no query-type parents start immediately.
 * Query variables with query-type parents get 'waiting'.
 * Dynamic variables start immediately if all variables already have
 * selectedValues (e.g. persisted from localStorage/URL). Otherwise they
 * wait for all query variables to settle first.
 */
export function enqueueFetchOfAllVariables(): void {
	const {
		doAllVariablesHaveValuesSelected,
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	} = getVariableDependencyContext();
	if (!dependencyData) {
		return;
	}

	const { order: queryVariableOrder, parentDependencyGraph } = dependencyData;

	variableFetchStore.update((draft) => {
		// Query variables: root ones start immediately, dependent ones wait
		queryVariableOrder.forEach((name) => {
			draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
			const parents = parentDependencyGraph[name] || [];
			const hasQueryParents = parents.some((p) => variableTypes[p] === 'QUERY');
			if (hasQueryParents) {
				draft.states[name] = 'waiting';
			} else {
				draft.states[name] = resolveFetchState(draft, name);
			}
		});

		// Dynamic variables: start immediately if query variables have values,
		// otherwise wait for query variables to settle first
		dynamicVariableOrder.forEach((name) => {
			draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
			draft.states[name] = doAllVariablesHaveValuesSelected
				? resolveFetchState(draft, name)
				: 'waiting';
		});
	});
}

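To make the branching concrete, here is an illustrative trace with invented variable names: a dashboard holding a QUERY chain env -> service plus one DYNAMIC variable named namespace, with nothing fetched or persisted yet (import path assumed relative to the caller):

import {
	enqueueFetchOfAllVariables,
	variableFetchStore,
} from './variableFetchStore';

enqueueFetchOfAllVariables();
const { states } = variableFetchStore.getSnapshot();
// states.env === 'loading'       (root QUERY variable, no QUERY parents)
// states.service === 'waiting'   (blocked until its QUERY parent env settles)
// states.namespace === 'waiting' (DYNAMIC, no persisted selections yet)
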
/**
 * Mark a variable as completed. Unblocks waiting query-type children.
 * If all query variables are now settled, unlocks any waiting dynamic variables.
 */
export function onVariableFetchComplete(name: string): void {
	const {
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	} = getVariableDependencyContext();

	variableFetchStore.update((draft) => {
		draft.states[name] = 'idle';
		draft.lastUpdated[name] = Date.now();

		if (!dependencyData) {
			return;
		}

		const { graph } = dependencyData;

		// Unblock waiting query-type children
		const children = graph[name] || [];
		children.forEach((child) => {
			if (variableTypes[child] === 'QUERY' && draft.states[child] === 'waiting') {
				draft.states[child] = resolveFetchState(draft, child);
			}
		});

		// If all query variables are settled, unlock any waiting dynamic variables
		if (
			variableTypes[name] === 'QUERY' &&
			areAllQueryVariablesSettled(draft.states, variableTypes)
		) {
			unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
		}
	});
}

/**
 * Mark a variable as errored. Sets query-type descendants to idle
 * (they can't proceed without this parent).
 * If all query variables are now settled, unlocks any waiting dynamic variables.
 */
export function onVariableFetchFailure(name: string): void {
	const {
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	} = getVariableDependencyContext();

	variableFetchStore.update((draft) => {
		draft.states[name] = 'error';

		if (!dependencyData) {
			return;
		}

		// Set query-type descendants to idle (can't fetch without parent)
		const descendants = dependencyData.transitiveDescendants[name] || [];
		descendants.forEach((desc) => {
			if (variableTypes[desc] === 'QUERY') {
				draft.states[desc] = 'idle';
			}
		});

		// If all query variables are settled (error counts), unlock any waiting dynamic variables
		if (
			variableTypes[name] === 'QUERY' &&
			areAllQueryVariablesSettled(draft.states, variableTypes)
		) {
			unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
		}
	});
}

/**
 * Cascade a value change to query-type descendants.
 * Called when a user changes a variable's value (not from a fetch cycle).
 *
 * Direct children whose parents are all settled start immediately.
 * Deeper descendants wait until their parents complete (BFS order
 * ensures parents are set before children within a single update).
 */
export function enqueueDescendantsOfVariable(name: string): void {
	const { dependencyData, variableTypes } = getVariableDependencyContext();
	if (!dependencyData) {
		return;
	}

	const { parentDependencyGraph } = dependencyData;

	variableFetchStore.update((draft) => {
		const descendants = dependencyData.transitiveDescendants[name] || [];
		const queryDescendants = descendants.filter(
			(desc) => variableTypes[desc] === 'QUERY',
		);

		queryDescendants.forEach((desc) => {
			draft.cycleIds[desc] = (draft.cycleIds[desc] || 0) + 1;
			const parents = parentDependencyGraph[desc] || [];
			const allParentsSettled = parents.every((p) => isSettled(draft.states[p]));

			draft.states[desc] = allParentsSettled
				? resolveFetchState(draft, desc)
				: 'waiting';
		});
	});
}

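Continuing the same invented trace from above: completing a parent unblocks its waiting QUERY children, and settling the last QUERY variable unlocks the waiting DYNAMIC ones:

import { onVariableFetchComplete } from './variableFetchStore';

onVariableFetchComplete('env');
// states: { env: 'idle', service: 'loading', namespace: 'waiting' }

onVariableFetchComplete('service');
// every QUERY variable is now settled, so the dynamic variable unlocks:
// states: { env: 'idle', service: 'idle', namespace: 'loading' }
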
@@ -0,0 +1,46 @@
import { TVariableQueryType } from 'types/api/dashboard/getAll';

import {
	IVariableFetchStoreState,
	VariableFetchState,
} from './variableFetchStore';

export function isSettled(state: VariableFetchState | undefined): boolean {
	return state === 'idle' || state === 'error';
}

/**
 * Resolve the next fetch state based on whether the variable has been fetched before.
 */
export function resolveFetchState(
	draft: IVariableFetchStoreState,
	name: string,
): VariableFetchState {
	return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}

/**
 * Check if all query variables are settled (idle or error).
 */
export function areAllQueryVariablesSettled(
	states: Record<string, VariableFetchState>,
	variableTypes: Record<string, TVariableQueryType>,
): boolean {
	return Object.entries(variableTypes)
		.filter(([, type]) => type === 'QUERY')
		.every(([name]) => isSettled(states[name]));
}

/**
 * Transition waiting dynamic variables to loading/revalidating if in 'waiting' state.
 */
export function unlockWaitingDynamicVariables(
	draft: IVariableFetchStoreState,
	dynamicVariableOrder: string[],
): void {
	dynamicVariableOrder.forEach((dynName) => {
		if (draft.states[dynName] === 'waiting') {
			draft.states[dynName] = resolveFetchState(draft, dynName);
		}
	});
}

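A quick sketch of the helpers above with invented values; the draft literal fills only the fields the helpers read:

import { isSettled, resolveFetchState } from './variableFetchStoreUtils';

const draft = {
	states: {},
	dependencyGraph: {},
	parentGraph: {},
	lastUpdated: { env: 1700000000000 }, // env has been fetched before
	cycleIds: {},
};

resolveFetchState(draft, 'env'); // 'revalidating' (lastUpdated > 0)
resolveFetchState(draft, 'svc'); // 'loading' (never fetched)
isSettled('idle'); // true
isSettled('waiting'); // false
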
@@ -47,8 +47,6 @@ export interface IDashboardContext {
		allSelected: boolean,
		isDynamic?: boolean,
	) => void;
	variablesToGetUpdated: string[];
	setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
	dashboardQueryRangeCalled: boolean;
	setDashboardQueryRangeCalled: (value: boolean) => void;
	selectedRowWidgetId: string | null;

@@ -1,13 +1,14 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { EQueryType } from 'types/common/dashboard';

import { Warning } from '..';
import { SuccessResponse, Warning } from '..';
import {
	IBuilderFormula,
	IBuilderQuery,
	IClickHouseQuery,
	IPromQLQuery,
} from '../queryBuilder/queryBuilderData';
import { ExecStats } from '../v5/queryRange';
import { QueryData, QueryDataV3 } from '../widgets/getQuery';

export type QueryRangePayload = {
@@ -35,8 +36,15 @@ export interface MetricRangePayloadProps {
		newResult: MetricRangePayloadV3;
		warnings?: string[];
	};
	meta?: ExecStats;
}

/** Query range success response including optional warning and meta */
export type MetricQueryRangeSuccessResponse = SuccessResponse<
	MetricRangePayloadProps,
	unknown
> & { warning?: Warning; meta?: ExecStats };

export interface MetricRangePayloadV3 {
	data: {
		result: QueryDataV3[];
@@ -44,4 +52,5 @@ export interface MetricRangePayloadV3 {
		warnings?: string[];
	};
	warning?: Warning;
	meta?: ExecStats;
}

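A hedged sketch of consuming the new success type; the import path and handler name are assumptions, while the field names come from this diff:

import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';

function logExecStats(response: MetricQueryRangeSuccessResponse): void {
	if (response.meta) {
		// ExecStats fields per the diff below: rowsScanned, bytesScanned,
		// durationMs, stepIntervals
		console.log(response.meta.rowsScanned, response.meta.stepIntervals);
	}
	if (response.warning) {
		console.warn(response.warning);
	}
}
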
@@ -334,6 +334,7 @@ export interface ExecStats {
	rowsScanned: number;
	bytesScanned: number;
	durationMs: number;
	stepIntervals: Record<string, number>;
}

export interface Label {

@@ -10,6 +10,27 @@ type Config struct {

type Templates struct {
	Directory string `mapstructure:"directory"`
	Format    Format `mapstructure:"format"`
}

type Format struct {
	Header Header `mapstructure:"header"`
	Help   Help   `mapstructure:"help"`
	Footer Footer `mapstructure:"footer"`
}

type Header struct {
	Enabled bool   `mapstructure:"enabled"`
	LogoURL string `mapstructure:"logo_url"`
}

type Help struct {
	Enabled bool   `mapstructure:"enabled"`
	Email   string `mapstructure:"email"`
}

type Footer struct {
	Enabled bool `mapstructure:"enabled"`
}

type SMTP struct {
@@ -45,6 +66,19 @@ func newConfig() factory.Config {
		Enabled: false,
		Templates: Templates{
			Directory: "/root/templates",
			Format: Format{
				Header: Header{
					Enabled: false,
					LogoURL: "",
				},
				Help: Help{
					Enabled: false,
					Email:   "",
				},
				Footer: Footer{
					Enabled: false,
				},
			},
		},
		SMTP: SMTP{
			Address: "localhost:25",

@@ -15,6 +15,7 @@ type provider struct {
	settings factory.ScopedProviderSettings
	store    emailtypes.TemplateStore
	client   *client.Client
	config   emailing.Config
}

func NewFactory() factory.ProviderFactory[emailing.Emailing, emailing.Config] {
@@ -55,7 +56,12 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
		return nil, err
	}

	return &provider{settings: settings, store: store, client: client}, nil
	return &provider{
		settings: settings,
		store:    store,
		client:   client,
		config:   config,
	}, nil
}

func (provider *provider) SendHTML(ctx context.Context, to string, subject string, templateName emailtypes.TemplateName, data map[string]any) error {
@@ -69,8 +75,19 @@ func (provider *provider) SendHTML(ctx context.Context, to string, subject strin
		return err
	}

	// if no data is provided, create an empty map to prevent a panic when we add the format, to, and subject data
	if data == nil {
		data = make(map[string]any)
	}

	// the following are overridden if provided in the data map
	data["format"] = provider.config.Templates.Format
	data["to"] = to
	data["subject"] = subject

	content, err := emailtypes.NewContent(template, data)
	if err != nil {
		provider.settings.Logger().ErrorContext(ctx, "failed to create email content", "error", err)
		return err
	}

@@ -30,3 +30,7 @@ func (module *getter) ListByOwnedKeyRange(ctx context.Context) ([]*types.Organiz

	return module.store.ListByKeyRange(ctx, start, end)
}

func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
	return module.store.GetByName(ctx, name)
}

@@ -47,6 +47,22 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
	return organization, nil
}

func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
	organization := new(types.Organization)
	err := store.
		sqlstore.
		BunDB().
		NewSelect().
		Model(organization).
		Where("name = ?", name).
		Scan(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
	}

	return organization, nil
}

func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
	organizations := make([]*types.Organization, 0)
	err := store.

@@ -14,6 +14,9 @@ type Getter interface {

	// ListByOwnedKeyRange gets all the organizations owned by the instance
	ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)

	// Gets the organization by name
	GetByName(context.Context, string) (*types.Organization, error)
}

type Setter interface {

@@ -151,6 +151,10 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
		return "", err
	}

	if err := user.ErrIfRoot(); err != nil {
		return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
	}

	token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
	if err != nil {
		return "", err

@@ -5,11 +5,22 @@ import (

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type Config struct {
	Password PasswordConfig `mapstructure:"password"`
	Root     RootConfig     `mapstructure:"root"`
}

type RootConfig struct {
	Enabled  bool         `mapstructure:"enabled"`
	Email    valuer.Email `mapstructure:"email"`
	Password string       `mapstructure:"password"`
	OrgName  string       `mapstructure:"org_name"`
}

type PasswordConfig struct {
	Reset ResetConfig `mapstructure:"reset"`
}
@@ -31,6 +42,10 @@ func newConfig() factory.Config {
				MaxTokenLifetime: 6 * time.Hour,
			},
		},
		Root: RootConfig{
			Enabled: false,
			OrgName: "default",
		},
	}
}

@@ -39,5 +54,17 @@ func (c Config) Validate() error {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
	}

	if c.Root.Enabled {
		if c.Root.Email.IsZero() {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
		}
		if c.Root.Password == "" {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
		}
		if !types.IsPasswordValid(c.Root.Password) {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
		}
	}

	return nil
}

@@ -16,6 +16,10 @@ func NewGetter(store types.UserStore) user.Getter {
	return &getter{store: store}
}

func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
	return module.store.GetRootUserByOrgID(ctx, orgID)
}

func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
	users, err := module.store.ListUsersByOrgID(ctx, orgID)
	if err != nil {

@@ -22,8 +22,6 @@ import (
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/dustin/go-humanize"
	"golang.org/x/text/cases"
	"golang.org/x/text/language"
)

type Module struct {
@@ -105,6 +103,12 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
		return nil, err
	}

	if existingUser != nil {
		if err := existingUser.ErrIfRoot(); err != nil {
			return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
		}
	}

	if existingUser != nil {
		return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
	}
@@ -146,11 +150,9 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
			continue
		}

		if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You are invited to join a team in SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
			"CustomerName": invites[i].Name,
			"InviterName":  creator.DisplayName,
			"InviterEmail": creator.Email,
			"Link":         fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
		if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
			"inviter_email": creator.Email,
			"link":          fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
		}); err != nil {
			m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
		}
@@ -206,27 +208,21 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
		return nil, err
	}

	if err := existingUser.ErrIfRoot(); err != nil {
		return nil, errors.WithAdditionalf(err, "cannot update root user")
	}

	requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
	if err != nil {
		return nil, err
	}

	// only displayName, role can be updated
	if user.DisplayName == "" {
		user.DisplayName = existingUser.DisplayName
	}

	if user.Role == "" {
		user.Role = existingUser.Role
	}

	if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
	if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
		return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
	}

	// Make sure that th e request is not demoting the last admin user.
	// also an admin user can only change role of their own or other user
	if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
	// Make sure that the request is not demoting the last admin user.
	if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
		adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
		if err != nil {
			return nil, err
@@ -237,7 +233,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
		}
	}

	if user.Role != existingUser.Role {
	if user.Role != "" && user.Role != existingUser.Role {
		err = m.authz.ModifyGrant(ctx,
			orgID,
			roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
@@ -249,35 +245,28 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
		}
	}

	user.UpdatedAt = time.Now()
	updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
	if err != nil {
	existingUser.Update(user.DisplayName, user.Role)
	if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
		return nil, err
	}

	traits := types.NewTraitsFromUser(updatedUser)
	m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
	return existingUser, nil
}

	traits["updated_by"] = updatedBy
	m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)

	// if the role is updated then send an email
	if existingUser.Role != updatedUser.Role {
		if err := m.emailing.SendHTML(ctx, existingUser.Email.String(), "Your Role Has Been Updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{
			"CustomerName":   existingUser.DisplayName,
			"UpdatedByEmail": requestor.Email,
			"OldRole":        cases.Title(language.English).String(strings.ToLower(existingUser.Role.String())),
			"NewRole":        cases.Title(language.English).String(strings.ToLower(updatedUser.Role.String())),
		}); err != nil {
			m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
		}
func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
	if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
		return err
	}

	if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
		return nil, err
	traits := types.NewTraitsFromUser(user)
	module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
	module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)

	if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
		return err
	}

	return updatedUser, nil
	return nil
}

func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
@@ -286,6 +275,10 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot delete root user")
	}

	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
	}
@@ -380,6 +373,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot reset password for root user")
	}

	token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
	if err != nil {
		module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
@@ -394,10 +391,9 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
	if err := module.emailing.SendHTML(
		ctx,
		user.Email.String(),
		"Reset your SigNoz password",
		"A Password Reset Was Requested for SigNoz",
		emailtypes.TemplateNameResetPassword,
		map[string]any{
			"Name":   user.DisplayName,
			"Link":   resetLink,
			"Expiry": humanizedTokenLifetime,
		},
@@ -424,6 +420,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
		return err
	}

	user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
	if err != nil {
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot reset password for root user")
	}

	if err := password.Update(passwd); err != nil {
		return err
	}
@@ -432,6 +437,15 @@
}

func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
	user, err := module.store.GetUser(ctx, userID)
	if err != nil {
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot change password for root user")
	}

	password, err := module.store.GetPasswordByUserID(ctx, userID)
	if err != nil {
		return err
@@ -493,7 +507,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
}

func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
	user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
	user, err := types.NewRootUser(name, email, organization.ID)
	if err != nil {
		return nil, err
	}

187 pkg/modules/user/impluser/service.go Normal file
@@ -0,0 +1,187 @@
|
||||
package impluser

import (
	"context"
	"time"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type service struct {
	settings  factory.ScopedProviderSettings
	store     types.UserStore
	module    user.Module
	orgGetter organization.Getter
	authz     authz.AuthZ
	config    user.RootConfig
	stopC     chan struct{}
}

func NewService(
	providerSettings factory.ProviderSettings,
	store types.UserStore,
	module user.Module,
	orgGetter organization.Getter,
	authz authz.AuthZ,
	config user.RootConfig,
) user.Service {
	return &service{
		settings:  factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
		store:     store,
		module:    module,
		orgGetter: orgGetter,
		authz:     authz,
		config:    config,
		stopC:     make(chan struct{}),
	}
}

func (s *service) Start(ctx context.Context) error {
	if !s.config.Enabled {
		<-s.stopC
		return nil
	}

	ticker := time.NewTicker(10 * time.Second)
	defer ticker.Stop()

	for {
		err := s.reconcile(ctx)
		if err == nil {
			s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
			<-s.stopC
			return nil
		}

		s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)

		select {
		case <-s.stopC:
			return nil
		case <-ticker.C:
			continue
		}
	}
}

func (s *service) Stop(ctx context.Context) error {
	close(s.stopC)
	return nil
}

func (s *service) reconcile(ctx context.Context) error {
	org, err := s.orgGetter.GetByName(ctx, s.config.OrgName)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			newOrg := types.NewOrganizationWithName(s.config.OrgName)
			_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
			return err
		}

		return err
	}

	return s.reconcileRootUser(ctx, org.ID)
}

func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
	existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return err
	}

	if existingRoot == nil {
		return s.createOrPromoteRootUser(ctx, orgID)
	}

	return s.updateExistingRootUser(ctx, orgID, existingRoot)
}

func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
	existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return err
	}

	if existingUser != nil {
		oldRole := existingUser.Role

		existingUser.PromoteToRoot()
		if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
			return err
		}

		if oldRole != types.RoleAdmin {
			if err := s.authz.ModifyGrant(ctx,
				orgID,
				roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
				roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
				authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
			); err != nil {
				return err
			}
		}

		return s.setPassword(ctx, existingUser.ID)
	}

	// Create new root user
	newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
	if err != nil {
		return err
	}

	factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
	if err != nil {
		return err
	}

	return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
}

func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
	existingRoot.PromoteToRoot()

	if existingRoot.Email != s.config.Email {
		existingRoot.UpdateEmail(s.config.Email)
		if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
			return err
		}
	}

	return s.setPassword(ctx, existingRoot.ID)
}

func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
	password, err := s.store.GetPasswordByUserID(ctx, userID)
	if err != nil {
		if !errors.Ast(err, errors.TypeNotFound) {
			return err
		}

		factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
		if err != nil {
			return err
		}

		return s.store.CreatePassword(ctx, factorPassword)
	}

	if !password.Equals(s.config.Password) {
		if err := password.Update(s.config.Password); err != nil {
			return err
		}

		return s.store.UpdatePassword(ctx, password)
	}

	return nil
}
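The Start loop above is a retry-until-success pattern: attempt reconciliation, and on failure wait for the next tick unless Stop has been requested. A minimal, dependency-free sketch of the same control flow; the reconcile callback, interval, and logging here are stand-ins, not SigNoz APIs:

package main

import (
	"context"
	"errors"
	"log"
	"time"
)

// runUntilReconciled retries reconcile on a fixed interval until it succeeds,
// then parks until stopC is closed, mirroring service.Start above.
func runUntilReconciled(ctx context.Context, stopC <-chan struct{}, reconcile func(context.Context) error) {
	ticker := time.NewTicker(10 * time.Second)
	defer ticker.Stop()

	for {
		err := reconcile(ctx)
		if err == nil {
			<-stopC // success: block until Stop closes the channel
			return
		}
		log.Println("reconcile failed, retrying:", err)

		select {
		case <-stopC: // Stop requested between attempts
			return
		case <-ticker.C: // next attempt
		}
	}
}

func main() {
	stopC := make(chan struct{})
	attempts := 0
	go func() { time.Sleep(time.Second); close(stopC) }() // simulate Stop()
	runUntilReconciled(context.Background(), stopC, func(context.Context) error {
		attempts++
		if attempts < 3 {
			return errors.New("dependencies not ready")
		}
		return nil
	})
}

Blocking on the stop channel after success keeps Start alive so the factory lifecycle can treat the service like any other long-running component.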
@@ -210,20 +210,24 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
 	return users, nil
 }

-func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
-	user.UpdatedAt = time.Now()
-	_, err := store.sqlstore.BunDB().NewUpdate().
+func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
+	_, err := store.
+		sqlstore.
+		BunDBCtx(ctx).
+		NewUpdate().
 		Model(user).
 		Column("display_name").
 		Column("email").
 		Column("role").
+		Column("is_root").
 		Column("updated_at").
-		Where("id = ?", id).
 		Where("org_id = ?", orgID).
+		Where("id = ?", user.ID).
 		Exec(ctx)
 	if err != nil {
-		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
+		return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
 	}
-	return user, nil
+	return nil
 }

 func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
@@ -602,6 +606,22 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
 	})
 }

+func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
+	user := new(types.User)
+	err := store.
+		sqlstore.
+		BunDBCtx(ctx).
+		NewSelect().
+		Model(user).
+		Where("org_id = ?", orgID).
+		Where("is_root = ?", true).
+		Scan(ctx)
+	if err != nil {
+		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
+	}
+	return user, nil
+}
+
 func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
 	users := []*types.User{}
 	err := store.
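GetRootUserByOrgID selects the single row flagged is_root for an org and converts a missing row into a typed not-found error. A hedged database/sql sketch of the same pattern; the table and column names are assumed for illustration, and the standard library stands in for bun and the sqlstore helpers:

package storesketch

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
)

// getRootUserID fetches the id of the org's root user, translating
// sql.ErrNoRows into a descriptive not-found error, much like
// WrapNotFoundErrf does in the store above.
func getRootUserID(ctx context.Context, db *sql.DB, orgID string) (string, error) {
	var id string
	err := db.QueryRowContext(ctx,
		`SELECT id FROM users WHERE org_id = ? AND is_root = ? LIMIT 1`,
		orgID, true,
	).Scan(&id)
	if errors.Is(err, sql.ErrNoRows) {
		return "", fmt.Errorf("root user for org %s not found: %w", orgID, err)
	}
	if err != nil {
		return "", err
	}
	return id, nil
}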
7 pkg/modules/user/service.go Normal file
@@ -0,0 +1,7 @@
package user

import "github.com/SigNoz/signoz/pkg/factory"

type Service interface {
	factory.Service
}
@@ -34,6 +34,9 @@ type Module interface {
 	ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error

 	UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)
+
+	// UpdateAnyUser updates a user and persists the changes to the database along with the analytics and identity deletion.
+	UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
 	DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error

 	// invite
@@ -54,6 +57,9 @@
 }

 type Getter interface {
+	// Get root user by org id.
+	GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)
+
 	// Get gets the users based on the given id
 	ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
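For tests, the new Getter method is straightforward to double. A sketch of an in-memory fake satisfying the two signatures shown above; it uses the standard library errors package rather than SigNoz's typed errors, and the struct is hypothetical:

package usertest

import (
	"context"
	"errors"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// fakeGetter is an illustrative in-memory double for the Getter interface.
type fakeGetter struct {
	rootByOrg  map[valuer.UUID]*types.User
	usersByOrg map[valuer.UUID][]*types.User
}

func (f *fakeGetter) GetRootUserByOrgID(_ context.Context, orgID valuer.UUID) (*types.User, error) {
	if u, ok := f.rootByOrg[orgID]; ok {
		return u, nil
	}
	return nil, errors.New("root user not found")
}

func (f *fakeGetter) ListByOrgID(_ context.Context, orgID valuer.UUID) ([]*types.User, error) {
	return f.usersByOrg[orgID], nil
}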
@@ -183,7 +183,7 @@ type APIHandlerOpts struct {
 }

 // NewAPIHandler returns an APIHandler
-func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
 	querierOpts := querier.QuerierOptions{
 		Reader: opts.Reader,
 		Cache:  opts.Signoz.Cache,
@@ -270,6 +270,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
 		}
 	}

+	// If the root user is enabled, the setup is complete
+	if config.User.Root.Enabled {
+		aH.SetupCompleted = true
+	}
+
 	aH.Upgrader = &websocket.Upgrader{
 		CheckOrigin: func(r *http.Request) bool {
 			return true
@@ -135,7 +135,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
 		Signoz:         signoz,
 		QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
 		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
-	})
+	}, config)
 	if err != nil {
 		return nil, err
 	}
@@ -625,7 +625,7 @@ func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) (map[string][]s

 // FilterNewSeries filters out items that are too new based on metadata first_seen timestamps.
 // Returns the filtered series (old ones) excluding new series that are still within the grace period.
-func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*qbtypes.TimeSeries) ([]*qbtypes.TimeSeries, error) {
+func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
 	// Extract metric names and groupBy keys
 	metricToGroupedFields, err := r.extractMetricAndGroupBys(ctx)
 	if err != nil {
@@ -642,7 +642,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
 	seriesIdxToLookupKeys := make(map[int][]telemetrytypes.MetricMetadataLookupKey) // series index -> lookup keys

 	for i := 0; i < len(series); i++ {
-		metricLabelMap := series[i].LabelsMap()
+		metricLabelMap := series[i].Labels

 		// Collect groupBy attribute-value pairs for this series
 		seriesKeys := make([]telemetrytypes.MetricMetadataLookupKey, 0)
@@ -689,7 +689,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
 	}

 	// Filter series based on first_seen + delay
-	filteredSeries := make([]*qbtypes.TimeSeries, 0, len(series))
+	filteredSeries := make([]*v3.Series, 0, len(series))
 	evalTimeMs := ts.UnixMilli()
 	newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()

@@ -727,7 +727,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
 		// Check if first_seen + delay has passed
 		if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
 			// Still within grace period, skip this series
-			r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].LabelsMap())
+			r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].Labels)
 			continue
 		}
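The core predicate behind this filter is simple arithmetic: a series is past its grace period once first_seen plus the configured delay is at or before the evaluation time. A standalone restatement of the check above, under the same millisecond conventions:

package sketch

import "time"

// isPastGracePeriod mirrors the maxFirstSeen+newGroupEvalDelayMs > evalTimeMs
// test above, inverted: true means the series is old enough to evaluate.
func isPastGracePeriod(maxFirstSeenMs int64, delay time.Duration, evalTime time.Time) bool {
	return maxFirstSeenMs+delay.Milliseconds() <= evalTime.UnixMilli()
}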
@@ -26,33 +26,33 @@ import (
 	"github.com/SigNoz/signoz/pkg/valuer"
 )

-// createTestSeries creates a *qbtypes.TimeSeries with the given labels and optional values
+// createTestSeries creates a *v3.Series with the given labels and optional points
 // so we don't exactly need the points in the series because the labels are used to determine if the series is new or old
 // we use the labels to create a lookup key for the series and then check the first_seen timestamp for the series in the metadata table
-func createTestSeries(labels map[string]string, points []*qbtypes.TimeSeriesValue) *qbtypes.TimeSeries {
+func createTestSeries(labels map[string]string, points []v3.Point) *v3.Series {
 	if points == nil {
-		points = []*qbtypes.TimeSeriesValue{}
+		points = []v3.Point{}
 	}
-	lbls := make([]*qbtypes.Label, 0, len(labels))
-	for k, v := range labels {
-		lbls = append(lbls, &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: k}, Value: v})
-	}
-	return &qbtypes.TimeSeries{
-		Labels: lbls,
-		Values: points,
+	return &v3.Series{
+		Labels: labels,
+		Points: points,
 	}
 }

-// seriesEqual compares two *qbtypes.TimeSeries by their labels
+// seriesEqual compares two v3.Series by their labels
 // Returns true if the series have the same labels (order doesn't matter)
-func seriesEqual(s1, s2 *qbtypes.TimeSeries) bool {
-	m1 := s1.LabelsMap()
-	m2 := s2.LabelsMap()
-	if len(m1) != len(m2) {
+func seriesEqual(s1, s2 *v3.Series) bool {
+	if s1 == nil && s2 == nil {
+		return true
+	}
+	if s1 == nil || s2 == nil {
+		return false
+	}
+	if len(s1.Labels) != len(s2.Labels) {
 		return false
 	}
-	for k, v := range m1 {
-		if m2[k] != v {
+	for k, v := range s1.Labels {
+		if s2.Labels[k] != v {
 			return false
 		}
 	}
@@ -149,11 +149,11 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
 type filterNewSeriesTestCase struct {
 	name              string
 	compositeQuery    *v3.CompositeQuery
-	series            []*qbtypes.TimeSeries
+	series            []*v3.Series
 	firstSeenMap      map[telemetrytypes.MetricMetadataLookupKey]int64
 	newGroupEvalDelay valuer.TextDuration
 	evalTime          time.Time
-	expectedFiltered  []*qbtypes.TimeSeries // series that should be in the final filtered result (old enough)
+	expectedFiltered  []*v3.Series // series that should be in the final filtered result (old enough)
 	expectError       bool
 }

@@ -193,7 +193,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
@@ -205,7 +205,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
 		}, // svc-old and svc-missing should be included; svc-new is filtered out
@@ -227,7 +227,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
 		},
@@ -237,7 +237,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered:  []*qbtypes.TimeSeries{}, // all should be filtered out (new series)
+		expectedFiltered:  []*v3.Series{}, // all should be filtered out (new series)
 	},
 	{
 		name: "all old series - ClickHouse query",
@@ -254,7 +254,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
 		},
@@ -264,7 +264,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
 		}, // all should be included (old series)
@@ -292,13 +292,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		},
 		firstSeenMap:      make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		}, // early return, no filtering - all series included
 	},
@@ -322,13 +322,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		},
 		firstSeenMap:      make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		}, // early return, no filtering - all series included
 	},
@@ -358,13 +358,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
 		},
 		firstSeenMap:      make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"status": "200"}, nil),
 		}, // series included as we can't decide if it's new or old
 	},
@@ -385,7 +385,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
 		},
@@ -393,7 +393,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		// svc-no-metadata has no entry in firstSeenMap
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
 		}, // both should be included - svc-old is old, svc-no-metadata can't be decided
@@ -413,7 +413,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
 		},
 		// Only provide metadata for service_name, not env
@@ -423,7 +423,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		},
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
 		}, // has some metadata, uses max first_seen which is old
 	},
@@ -453,11 +453,11 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series:            []*qbtypes.TimeSeries{},
+		series:            []*v3.Series{},
 		firstSeenMap:      make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered:  []*qbtypes.TimeSeries{},
+		expectedFiltered:  []*v3.Series{},
 	},
 	{
 		name: "zero delay - Builder",
@@ -485,13 +485,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		},
 		firstSeenMap:      createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
 		newGroupEvalDelay: valuer.TextDuration{}, // zero delay
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		}, // with zero delay, all series pass
 	},
@@ -526,7 +526,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		},
 		firstSeenMap: mergeFirstSeenMaps(
@@ -535,7 +535,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		},
 	},
@@ -565,7 +565,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
 		},
 		// service_name is old, env is new - should use max (new)
@@ -575,7 +575,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 		),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered:  []*qbtypes.TimeSeries{}, // max first_seen is new, so should be filtered out
+		expectedFiltered:  []*v3.Series{}, // max first_seen is new, so should be filtered out
 	},
 	{
 		name: "Logs query - should skip filtering and return empty skip indexes",
@@ -600,14 +600,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc2"}, nil),
 		},
 		firstSeenMap:      make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc2"}, nil),
 		}, // Logs queries should return early, no filtering - all included
@@ -635,14 +635,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 				},
 			},
 		},
-		series: []*qbtypes.TimeSeries{
+		series: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc2"}, nil),
 		},
 		firstSeenMap:      make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		newGroupEvalDelay: defaultNewGroupEvalDelay,
 		evalTime:          defaultEvalTime,
-		expectedFiltered: []*qbtypes.TimeSeries{
+		expectedFiltered: []*v3.Series{
 			createTestSeries(map[string]string{"service_name": "svc1"}, nil),
 			createTestSeries(map[string]string{"service_name": "svc2"}, nil),
 		}, // Traces queries should return early, no filtering - all included
@@ -724,14 +724,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
 	// Build a map to count occurrences of each unique label combination in expected series
 	expectedCounts := make(map[string]int)
 	for _, expected := range tt.expectedFiltered {
-		key := labelsKey(expected.LabelsMap())
+		key := labelsKey(expected.Labels)
 		expectedCounts[key]++
 	}

 	// Build a map to count occurrences of each unique label combination in filtered series
 	actualCounts := make(map[string]int)
 	for _, filtered := range filteredSeries {
-		key := labelsKey(filtered.LabelsMap())
+		key := labelsKey(filtered.Labels)
 		actualCounts[key]++
 	}
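The assertion strategy in these tests treats expected and filtered series as multisets keyed by their label combinations: serialize each label map to a stable key, count occurrences on both sides, and require the counts to match. A sketch of that pattern with an assumed labelsKey helper (the real one is defined elsewhere in the test file):

package sketch

import (
	"fmt"
	"sort"
	"strings"
)

// labelsKey flattens a label map into a stable string by sorting keys first;
// assumed equivalent in spirit to the test helper referenced above.
func labelsKey(labels map[string]string) string {
	keys := make([]string, 0, len(labels))
	for k := range labels {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	var b strings.Builder
	for _, k := range keys {
		fmt.Fprintf(&b, "%s=%s;", k, labels[k])
	}
	return b.String()
}

// sameSeriesMultiset reports whether two slices contain the same label
// combinations with the same multiplicities, irrespective of order.
func sameSeriesMultiset(a, b []map[string]string) bool {
	counts := map[string]int{}
	for _, m := range a {
		counts[labelsKey(m)]++
	}
	for _, m := range b {
		counts[labelsKey(m)]--
	}
	for _, c := range counts {
		if c != 0 {
			return false
		}
	}
	return true
}

Counting into one map and decrementing avoids building two maps and comparing them field by field; any nonzero residue means the multisets differ.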
@@ -12,18 +12,19 @@ import (
 	"github.com/SigNoz/signoz/pkg/query-service/formatter"
 	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
+	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/times"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/ruletypes"
-	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/prometheus/prometheus/promql"
 )

 type PromRule struct {
 	*BaseRule
+	version    string
 	prometheus prometheus.Prometheus
 }

@@ -47,6 +48,7 @@ func NewPromRule(

 	p := PromRule{
 		BaseRule:   baseRule,
+		version:    postableRule.Version,
 		prometheus: prometheus,
 	}
 	p.logger = logger
@@ -81,30 +83,48 @@ func (r *PromRule) GetSelectedQuery() string {
 }

 func (r *PromRule) getPqlQuery() (string, error) {
-	if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
-		selectedQuery := r.GetSelectedQuery()
-		for _, item := range r.ruleCondition.CompositeQuery.Queries {
-			switch item.Type {
-			case qbtypes.QueryTypePromQL:
-				promQuery, ok := item.Spec.(qbtypes.PromQuery)
-				if !ok {
-					return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
-				}
-				if promQuery.Name == selectedQuery {
-					return promQuery.Query, nil
-				}
-			}
-		}
-	}
-	return "", fmt.Errorf("invalid promql rule setup")
+	if r.version == "v5" {
+		if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
+			selectedQuery := r.GetSelectedQuery()
+			for _, item := range r.ruleCondition.CompositeQuery.Queries {
+				switch item.Type {
+				case qbtypes.QueryTypePromQL:
+					promQuery, ok := item.Spec.(qbtypes.PromQuery)
+					if !ok {
+						return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
+					}
+					if promQuery.Name == selectedQuery {
+						return promQuery.Query, nil
+					}
+				}
+			}
+		}
+		return "", fmt.Errorf("invalid promql rule setup")
+	}
+
+	if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
+		if len(r.ruleCondition.CompositeQuery.PromQueries) > 0 {
+			selectedQuery := r.GetSelectedQuery()
+			if promQuery, ok := r.ruleCondition.CompositeQuery.PromQueries[selectedQuery]; ok {
+				query := promQuery.Query
+				if query == "" {
+					return query, fmt.Errorf("a promquery needs to be set for this rule to function")
+				}
+				return query, nil
+			}
+		}
+	}
+
+	return "", fmt.Errorf("invalid promql rule query")
 }

-func matrixToTimeSeries(res promql.Matrix) []*qbtypes.TimeSeries {
-	result := make([]*qbtypes.TimeSeries, 0, len(res))
+func (r *PromRule) matrixToV3Series(res promql.Matrix) []*v3.Series {
+	v3Series := make([]*v3.Series, 0, len(res))
 	for _, series := range res {
-		result = append(result, promSeriesToTimeSeries(series))
+		commonSeries := toCommonSeries(series)
+		v3Series = append(v3Series, &commonSeries)
 	}
-	return result
+	return v3Series
 }

 func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
@@ -123,31 +143,31 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
 		return nil, err
 	}

-	seriesToProcess := matrixToTimeSeries(res)
+	matrixToProcess := r.matrixToV3Series(res)

-	hasData := len(seriesToProcess) > 0
+	hasData := len(matrixToProcess) > 0
 	if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
 		return ruletypes.Vector{*missingDataAlert}, nil
 	}

 	// Filter out new series if newGroupEvalDelay is configured
 	if r.ShouldSkipNewGroups() {
-		filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
+		filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
 		// In case of error we log the error and continue with the original series
 		if filterErr != nil {
 			r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
 		} else {
-			seriesToProcess = filteredSeries
+			matrixToProcess = filteredSeries
 		}
 	}

 	var resultVector ruletypes.Vector

-	for _, series := range seriesToProcess {
+	for _, series := range matrixToProcess {
 		if !r.Condition().ShouldEval(series) {
 			r.logger.InfoContext(
 				ctx, "not enough data points to evaluate series, skipping",
-				"rule_id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
+				"rule_id", r.ID(), "num_points", len(series.Points), "required_points", r.Condition().RequiredNumPoints,
 			)
 			continue
 		}
@@ -434,25 +454,26 @@ func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time
 	}
 }

-func promSeriesToTimeSeries(series promql.Series) *qbtypes.TimeSeries {
-	ts := &qbtypes.TimeSeries{
-		Labels: make([]*qbtypes.Label, 0, len(series.Metric)),
-		Values: make([]*qbtypes.TimeSeriesValue, 0, len(series.Floats)),
+func toCommonSeries(series promql.Series) v3.Series {
+	commonSeries := v3.Series{
+		Labels:      make(map[string]string),
+		LabelsArray: make([]map[string]string, 0),
+		Points:      make([]v3.Point, 0),
 	}

 	for _, lbl := range series.Metric {
-		ts.Labels = append(ts.Labels, &qbtypes.Label{
-			Key:   telemetrytypes.TelemetryFieldKey{Name: lbl.Name},
-			Value: lbl.Value,
+		commonSeries.Labels[lbl.Name] = lbl.Value
+		commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
+			lbl.Name: lbl.Value,
 		})
 	}

 	for _, f := range series.Floats {
-		ts.Values = append(ts.Values, &qbtypes.TimeSeriesValue{
+		commonSeries.Points = append(commonSeries.Points, v3.Point{
 			Timestamp: f.T,
 			Value:     f.F,
 		})
 	}

-	return ts
+	return commonSeries
}
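toCommonSeries reshapes a promql.Series into the v3 form: one combined label map, a per-label LabelsArray, and plain (timestamp, value) points. The same reshaping on stand-in structs, free of SigNoz and Prometheus imports; field names follow the diff above but the types are illustrative:

package sketch

// flatPoint and flatSeries are stand-ins for v3.Point and v3.Series; promql
// metric labels are reduced to a plain map for this illustration.
type flatPoint struct {
	Timestamp int64
	Value     float64
}

type flatSeries struct {
	Labels      map[string]string
	LabelsArray []map[string]string
	Points      []flatPoint
}

// flatten mirrors toCommonSeries: one combined label map, a one-entry map per
// label in LabelsArray, and (timestamp, value) pairs copied into Points.
func flatten(metric map[string]string, samples []flatPoint) flatSeries {
	out := flatSeries{
		Labels:      make(map[string]string),
		LabelsArray: make([]map[string]string, 0, len(metric)),
		Points:      make([]flatPoint, 0, len(samples)),
	}
	for name, value := range metric {
		out.Labels[name] = value
		out.LabelsArray = append(out.LabelsArray, map[string]string{name: value})
	}
	out.Points = append(out.Points, samples...)
	return out
}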
@@ -20,7 +20,6 @@ import (
 	qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
 	"github.com/SigNoz/signoz/pkg/telemetrystore"
 	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
-	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/ruletypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 )
@@ -48,13 +47,9 @@ func TestPromRuleEval(t *testing.T) {
 		RuleCondition: &ruletypes.RuleCondition{
 			CompositeQuery: &v3.CompositeQuery{
 				QueryType: v3.QueryTypePromQL,
-				Queries: []qbtypes.QueryEnvelope{
-					{
-						Type: qbtypes.QueryTypePromQL,
-						Spec: qbtypes.PromQuery{
-							Name:  "A",
-							Query: "dummy_query", // This is not used in the test
-						},
-					},
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query: "dummy_query", // This is not used in the test
+					},
 				},
 			},
@@ -67,7 +62,7 @@ func TestPromRuleEval(t *testing.T) {
 		compareOp            string
 		matchType            string
 		target               float64
-		expectedAlertSample  float64
+		expectedAlertSample  v3.Point
 		expectedVectorValues []float64 // Expected values in result vector
 	}{
 		// Test cases for Equals Always
@@ -85,7 +80,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "3", // Equals
 			matchType: "2", // Always
 			target:    0.0,
-			expectedAlertSample:  0.0,
+			expectedAlertSample:  v3.Point{Value: 0.0},
 			expectedVectorValues: []float64{0.0},
 		},
 		{
@@ -150,7 +145,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "3", // Equals
 			matchType: "1", // Once
 			target:    0.0,
-			expectedAlertSample:  0.0,
+			expectedAlertSample:  v3.Point{Value: 0.0},
 			expectedVectorValues: []float64{0.0},
 		},
 		{
@@ -167,7 +162,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "3", // Equals
 			matchType: "1", // Once
 			target:    0.0,
-			expectedAlertSample: 0.0,
+			expectedAlertSample: v3.Point{Value: 0.0},
 		},
 		{
 			values: pql.Series{
@@ -183,7 +178,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "3", // Equals
 			matchType: "1", // Once
 			target:    0.0,
-			expectedAlertSample: 0.0,
+			expectedAlertSample: v3.Point{Value: 0.0},
 		},
 		{
 			values: pql.Series{
@@ -216,7 +211,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "1", // Greater Than
 			matchType: "2", // Always
 			target:    1.5,
-			expectedAlertSample:  2.0,
+			expectedAlertSample:  v3.Point{Value: 2.0},
 			expectedVectorValues: []float64{2.0},
 		},
 		{
@@ -233,7 +228,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "1", // Above
 			matchType: "2", // Always
 			target:    2.0,
-			expectedAlertSample: 3.0,
+			expectedAlertSample: v3.Point{Value: 3.0},
 		},
 		{
 			values: pql.Series{
@@ -249,7 +244,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "2", // Below
 			matchType: "2", // Always
 			target:    13.0,
-			expectedAlertSample: 12.0,
+			expectedAlertSample: v3.Point{Value: 12.0},
 		},
 		{
 			values: pql.Series{
@@ -281,7 +276,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "1", // Greater Than
 			matchType: "1", // Once
 			target:    4.5,
-			expectedAlertSample:  10.0,
+			expectedAlertSample:  v3.Point{Value: 10.0},
 			expectedVectorValues: []float64{10.0},
 		},
 		{
@@ -344,7 +339,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "4", // Not Equals
 			matchType: "2", // Always
 			target:    0.0,
-			expectedAlertSample: 1.0,
+			expectedAlertSample: v3.Point{Value: 1.0},
 		},
 		{
 			values: pql.Series{
@@ -376,7 +371,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "4", // Not Equals
 			matchType: "1", // Once
 			target:    0.0,
-			expectedAlertSample: 1.0,
+			expectedAlertSample: v3.Point{Value: 1.0},
 		},
 		{
 			values: pql.Series{
@@ -407,7 +402,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "4", // Not Equals
 			matchType: "1", // Once
 			target:    0.0,
-			expectedAlertSample: 1.0,
+			expectedAlertSample: v3.Point{Value: 1.0},
 		},
 		{
 			values: pql.Series{
@@ -423,7 +418,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "4", // Not Equals
 			matchType: "1", // Once
 			target:    0.0,
-			expectedAlertSample: 1.0,
+			expectedAlertSample: v3.Point{Value: 1.0},
 		},
 		// Test cases for Less Than Always
 		{
@@ -440,7 +435,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "2", // Less Than
 			matchType: "2", // Always
 			target:    4,
-			expectedAlertSample: 1.5,
+			expectedAlertSample: v3.Point{Value: 1.5},
 		},
 		{
 			values: pql.Series{
@@ -472,7 +467,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "2", // Less Than
 			matchType: "1", // Once
 			target:    4,
-			expectedAlertSample: 2.5,
+			expectedAlertSample: v3.Point{Value: 2.5},
 		},
 		{
 			values: pql.Series{
@@ -504,7 +499,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "3", // Equals
 			matchType: "3", // OnAverage
 			target:    6.0,
-			expectedAlertSample: 6.0,
+			expectedAlertSample: v3.Point{Value: 6.0},
 		},
 		{
 			values: pql.Series{
@@ -535,7 +530,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "4", // Not Equals
 			matchType: "3", // OnAverage
 			target:    4.5,
-			expectedAlertSample: 6.0,
+			expectedAlertSample: v3.Point{Value: 6.0},
 		},
 		{
 			values: pql.Series{
@@ -566,7 +561,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "1", // Greater Than
 			matchType: "3", // OnAverage
 			target:    4.5,
-			expectedAlertSample: 6.0,
+			expectedAlertSample: v3.Point{Value: 6.0},
 		},
 		{
 			values: pql.Series{
@@ -582,7 +577,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "2", // Less Than
 			matchType: "3", // OnAverage
 			target:    12.0,
-			expectedAlertSample: 6.0,
+			expectedAlertSample: v3.Point{Value: 6.0},
 		},
 		// Test cases for InTotal
 		{
@@ -599,7 +594,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "3", // Equals
 			matchType: "4", // InTotal
 			target:    30.0,
-			expectedAlertSample: 30.0,
+			expectedAlertSample: v3.Point{Value: 30.0},
 		},
 		{
 			values: pql.Series{
@@ -626,7 +621,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "4", // Not Equals
 			matchType: "4", // InTotal
 			target:    9.0,
-			expectedAlertSample: 10.0,
+			expectedAlertSample: v3.Point{Value: 10.0},
 		},
 		{
 			values: pql.Series{
@@ -650,7 +645,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "1", // Greater Than
 			matchType: "4", // InTotal
 			target:    10.0,
-			expectedAlertSample: 20.0,
+			expectedAlertSample: v3.Point{Value: 20.0},
 		},
 		{
 			values: pql.Series{
@@ -675,7 +670,7 @@ func TestPromRuleEval(t *testing.T) {
 			compareOp: "2", // Less Than
 			matchType: "4", // InTotal
 			target:    30.0,
-			expectedAlertSample: 20.0,
+			expectedAlertSample: v3.Point{Value: 20.0},
 		},
 		{
 			values: pql.Series{
@@ -713,7 +708,7 @@ func TestPromRuleEval(t *testing.T) {
 			assert.NoError(t, err)
 		}

-		resultVectors, err := rule.Threshold.Eval(*promSeriesToTimeSeries(c.values), rule.Unit(), ruletypes.EvalData{})
+		resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
 		assert.NoError(t, err)

 		// Compare full result vector with expected vector
@@ -729,12 +724,12 @@ func TestPromRuleEval(t *testing.T) {
 		if len(resultVectors) > 0 {
 			found := false
 			for _, sample := range resultVectors {
-				if sample.V == c.expectedAlertSample {
+				if sample.V == c.expectedAlertSample.Value {
 					found = true
 					break
 				}
 			}
-			assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample, idx, actualValues)
+			assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues)
 		} else {
 			assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
@@ -759,13 +754,9 @@ func TestPromRuleUnitCombinations(t *testing.T) {
 		RuleCondition: &ruletypes.RuleCondition{
 			CompositeQuery: &v3.CompositeQuery{
 				QueryType: v3.QueryTypePromQL,
-				Queries: []qbtypes.QueryEnvelope{
-					{
-						Type: qbtypes.QueryTypePromQL,
-						Spec: qbtypes.PromQuery{
-							Name:  "A",
-							Query: "test_metric",
-						},
-					},
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query: "test_metric",
+					},
 				},
 			},
@@ -1022,13 +1013,9 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
 		RuleCondition: &ruletypes.RuleCondition{
 			CompositeQuery: &v3.CompositeQuery{
 				QueryType: v3.QueryTypePromQL,
-				Queries: []qbtypes.QueryEnvelope{
-					{
-						Type: qbtypes.QueryTypePromQL,
-						Spec: qbtypes.PromQuery{
-							Name:  "A",
-							Query: "test_metric",
-						},
-					},
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query: "test_metric",
+					},
 				},
 			},
@@ -1137,13 +1124,9 @@ func TestMultipleThresholdPromRule(t *testing.T) {
 		RuleCondition: &ruletypes.RuleCondition{
 			CompositeQuery: &v3.CompositeQuery{
 				QueryType: v3.QueryTypePromQL,
-				Queries: []qbtypes.QueryEnvelope{
-					{
-						Type: qbtypes.QueryTypePromQL,
-						Spec: qbtypes.PromQuery{
-							Name:  "A",
-							Query: "test_metric",
-						},
-					},
+				PromQueries: map[string]*v3.PromQuery{
+					"A": {
+						Query: "test_metric",
+					},
 				},
 			},
@@ -1378,11 +1361,8 @@ func TestPromRule_NoData(t *testing.T) {
 		MatchType: ruletypes.AtleastOnce,
 		CompositeQuery: &v3.CompositeQuery{
 			QueryType: v3.QueryTypePromQL,
-			Queries: []qbtypes.QueryEnvelope{
-				{
-					Type: qbtypes.QueryTypePromQL,
-					Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
-				},
+			PromQueries: map[string]*v3.PromQuery{
+				"A": {Query: "test_metric"},
 			},
 		},
 		Thresholds: &ruletypes.RuleThresholdData{
@@ -1506,11 +1486,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
 		Target: &target,
 		CompositeQuery: &v3.CompositeQuery{
 			QueryType: v3.QueryTypePromQL,
-			Queries: []qbtypes.QueryEnvelope{
-				{
-					Type: qbtypes.QueryTypePromQL,
-					Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
-				},
+			PromQueries: map[string]*v3.PromQuery{
+				"A": {Query: "test_metric"},
 			},
 		},
 		Thresholds: &ruletypes.RuleThresholdData{
@@ -1658,11 +1635,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
 		MatchType: ruletypes.AtleastOnce,
 		CompositeQuery: &v3.CompositeQuery{
 			QueryType: v3.QueryTypePromQL,
-			Queries: []qbtypes.QueryEnvelope{
-				{
-					Type: qbtypes.QueryTypePromQL,
-					Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
-				},
+			PromQueries: map[string]*v3.PromQuery{
+				"A": {Query: "test_metric"},
 			},
 		},
 	},
@@ -1,24 +1,38 @@
 package rules

 import (
+	"bytes"
 	"context"
 	"encoding/json"
 	"fmt"
 	"log/slog"
+	"math"
 	"net/url"
 	"reflect"
+	"text/template"
 	"time"

 	"github.com/SigNoz/signoz/pkg/contextlinks"
+	"github.com/SigNoz/signoz/pkg/query-service/common"
 	"github.com/SigNoz/signoz/pkg/query-service/model"
+	"github.com/SigNoz/signoz/pkg/query-service/postprocess"
 	"github.com/SigNoz/signoz/pkg/transition"
 	"github.com/SigNoz/signoz/pkg/types/ruletypes"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 	"github.com/SigNoz/signoz/pkg/valuer"

+	"github.com/SigNoz/signoz/pkg/query-service/app/querier"
+	querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
+	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
+	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
 	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
+	querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/times"
 	"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"

+	logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
+	tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
 	"github.com/SigNoz/signoz/pkg/query-service/formatter"

 	querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -28,9 +42,23 @@ import (

 type ThresholdRule struct {
 	*BaseRule
+	// Ever since we introduced the new metrics query builder, the version is "v4"
+	// for all the rules
+	// if the version is "v3", then we use the old querier
+	// if the version is "v4", then we use the new querierV2
+	version string

-	// querierV5 is the query builder v5 querier used for all alert rule evaluation
+	// querier is used for alerts created before the introduction of new metrics query builder
+	querier interfaces.Querier
+	// querierV2 is used for alerts created after the introduction of new metrics query builder
+	querierV2 interfaces.Querier
+
+	// querierV5 is used for alerts migrated after the introduction of new query builder
 	querierV5 querierV5.Querier
+
+	// used for attribute metadata enrichment for logs and traces
+	logsKeys  map[string]v3.AttributeKey
+	spansKeys map[string]v3.AttributeKey
 }

 var _ Rule = (*ThresholdRule)(nil)
@@ -54,10 +82,25 @@ func NewThresholdRule(
 	}

 	t := ThresholdRule{
-		BaseRule:  baseRule,
-		querierV5: querierV5,
+		BaseRule: baseRule,
+		version:  p.Version,
 	}

+	querierOption := querier.QuerierOptions{
+		Reader:       reader,
+		Cache:        nil,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+	}
+
+	querierOptsV2 := querierV2.QuerierOptions{
+		Reader:       reader,
+		Cache:        nil,
+		KeyGenerator: queryBuilder.NewKeyGenerator(),
+	}
+
+	t.querier = querier.NewQuerier(querierOption)
+	t.querierV2 = querierV2.NewQuerier(querierOptsV2)
+	t.querierV5 = querierV5
 	t.reader = reader
 	return &t, nil
 }
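NewThresholdRule now keeps three queriers and picks one per evaluation based on the rule's stored version. A compact sketch of that dispatch shape; the interface is reduced to a single method and the names are illustrative, mirroring the struct comments above:

package sketch

import "context"

type rangeQuerier interface {
	QueryRange(ctx context.Context) ([]float64, error)
}

// versionedRule mirrors ThresholdRule's per-version querier selection; the
// field names follow the struct comments above, the types are stand-ins.
type versionedRule struct {
	version   string
	querier   rangeQuerier // "v3": alerts created before the new metrics query builder
	querierV2 rangeQuerier // "v4": alerts created after it
	querierV5 rangeQuerier // "v5": alerts migrated to the new query builder
}

func (r *versionedRule) eval(ctx context.Context) ([]float64, error) {
	switch r.version {
	case "v5":
		return r.querierV5.QueryRange(ctx)
	case "v4":
		return r.querierV2.QueryRange(ctx)
	default:
		return r.querier.QueryRange(ctx)
	}
}

Keeping the dispatch on a stored version string, rather than migrating all rules at once, lets old alerts keep their original evaluation semantics while new ones move forward.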
@@ -77,9 +120,169 @@ func (r *ThresholdRule) Type() ruletypes.RuleType {
 	return ruletypes.RuleTypeThreshold
 }

-func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
+func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
 	r.logger.InfoContext(
-		ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
+		ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
 	)

 	startTs, endTs := r.Timestamps(ts)
+	start, end := startTs.UnixMilli(), endTs.UnixMilli()
+
+	if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
+		params := &v3.QueryRangeParamsV3{
+			Start: start,
+			End:   end,
+			Step:  int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
+			CompositeQuery: &v3.CompositeQuery{
+				QueryType:         r.ruleCondition.CompositeQuery.QueryType,
+				PanelType:         r.ruleCondition.CompositeQuery.PanelType,
+				BuilderQueries:    make(map[string]*v3.BuilderQuery),
+				ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
+				PromQueries:       make(map[string]*v3.PromQuery),
+				Unit:              r.ruleCondition.CompositeQuery.Unit,
+			},
+			Variables: make(map[string]interface{}),
+			NoCache:   true,
+		}
+		querytemplate.AssignReservedVarsV3(params)
+		for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
+			if chQuery.Disabled {
+				continue
+			}
+			tmpl := template.New("clickhouse-query")
+			tmpl, err := tmpl.Parse(chQuery.Query)
+			if err != nil {
+				return nil, err
+			}
+			var query bytes.Buffer
+			err = tmpl.Execute(&query, params.Variables)
+			if err != nil {
+				return nil, err
+			}
+			params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
+				Query:    query.String(),
+				Disabled: chQuery.Disabled,
+				Legend:   chQuery.Legend,
+			}
+		}
+		return params, nil
+	}
+
+	if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
+		for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
+			// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
+			if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
+				q.StepInterval = minStep
+			}
+
+			q.SetShiftByFromFunc()
+
+			if q.DataSource == v3.DataSourceMetrics {
+				// if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes
+				// if the time range is greater than 1 week, set the step interval to be multiple of 30 mins
+				if end-start >= 24*time.Hour.Milliseconds() && end-start < 7*24*time.Hour.Milliseconds() {
+					q.StepInterval = int64(math.Round(float64(q.StepInterval)/300)) * 300
+				} else if end-start >= 7*24*time.Hour.Milliseconds() {
+					q.StepInterval = int64(math.Round(float64(q.StepInterval)/1800)) * 1800
+				}
+			}
+		}
+	}
+
+	if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
+		r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
+	}
+
+	// default mode
+	return &v3.QueryRangeParamsV3{
+		Start:          start,
+		End:            end,
+		Step:           int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
+		CompositeQuery: r.ruleCondition.CompositeQuery,
+		Variables:      make(map[string]interface{}),
+		NoCache:        true,
+	}, nil
+}
+
+func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
+	if r.version == "v5" {
+		return r.prepareLinksToLogsV5(ctx, ts, lbls)
+	}
+
+	selectedQuery := r.GetSelectedQuery()
+
+	qr, err := r.prepareQueryRange(ctx, ts)
+	if err != nil {
+		return ""
+	}
+	start := time.UnixMilli(qr.Start)
+	end := time.UnixMilli(qr.End)
+
+	// TODO(srikanthccv): handle formula queries
+	if selectedQuery < "A" || selectedQuery > "Z" {
+		return ""
+	}
+
+	q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
+	if q == nil {
+		return ""
+	}
+
+	if q.DataSource != v3.DataSourceLogs {
+		return ""
+	}
+
+	queryFilter := []v3.FilterItem{}
+	if q.Filters != nil {
+		queryFilter = q.Filters.Items
+	}
+
+	filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)
+
+	return contextlinks.PrepareLinksToLogs(start, end, filterItems)
+}
+
+func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
+	if r.version == "v5" {
+		return r.prepareLinksToTracesV5(ctx, ts, lbls)
+	}
+
+	selectedQuery := r.GetSelectedQuery()
+
+	qr, err := r.prepareQueryRange(ctx, ts)
+	if err != nil {
+		return ""
+	}
+	start := time.UnixMilli(qr.Start)
+	end := time.UnixMilli(qr.End)
+
+	// TODO(srikanthccv): handle formula queries
+	if selectedQuery < "A" || selectedQuery > "Z" {
+		return ""
+	}
+
+	q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
+	if q == nil {
+		return ""
+	}
+
+	if q.DataSource != v3.DataSourceTraces {
+		return ""
+	}
+
+	queryFilter := []v3.FilterItem{}
+	if q.Filters != nil {
+		queryFilter = q.Filters.Items
+	}
+
+	filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)
+
+	return contextlinks.PrepareLinksToTraces(start, end, filterItems)
+}
+
+func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
+	r.logger.InfoContext(
+		ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
+	)
+
+	startTs, endTs := r.Timestamps(ts)
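The ClickHouse branch of prepareQueryRange runs each stored query through text/template so reserved variables can be substituted before execution. A self-contained sketch of just that substitution step; the variable names and query are illustrative (the real set comes from querytemplate.AssignReservedVarsV3):

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

func main() {
	// Assumed variable names; stand-ins for the reserved template variables.
	vars := map[string]interface{}{
		"start_timestamp_ms": int64(1700000000000),
		"end_timestamp_ms":   int64(1700000300000),
	}

	raw := `SELECT count() FROM samples WHERE ts BETWEEN {{.start_timestamp_ms}} AND {{.end_timestamp_ms}}`

	// Parse and execute the stored query as a template, as the rule does.
	tmpl, err := template.New("clickhouse-query").Parse(raw)
	if err != nil {
		panic(err)
	}
	var query bytes.Buffer
	if err := tmpl.Execute(&query, vars); err != nil {
		panic(err)
	}
	fmt.Println(query.String())
}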
@@ -99,10 +302,10 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*q
 	return req, nil
 }

-func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
+func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
 	selectedQuery := r.GetSelectedQuery()

-	qr, err := r.prepareQueryRange(ctx, ts)
+	qr, err := r.prepareQueryRangeV5(ctx, ts)
 	if err != nil {
 		return ""
 	}
@@ -139,10 +342,10 @@ func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lb
 	return contextlinks.PrepareLinksToLogsV5(start, end, whereClause)
 }

-func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
+func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
 	selectedQuery := r.GetSelectedQuery()

-	qr, err := r.prepareQueryRange(ctx, ts)
+	qr, err := r.prepareQueryRangeV5(ctx, ts)
 	if err != nil {
 		return ""
 	}
@@ -188,6 +391,115 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
 	if err != nil {
 		return nil, err
 	}
+	err = r.PopulateTemporality(ctx, orgID, params)
+	if err != nil {
+		return nil, fmt.Errorf("internal error while setting temporality")
+	}
+
+	if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+		hasLogsQuery := false
+		hasTracesQuery := false
+		for _, query := range params.CompositeQuery.BuilderQueries {
+			if query.DataSource == v3.DataSourceLogs {
+				hasLogsQuery = true
+			}
+			if query.DataSource == v3.DataSourceTraces {
+				hasTracesQuery = true
+			}
+		}
+
+		if hasLogsQuery {
+			// check if any enrichment is required for logs if yes then enrich them
+			if logsv3.EnrichmentRequired(params) {
+				logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
+				if apiErr != nil {
+					return nil, apiErr.ToError()
+				}
+				logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
+				r.logsKeys = logsKeys
+				logsv3.Enrich(params, logsKeys)
+			}
+		}
+
+		if hasTracesQuery {
+			spanKeys, err := r.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
+			if err != nil {
+				return nil, err
+			}
+			r.spansKeys = spanKeys
+			tracesV4.Enrich(params, spanKeys)
+		}
+	}
+
+	var results []*v3.Result
+	var queryErrors map[string]error
+
+	if r.version == "v4" {
+		results, queryErrors, err = r.querierV2.QueryRange(ctx, orgID, params)
+	} else {
+		results, queryErrors, err = r.querier.QueryRange(ctx, orgID, params)
+	}
+
+	if err != nil {
+		r.logger.ErrorContext(ctx, "failed to get alert query range result", "rule_name", r.Name(), "error", err, "query_errors", queryErrors)
+		return nil, fmt.Errorf("internal error while querying")
+	}
+
+	if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
+		results, err = postprocess.PostProcessResult(results, params)
+		if err != nil {
+			r.logger.ErrorContext(ctx, "failed to post process result", "rule_name", r.Name(), "error", err)
+			return nil, fmt.Errorf("internal error while post processing")
+		}
+	}
+
+	selectedQuery := r.GetSelectedQuery()
+
+	var queryResult *v3.Result
+	for _, res := range results {
+		if res.QueryName == selectedQuery {
+			queryResult = res
+			break
+		}
+	}
+
+	hasData := queryResult != nil && len(queryResult.Series) > 0
+	if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
+		return ruletypes.Vector{*missingDataAlert}, nil
+	}
+
+	var resultVector ruletypes.Vector
+
+	if queryResult == nil {
+		r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
+		return resultVector, nil
+	}
+
+	for _, series := range queryResult.Series {
+		if !r.Condition().ShouldEval(series) {
+			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
+			continue
+		}
+		resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
+			ActiveAlerts:  r.ActiveAlertsLabelFP(),
+			SendUnmatched: r.ShouldSendUnmatched(),
+		})
+		if err != nil {
+			return nil, err
+		}
+		resultVector = append(resultVector, resultSeries...)
+	}
+
+	return resultVector, nil
+}
+
+func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
+	params, err := r.prepareQueryRangeV5(ctx, ts)
+	if err != nil {
+		return nil, err
+	}
+
+	var results []*v3.Result
+
 	v5Result, err := r.querierV5.QueryRange(ctx, orgID, params)
 	if err != nil {
@@ -195,24 +507,26 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
|
||||
return nil, fmt.Errorf("internal error while querying")
|
||||
}
|
||||
|
||||
for _, item := range v5Result.Data.Results {
|
||||
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok {
|
||||
results = append(results, transition.ConvertV5TimeSeriesDataToV4Result(tsData))
|
||||
} else {
|
||||
// NOTE: should not happen but just to ensure we don't miss it if it happens for some reason
|
||||
r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got", "item_type", reflect.TypeOf(item))
|
||||
}
|
||||
}
|
||||
|
||||
selectedQuery := r.GetSelectedQuery()
|
||||
|
||||
var queryResult *qbtypes.TimeSeriesData
|
||||
for _, item := range v5Result.Data.Results {
|
||||
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok && tsData.QueryName == selectedQuery {
|
||||
queryResult = tsData
|
||||
var queryResult *v3.Result
|
||||
for _, res := range results {
|
||||
if res.QueryName == selectedQuery {
|
||||
queryResult = res
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var allSeries []*qbtypes.TimeSeries
|
||||
if queryResult != nil {
|
||||
for _, bucket := range queryResult.Aggregations {
|
||||
allSeries = append(allSeries, bucket.Series...)
|
||||
}
|
||||
}
|
||||
|
||||
hasData := len(allSeries) > 0
|
||||
hasData := queryResult != nil && len(queryResult.Series) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
@@ -225,7 +539,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
|
||||
}
|
||||
|
||||
// Filter out new series if newGroupEvalDelay is configured
|
||||
seriesToProcess := allSeries
|
||||
seriesToProcess := queryResult.Series
|
||||
if r.ShouldSkipNewGroups() {
|
||||
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
|
||||
// In case of error we log the error and continue with the original series
|
||||
@@ -238,7 +552,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
|
||||
|
||||
for _, series := range seriesToProcess {
|
||||
if !r.Condition().ShouldEval(series) {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
|
||||
@@ -259,7 +573,16 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
||||
|
||||
valueFormatter := formatter.FromUnit(r.Unit())
|
||||
|
||||
res, err := r.buildAndRunQuery(ctx, r.orgID, ts)
|
||||
var res ruletypes.Vector
|
||||
var err error
|
||||
|
||||
if r.version == "v5" {
|
||||
r.logger.InfoContext(ctx, "running v5 query")
|
||||
res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
|
||||
} else {
|
||||
r.logger.InfoContext(ctx, "running v4 query")
|
||||
res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return 0, err
|
||||
|
||||
File diff suppressed because it is too large
@@ -167,7 +167,7 @@ func NewSQLMigrationProviderFactories(
		sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
		sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
		sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
		sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
		sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
	)
}

@@ -389,6 +389,8 @@ func New(
	// Initialize all modules
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)

	userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)

@@ -438,6 +440,7 @@ func New(
		factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
		factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
		factory.NewNamedService(factory.MustNewName("authz"), authz),
		factory.NewNamedService(factory.MustNewName("user"), userService),
	)
	if err != nil {
		return nil, err
80
pkg/sqlmigration/064_add_root_user.go
Normal file
@@ -0,0 +1,80 @@
package sqlmigration

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun"
	"github.com/uptrace/bun/migrate"
)

type addRootUser struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
}

func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
		return &addRootUser{
			sqlstore:  sqlstore,
			sqlschema: sqlschema,
		}, nil
	})
}

func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
	if err := migrations.Register(migration.Up, migration.Down); err != nil {
		return err
	}

	return nil
}

func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
	if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
		return err
	}

	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	defer func() {
		_ = tx.Rollback()
	}()

	table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
	if err != nil {
		return err
	}

	column := &sqlschema.Column{
		Name:     sqlschema.ColumnName("is_root"),
		DataType: sqlschema.DataTypeBoolean,
		Nullable: false,
	}

	sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
	for _, sql := range sqls {
		if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
			return err
		}
	}

	if err := tx.Commit(); err != nil {
		return err
	}

	if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
		return err
	}

	return nil
}

func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
	return nil
}
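For context, a minimal sketch of how a migration like addRootUser is driven once its factory is wired into NewSQLMigrationProviderFactories. The bun migrate calls (NewMigrations, NewMigrator, Init, Migrate) are the real uptrace/bun API; the standalone wiring, SQLite driver, and DSN are illustrative assumptions, not SigNoz's actual bootstrap.

package main

import (
	"context"
	"database/sql"
	"log"

	"github.com/uptrace/bun"
	"github.com/uptrace/bun/dialect/sqlitedialect"
	"github.com/uptrace/bun/migrate"
	_ "modernc.org/sqlite" // illustrative driver choice
)

func main() {
	ctx := context.Background()

	sqldb, err := sql.Open("sqlite", "file:signoz.db")
	if err != nil {
		log.Fatal(err)
	}
	db := bun.NewDB(sqldb, sqlitedialect.New())

	// In SigNoz, each migration's Register method adds its Up/Down pair to
	// this collection via the provider factory; shown as a comment here
	// because addRootUser is unexported in pkg/sqlmigration.
	migrations := migrate.NewMigrations()
	// _ = migration.Register(migrations)

	migrator := migrate.NewMigrator(db, migrations)
	if err := migrator.Init(ctx); err != nil { // creates bun's bookkeeping tables
		log.Fatal(err)
	}
	if _, err := migrator.Migrate(ctx); err != nil { // runs pending Up functions
		log.Fatal(err)
	}
}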
@@ -1,194 +0,0 @@
package sqlmigration

import (
	"context"
	"database/sql"
	"encoding/json"
	"log/slog"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/transition"
	"github.com/uptrace/bun"
	"github.com/uptrace/bun/migrate"
)

type migrateRulesV4ToV5 struct {
	store          sqlstore.SQLStore
	telemetryStore telemetrystore.TelemetryStore
	logger         *slog.Logger
}

func NewMigrateRulesV4ToV5Factory(
	store sqlstore.SQLStore,
	telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(
		factory.MustNewName("migrate_rules_v4_to_v5"),
		func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
			return &migrateRulesV4ToV5{
				store:          store,
				telemetryStore: telemetryStore,
				logger:         ps.Logger,
			}, nil
		})
}

func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
	if err := migrations.Register(migration.Up, migration.Down); err != nil {
		return err
	}
	return nil
}

func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
	query := `
		SELECT name
		FROM (
			SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
			INTERSECT
			SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
		)
		ORDER BY name
	`

	rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
	if err != nil {
		migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
		return nil, nil
	}
	defer rows.Close()

	var keys []string
	for rows.Next() {
		var key string
		if err := rows.Scan(&key); err != nil {
			migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
			continue
		}
		keys = append(keys, key)
	}

	return keys, nil
}

func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
	query := `
		SELECT tagKey
		FROM signoz_traces.distributed_span_attributes_keys
		WHERE tagType IN ('tag', 'resource')
		GROUP BY tagKey
		HAVING COUNT(DISTINCT tagType) > 1
		ORDER BY tagKey
	`

	rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
	if err != nil {
		migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
		return nil, nil
	}
	defer rows.Close()

	var keys []string
	for rows.Next() {
		var key string
		if err := rows.Scan(&key); err != nil {
			migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
			continue
		}
		keys = append(keys, key)
	}

	return keys, nil
}

func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
	logsKeys, err := migration.getLogDuplicateKeys(ctx)
	if err != nil {
		return err
	}

	tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
	if err != nil {
		return err
	}

	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	defer func() {
		_ = tx.Rollback()
	}()

	var rules []struct {
		ID   string         `bun:"id"`
		Data map[string]any `bun:"data"`
	}

	err = tx.NewSelect().
		Table("rule").
		Column("id", "data").
		Scan(ctx, &rules)
	if err != nil {
		if err == sql.ErrNoRows {
			return nil
		}
		return err
	}

	alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)

	for _, rule := range rules {
		version, _ := rule.Data["version"].(string)
		if version == "v5" {
			continue
		}

		migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)

		// Check if the queries envelope already exists and is non-empty
		hasQueriesEnvelope := false
		if condition, ok := rule.Data["condition"].(map[string]any); ok {
			if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
				if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
					hasQueriesEnvelope = true
				}
			}
		}

		if hasQueriesEnvelope {
			// Case 2: Already has queries envelope, just bump version
			migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
			rule.Data["version"] = "v5"
		} else {
			// Case 1: Old format, run full migration
			migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
			alertsMigrator.Migrate(ctx, rule.Data)
			// Force version to v5 regardless of Migrate return value
			rule.Data["version"] = "v5"
		}

		dataJSON, err := json.Marshal(rule.Data)
		if err != nil {
			return err
		}

		_, err = tx.NewUpdate().
			Table("rule").
			Set("data = ?", string(dataJSON)).
			Where("id = ?", rule.ID).
			Exec(ctx)
		if err != nil {
			return err
		}
	}

	return tx.Commit()
}

func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
	return nil
}
102
pkg/transition/v5_to_v4.go
Normal file
@@ -0,0 +1,102 @@
package transition

import (
	"fmt"

	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

// ConvertV5TimeSeriesDataToV4Result converts v5 TimeSeriesData to v4 Result
func ConvertV5TimeSeriesDataToV4Result(v5Data *qbtypes.TimeSeriesData) *v3.Result {
	if v5Data == nil {
		return nil
	}

	result := &v3.Result{
		QueryName: v5Data.QueryName,
		Series:    make([]*v3.Series, 0),
	}

	toV4Series := func(ts *qbtypes.TimeSeries) *v3.Series {
		series := &v3.Series{
			Labels:      make(map[string]string),
			LabelsArray: make([]map[string]string, 0),
			Points:      make([]v3.Point, 0, len(ts.Values)),
		}

		for _, label := range ts.Labels {
			valueStr := fmt.Sprintf("%v", label.Value)
			series.Labels[label.Key.Name] = valueStr
		}

		if len(series.Labels) > 0 {
			series.LabelsArray = append(series.LabelsArray, series.Labels)
		}

		for _, tsValue := range ts.Values {
			if tsValue.Partial {
				continue
			}

			point := v3.Point{
				Timestamp: tsValue.Timestamp,
				Value:     tsValue.Value,
			}
			series.Points = append(series.Points, point)
		}
		return series
	}

	for _, aggBucket := range v5Data.Aggregations {
		for _, ts := range aggBucket.Series {
			result.Series = append(result.Series, toV4Series(ts))
		}

		if len(aggBucket.AnomalyScores) != 0 {
			result.AnomalyScores = make([]*v3.Series, 0)
			for _, ts := range aggBucket.AnomalyScores {
				result.AnomalyScores = append(result.AnomalyScores, toV4Series(ts))
			}
		}

		if len(aggBucket.PredictedSeries) != 0 {
			result.PredictedSeries = make([]*v3.Series, 0)
			for _, ts := range aggBucket.PredictedSeries {
				result.PredictedSeries = append(result.PredictedSeries, toV4Series(ts))
			}
		}

		if len(aggBucket.LowerBoundSeries) != 0 {
			result.LowerBoundSeries = make([]*v3.Series, 0)
			for _, ts := range aggBucket.LowerBoundSeries {
				result.LowerBoundSeries = append(result.LowerBoundSeries, toV4Series(ts))
			}
		}

		if len(aggBucket.UpperBoundSeries) != 0 {
			result.UpperBoundSeries = make([]*v3.Series, 0)
			for _, ts := range aggBucket.UpperBoundSeries {
				result.UpperBoundSeries = append(result.UpperBoundSeries, toV4Series(ts))
			}
		}
	}

	return result
}

// ConvertV5TimeSeriesDataSliceToV4Results converts a slice of v5 TimeSeriesData to v4 QueryRangeResponse
func ConvertV5TimeSeriesDataSliceToV4Results(v5DataSlice []*qbtypes.TimeSeriesData) *v3.QueryRangeResponse {
	response := &v3.QueryRangeResponse{
		ResultType: "matrix", // Time series data is typically "matrix" type
		Result:     make([]*v3.Result, 0, len(v5DataSlice)),
	}

	for _, v5Data := range v5DataSlice {
		if result := ConvertV5TimeSeriesDataToV4Result(v5Data); result != nil {
			response.Result = append(response.Result, result)
		}
	}

	return response
}
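A short usage sketch for the converter above. The field names (QueryName, Aggregations, Series, Labels, Values, Partial) are taken from this file; the bucket type name qbtypes.AggregationBucket and the concrete values are assumptions for illustration.

// Illustrative input: one query, one aggregation bucket, one series with a
// partial point that the converter drops.
v5Data := &qbtypes.TimeSeriesData{
	QueryName: "A",
	Aggregations: []*qbtypes.AggregationBucket{{
		Series: []*qbtypes.TimeSeries{{
			Labels: []*qbtypes.Label{
				{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "checkout"},
			},
			Values: []*qbtypes.TimeSeriesValue{
				{Timestamp: 1000, Value: 0.42},
				{Timestamp: 2000, Value: 0.58, Partial: true}, // skipped by toV4Series
			},
		}},
	}},
}

result := transition.ConvertV5TimeSeriesDataToV4Result(v5Data)
// result.QueryName == "A"
// result.Series[0].Labels == map[string]string{"service": "checkout"}
// len(result.Series[0].Points) == 1 (only the non-partial value survives)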
@@ -12,13 +12,12 @@ import (
var (
	// Templates is a list of all the templates that are supported by the emailing service.
	// This list should be updated whenever a new template is added.
	Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameUpdateRole, TemplateNameResetPassword}
	Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameResetPassword}
)

var (
	TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation_email")}
	TemplateNameUpdateRole      = TemplateName{valuer.NewString("update_role")}
	TemplateNameResetPassword   = TemplateName{valuer.NewString("reset_password_email")}
	TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
	TemplateNameResetPassword   = TemplateName{valuer.NewString("reset_password")}
)

type TemplateName struct{ valuer.String }
@@ -27,8 +26,6 @@ func NewTemplateName(name string) (TemplateName, error) {
	switch name {
	case TemplateNameInvitationEmail.StringValue():
		return TemplateNameInvitationEmail, nil
	case TemplateNameUpdateRole.StringValue():
		return TemplateNameUpdateRole, nil
	case TemplateNameResetPassword.StringValue():
		return TemplateNameResetPassword, nil
	default:
@@ -40,7 +37,7 @@ func NewContent(template *template.Template, data map[string]any) ([]byte, error
	buf := bytes.NewBuffer(nil)
	err := template.Execute(buf, data)
	if err != nil {
		return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute template")
		return nil, err
	}

	return buf.Bytes(), nil
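With the identifiers renamed above ("invitation_email" becomes "invitation", "reset_password_email" becomes "reset_password", and "update_role" removed), template lookup goes through NewTemplateName. A small sketch, assuming the package is imported as emailtypes:

name, err := emailtypes.NewTemplateName("invitation")
if err != nil {
	// Unknown names, including the removed "update_role", now fall through
	// to the default branch and return an error.
	log.Fatal(err)
}
fmt.Println(name.StringValue()) // "invitation"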
@@ -41,6 +41,22 @@ func NewOrganization(displayName string) *Organization {
	}
}

func NewOrganizationWithName(name string) *Organization {
	id := valuer.GenerateUUID()
	return &Organization{
		Identifiable: Identifiable{
			ID: id,
		},
		TimeAuditable: TimeAuditable{
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
		},
		Name:        name,
		DisplayName: name,
		Key:         NewOrganizationKey(id),
	}
}

func NewOrganizationKey(orgID valuer.UUID) uint32 {
	hasher := fnv.New32a()

@@ -74,6 +90,7 @@ type TTLSetting struct {
type OrganizationStore interface {
	Create(context.Context, *Organization) error
	Get(context.Context, valuer.UUID) (*Organization, error)
	GetByName(context.Context, string) (*Organization, error)
	GetAll(context.Context) ([]*Organization, error)
	ListByKeyRange(context.Context, uint32, uint32) ([]*Organization, error)
	Update(context.Context, *Organization) error
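GetByName and NewOrganizationWithName together support the root-user flow's org_name config: resolve the configured organization by name and create it when absent. A hedged sketch of that shape; store, cfg, and the error handling are simplified assumptions:

// Illustrative look-up-or-create against an OrganizationStore.
org, err := store.GetByName(ctx, cfg.OrgName)
if err != nil {
	// Assume the error means "not found"; real code would inspect it.
	org = types.NewOrganizationWithName(cfg.OrgName)
	if createErr := store.Create(ctx, org); createErr != nil {
		return createErr
	}
}
_ = org // provision the root user under this organization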
@@ -76,33 +76,6 @@ type TimeSeries struct {
	Values []*TimeSeriesValue `json:"values"`
}

// LabelsMap converts the label slice to a map[string]string for use in
// alert evaluation helpers that operate on flat label maps.
func (ts *TimeSeries) LabelsMap() map[string]string {
	if ts == nil {
		return nil
	}
	m := make(map[string]string, len(ts.Labels))
	for _, l := range ts.Labels {
		m[l.Key.Name] = fmt.Sprintf("%v", l.Value)
	}
	return m
}

// NonPartialValues returns only the values where Partial is false.
func (ts *TimeSeries) NonPartialValues() []*TimeSeriesValue {
	if ts == nil {
		return nil
	}
	result := make([]*TimeSeriesValue, 0, len(ts.Values))
	for _, v := range ts.Values {
		if !v.Partial {
			result = append(result, v)
		}
	}
	return result
}

type Label struct {
	Key   telemetrytypes.TelemetryFieldKey `json:"key"`
	Value any                              `json:"value"`

@@ -203,11 +203,11 @@ func (rc *RuleCondition) IsValid() bool {
}

// ShouldEval checks if the further series should be evaluated at all for alerts.
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
func (rc *RuleCondition) ShouldEval(series *v3.Series) bool {
	if rc == nil {
		return true
	}
	return !rc.RequireMinPoints || len(series.NonPartialValues()) >= rc.RequiredNumPoints
	return !rc.RequireMinPoints || len(series.Points) >= rc.RequiredNumPoints
}

// QueryType is a shorthand method to get query type
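After this change the minimum-points gate counts raw v3 Points rather than non-partial v5 values; a small sketch of the gating behaviour, with illustrative numbers:

rc := &RuleCondition{RequireMinPoints: true, RequiredNumPoints: 3}

short := &v3.Series{Points: []v3.Point{{Timestamp: 1000, Value: 1.0}}}
full := &v3.Series{Points: []v3.Point{
	{Timestamp: 1000, Value: 1.0},
	{Timestamp: 2000, Value: 2.0},
	{Timestamp: 3000, Value: 3.0},
}}

rc.ShouldEval(short) // false: 1 point < 3 required, series is skipped
rc.ShouldEval(full)  // true: 3 points >= 3 required
// Note: unlike the removed NonPartialValues-based check, partial points
// now count toward the minimum.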
@@ -355,14 +355,6 @@ func (r *PostableRule) validate() error {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
	}

	if r.Version != "" && r.Version != "v5" {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
	}

	if r.Version == "v5" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) == 0 {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "queries envelope is required in compositeQuery"))
	}

	if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
		errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
	}

@@ -8,8 +8,6 @@ import (
	"github.com/stretchr/testify/assert"

	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func TestIsAllQueriesDisabled(t *testing.T) {
@@ -623,9 +621,9 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
	}

	// Test that threshold can evaluate properly
	vector, err := threshold.Eval(qbtypes.TimeSeries{
		Values: []*qbtypes.TimeSeriesValue{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
		Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"}},
	vector, err := threshold.Eval(v3.Series{
		Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
		Labels: map[string]string{"test": "label"},
	}, "", EvalData{})
	if err != nil {
		t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -700,9 +698,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
	}

	// Test with a value that should trigger both WARNING and CRITICAL thresholds
	vector, err := threshold.Eval(qbtypes.TimeSeries{
		Values: []*qbtypes.TimeSeriesValue{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
		Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
	vector, err := threshold.Eval(v3.Series{
		Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
		Labels: map[string]string{"service": "test"},
	}, "", EvalData{})
	if err != nil {
		t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -710,9 +708,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {

	assert.Equal(t, 2, len(vector))

	vector, err = threshold.Eval(qbtypes.TimeSeries{
		Values: []*qbtypes.TimeSeriesValue{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
		Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
	vector, err = threshold.Eval(v3.Series{
		Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
		Labels: map[string]string{"service": "test"},
	}, "", EvalData{})
	if err != nil {
		t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -725,7 +723,7 @@ func TestAnomalyNegationEval(t *testing.T) {
	tests := []struct {
		name          string
		ruleJSON      []byte
		series        qbtypes.TimeSeries
		series        v3.Series
		shouldAlert   bool
		expectedValue float64
	}{
@@ -753,9 +751,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`),
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
					{Timestamp: 2000, Value: -2.3},
				},
@@ -787,9 +785,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`), // below & at least once, no value below -2.0
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: -1.9},
					{Timestamp: 2000, Value: -1.8},
				},
@@ -820,9 +818,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`), // above & at least once, should alert
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
					{Timestamp: 2000, Value: 2.2},
				},
@@ -854,9 +852,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`),
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: 1.1},
					{Timestamp: 2000, Value: 1.2},
				},
@@ -887,9 +885,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`), // below and all the times
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: -2.1}, // all below -2
					{Timestamp: 2000, Value: -2.2},
					{Timestamp: 3000, Value: -2.5},
@@ -922,9 +920,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`),
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: -3.0},
					{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
					{Timestamp: 3000, Value: -2.5},
@@ -956,10 +954,10 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`),
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
					{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
					{Timestamp: 2000, Value: 5.0},
				},
			},
@@ -990,9 +988,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`),
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: 80.0}, // below 90, should alert
					{Timestamp: 2000, Value: 85.0},
				},
@@ -1024,9 +1022,9 @@ func TestAnomalyNegationEval(t *testing.T) {
				"selectedQuery": "A"
			}
		}`),
			series: qbtypes.TimeSeries{
				Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
				Values: []*qbtypes.TimeSeriesValue{
			series: v3.Series{
				Labels: map[string]string{"host": "server1"},
				Points: []v3.Point{
					{Timestamp: 1000, Value: 60.0}, // below, should alert
					{Timestamp: 2000, Value: 90.0},
				},
@@ -8,8 +8,8 @@ import (

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/query-service/converter"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
)

@@ -83,7 +83,7 @@ func (eval EvalData) HasActiveAlert(sampleLabelFp uint64) bool {
type RuleThreshold interface {
	// Eval runs the given series through the threshold rules
	// using the given EvalData and returns the matching series
	Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error)
	Eval(series v3.Series, unit string, evalData EvalData) (Vector, error)
	GetRuleReceivers() []RuleReceivers
}

@@ -122,11 +122,10 @@ func (r BasicRuleThresholds) Validate() error {
	return errors.Join(errs...)
}

func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalData) (Vector, error) {
	var resultVector Vector
	thresholds := []BasicRuleThreshold(r)
	sortThresholds(thresholds)
	seriesLabels := series.LabelsMap()
	for _, threshold := range thresholds {
		smpl, shouldAlert := threshold.shouldAlert(series, unit)
		if shouldAlert {
@@ -138,15 +137,15 @@ func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalDa
			resultVector = append(resultVector, smpl)
			continue
		} else if evalData.SendUnmatched {
			// Sanitise the series values to remove any NaN, Inf, or partial values
			values := filterValidValues(series.Values)
			if len(values) == 0 {
			// Sanitise the series points to remove any NaN or Inf values
			series.Points = removeGroupinSetPoints(series)
			if len(series.Points) == 0 {
				continue
			}
			// prepare the sample with the first value of the series
			// prepare the sample with the first point of the series
			smpl := Sample{
				Point:  Point{T: values[0].Timestamp, V: values[0].Value},
				Metric: PrepareSampleLabelsForRule(seriesLabels, threshold.Name),
				Point:  Point{T: series.Points[0].Timestamp, V: series.Points[0].Value},
				Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
				Target:     *threshold.TargetValue,
				TargetUnit: threshold.TargetUnit,
			}
@@ -161,7 +160,7 @@ func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalDa
		if threshold.RecoveryTarget == nil {
			continue
		}
		sampleLabels := PrepareSampleLabelsForRule(seriesLabels, threshold.Name)
		sampleLabels := PrepareSampleLabelsForRule(series.Labels, threshold.Name)
		alertHash := sampleLabels.Hash()
		// check if alert is active and then check if recovery threshold matches
		if evalData.HasActiveAlert(alertHash) {
@@ -256,23 +255,18 @@ func (b BasicRuleThreshold) Validate() error {
	return errors.Join(errs...)
}

func (b BasicRuleThreshold) matchesRecoveryThreshold(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) matchesRecoveryThreshold(series v3.Series, ruleUnit string) (Sample, bool) {
	return b.shouldAlertWithTarget(series, b.recoveryTarget(ruleUnit))
}
func (b BasicRuleThreshold) shouldAlert(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlert(series v3.Series, ruleUnit string) (Sample, bool) {
	return b.shouldAlertWithTarget(series, b.target(ruleUnit))
}

// filterValidValues returns only the values that are valid for alert evaluation:
// non-partial, non-NaN, non-Inf, and with non-negative timestamps.
func filterValidValues(values []*qbtypes.TimeSeriesValue) []*qbtypes.TimeSeriesValue {
	var result []*qbtypes.TimeSeriesValue
	for _, v := range values {
		if v.Partial {
			continue
		}
		if v.Timestamp >= 0 && !math.IsNaN(v.Value) && !math.IsInf(v.Value, 0) {
			result = append(result, v)
func removeGroupinSetPoints(series v3.Series) []v3.Point {
	var result []v3.Point
	for _, s := range series.Points {
		if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
			result = append(result, s)
		}
	}
	return result
@@ -290,15 +284,15 @@ func PrepareSampleLabelsForRule(seriesLabels map[string]string, thresholdName st
	return lb.Labels()
}

func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, target float64) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float64) (Sample, bool) {
	var shouldAlert bool
	var alertSmpl Sample
	lbls := PrepareSampleLabelsForRule(series.LabelsMap(), b.Name)
	lbls := PrepareSampleLabelsForRule(series.Labels, b.Name)

	values := filterValidValues(series.Values)
	series.Points = removeGroupinSetPoints(series)

	// nothing to evaluate
	if len(values) == 0 {
	if len(series.Points) == 0 {
		return alertSmpl, false
	}

@@ -306,7 +300,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
	case AtleastOnce:
		// If any sample matches the condition, the rule is firing.
		if b.CompareOp == ValueIsAbove {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value > target {
					alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
					shouldAlert = true
@@ -314,7 +308,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				}
			}
		} else if b.CompareOp == ValueIsBelow {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value < target {
					alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
					shouldAlert = true
@@ -322,7 +316,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				}
			}
		} else if b.CompareOp == ValueIsEq {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value == target {
					alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
					shouldAlert = true
@@ -330,7 +324,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				}
			}
		} else if b.CompareOp == ValueIsNotEq {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value != target {
					alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
					shouldAlert = true
@@ -338,7 +332,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				}
			}
		} else if b.CompareOp == ValueOutsideBounds {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if math.Abs(smpl.Value) >= target {
					alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
					shouldAlert = true
@@ -351,7 +345,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
		shouldAlert = true
		alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
		if b.CompareOp == ValueIsAbove {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value <= target {
					shouldAlert = false
					break
@@ -360,7 +354,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
			// use min value from the series
			if shouldAlert {
				var minValue float64 = math.Inf(1)
				for _, smpl := range values {
				for _, smpl := range series.Points {
					if smpl.Value < minValue {
						minValue = smpl.Value
					}
@@ -368,7 +362,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
			}
		} else if b.CompareOp == ValueIsBelow {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value >= target {
					shouldAlert = false
					break
@@ -376,7 +370,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
			}
			if shouldAlert {
				var maxValue float64 = math.Inf(-1)
				for _, smpl := range values {
				for _, smpl := range series.Points {
					if smpl.Value > maxValue {
						maxValue = smpl.Value
					}
@@ -384,14 +378,14 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
			}
		} else if b.CompareOp == ValueIsEq {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value != target {
					shouldAlert = false
					break
				}
			}
		} else if b.CompareOp == ValueIsNotEq {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if smpl.Value == target {
					shouldAlert = false
					break
@@ -399,7 +393,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
			}
			// use any non-inf or nan value from the series
			if shouldAlert {
				for _, smpl := range values {
				for _, smpl := range series.Points {
					if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
						alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
						break
@@ -407,7 +401,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
				}
			}
		} else if b.CompareOp == ValueOutsideBounds {
			for _, smpl := range values {
			for _, smpl := range series.Points {
				if math.Abs(smpl.Value) < target {
					alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
					shouldAlert = false
@@ -418,7 +412,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
	case OnAverage:
		// If the average of all samples matches the condition, the rule is firing.
		var sum, count float64
		for _, smpl := range values {
		for _, smpl := range series.Points {
			if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
				continue
			}
@@ -452,7 +446,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
		// If the sum of all samples matches the condition, the rule is firing.
		var sum float64

		for _, smpl := range values {
		for _, smpl := range series.Points {
			if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
				continue
			}
@@ -483,22 +477,21 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
	case Last:
		// If the last sample matches the condition, the rule is firing.
		shouldAlert = false
		lastValue := values[len(values)-1].Value
		alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
		alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
		if b.CompareOp == ValueIsAbove {
			if lastValue > target {
			if series.Points[len(series.Points)-1].Value > target {
				shouldAlert = true
			}
		} else if b.CompareOp == ValueIsBelow {
			if lastValue < target {
			if series.Points[len(series.Points)-1].Value < target {
				shouldAlert = true
			}
		} else if b.CompareOp == ValueIsEq {
			if lastValue == target {
			if series.Points[len(series.Points)-1].Value == target {
				shouldAlert = true
			}
		} else if b.CompareOp == ValueIsNotEq {
			if lastValue != target {
			if series.Points[len(series.Points)-1].Value != target {
				shouldAlert = true
			}
		}
@@ -6,24 +6,16 @@ import (

	"github.com/stretchr/testify/assert"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)

func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
	target := 100.0

	makeLabel := func(name, value string) *qbtypes.Label {
		return &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: name}, Value: value}
	}
	makeValue := func(value float64, ts int64) *qbtypes.TimeSeriesValue {
		return &qbtypes.TimeSeriesValue{Value: value, Timestamp: ts}
	}

	tests := []struct {
		name        string
		threshold   BasicRuleThreshold
		series      qbtypes.TimeSeries
		series      v3.Series
		ruleUnit    string
		shouldAlert bool
	}{
@@ -36,9 +28,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 150ms in seconds
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
			},
		},
		ruleUnit:    "s",
		shouldAlert: true,
@@ -52,9 +46,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
			},
		},
		ruleUnit:    "s",
		shouldAlert: false,
@@ -68,9 +64,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(150000, 1000)}, // 150000ms = 150s
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
			},
		},
		ruleUnit:    "ms",
		shouldAlert: true,
@@ -85,9 +83,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 0.15KiB ≈ 153.6 bytes
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
			},
		},
		ruleUnit:    "kbytes",
		shouldAlert: true,
@@ -101,9 +101,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.15, Timestamp: 1000},
			},
		},
		ruleUnit:    "mbytes",
		shouldAlert: true,
@@ -118,9 +120,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsBelow,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
			},
		},
		ruleUnit:    "s",
		shouldAlert: true,
@@ -134,12 +138,12 @@
			MatchType: OnAverage,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{
				makeValue(0.08, 1000), // 80ms
				makeValue(0.12, 2000), // 120ms
				makeValue(0.15, 3000), // 150ms
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.08, Timestamp: 1000}, // 80ms
				{Value: 0.12, Timestamp: 2000}, // 120ms
				{Value: 0.15, Timestamp: 3000}, // 150ms
			},
		},
		ruleUnit: "s",
@@ -154,12 +158,12 @@
			MatchType: InTotal,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{
				makeValue(0.04, 1000), // 40MB
				makeValue(0.05, 2000), // 50MB
				makeValue(0.03, 3000), // 30MB
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.04, Timestamp: 1000}, // 40MB
				{Value: 0.05, Timestamp: 2000}, // 50MB
				{Value: 0.03, Timestamp: 3000}, // 30MB
			},
		},
		ruleUnit: "decgbytes",
@@ -174,12 +178,12 @@
			MatchType: AllTheTimes,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{
				makeValue(0.11, 1000), // 110ms
				makeValue(0.12, 2000), // 120ms
				makeValue(0.15, 3000), // 150ms
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.11, Timestamp: 1000}, // 110ms
				{Value: 0.12, Timestamp: 2000}, // 120ms
				{Value: 0.15, Timestamp: 3000}, // 150ms
			},
		},
		ruleUnit: "s",
@@ -194,11 +198,11 @@
			MatchType: Last,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{
				makeValue(0.15, 1000), // 150kB
				makeValue(0.05, 2000), // 50kB (last value)
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.15, Timestamp: 1000}, // 150kB
				{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
			},
		},
		ruleUnit: "decmbytes",
@@ -214,9 +218,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 0.15, Timestamp: 1000},
			},
		},
		ruleUnit:    "KBs",
		shouldAlert: true,
@@ -231,9 +237,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150ms
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 150, Timestamp: 1000}, // 150ms
			},
		},
		ruleUnit:    "ms",
		shouldAlert: true,
@@ -248,9 +256,11 @@
			MatchType: AtleastOnce,
			CompareOp: ValueIsAbove,
		},
		series: qbtypes.TimeSeries{
			Labels: []*qbtypes.Label{makeLabel("service", "test")},
			Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150 (unitless)
		series: v3.Series{
			Labels: map[string]string{"service": "test"},
			Points: []v3.Point{
				{Value: 150, Timestamp: 1000}, // 150 (unitless)
			},
		},
		ruleUnit:    "",
		shouldAlert: true,
@@ -11,15 +11,16 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
|
||||
ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
|
||||
ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
|
||||
ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
|
||||
ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
|
||||
ErrPasswordNotFound = errors.MustNewCode("password_not_found")
|
||||
ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
|
||||
ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
|
||||
ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
|
||||
ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
|
||||
ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
|
||||
ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
|
||||
ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
|
||||
ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
|
||||
ErrPasswordNotFound = errors.MustNewCode("password_not_found")
|
||||
ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
|
||||
ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
|
||||
ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
|
||||
ErrCodeRootUserOperationUnsupported = errors.MustNewCode("root_user_operation_unsupported")
|
||||
)
|
||||
|
||||
type GettableUser = User
|
||||
@@ -29,9 +30,10 @@ type User struct {
|
||||
|
||||
Identifiable
|
||||
DisplayName string `bun:"display_name" json:"displayName"`
|
||||
Email valuer.Email `bun:"email,type:text" json:"email"`
|
||||
Role Role `bun:"role,type:text" json:"role"`
|
||||
OrgID valuer.UUID `bun:"org_id,type:text" json:"orgId"`
|
||||
	Email valuer.Email `bun:"email" json:"email"`
	Role Role `bun:"role" json:"role"`
	OrgID valuer.UUID `bun:"org_id" json:"orgId"`
	IsRoot bool `bun:"is_root" json:"isRoot"`
	TimeAuditable
}

@@ -64,6 +66,7 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
		Email: email,
		Role: role,
		OrgID: orgID,
		IsRoot: false,
		TimeAuditable: TimeAuditable{
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
@@ -71,6 +74,65 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
	}, nil
}

func NewRootUser(displayName string, email valuer.Email, orgID valuer.UUID) (*User, error) {
	if email.IsZero() {
		return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
	}

	if orgID.IsZero() {
		return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required")
	}

	return &User{
		Identifiable: Identifiable{
			ID: valuer.GenerateUUID(),
		},
		DisplayName: displayName,
		Email: email,
		Role: RoleAdmin,
		OrgID: orgID,
		IsRoot: true,
		TimeAuditable: TimeAuditable{
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
		},
	}, nil
}

// Update applies mutable fields from the input to the user. Immutable fields
// (email, is_root, org_id, id) are preserved. Only non-zero input fields are applied.
func (u *User) Update(displayName string, role Role) {
	if displayName != "" {
		u.DisplayName = displayName
	}
	if role != "" {
		u.Role = role
	}
	u.UpdatedAt = time.Now()
}

// PromoteToRoot promotes the user to a root user with admin role.
func (u *User) PromoteToRoot() {
	u.IsRoot = true
	u.Role = RoleAdmin
	u.UpdatedAt = time.Now()
}

// UpdateEmail updates the email of the user.
func (u *User) UpdateEmail(email valuer.Email) {
	u.Email = email
	u.UpdatedAt = time.Now()
}

// ErrIfRoot returns an error if the user is a root user. The caller should
// enrich the error with the specific operation using errors.WithAdditionalf.
func (u *User) ErrIfRoot() error {
	if u.IsRoot {
		return errors.New(errors.TypeUnsupported, ErrCodeRootUserOperationUnsupported, "this operation is not supported for the root user")
	}
	return nil
}

func NewTraitsFromUser(user *User) map[string]any {
	return map[string]any{
		"name": user.DisplayName,
@@ -133,7 +195,7 @@ type UserStore interface {
	// List users by email and org ids.
	ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*User, error)

	UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *User) (*User, error)
	UpdateUser(ctx context.Context, orgID valuer.UUID, user *User) error
	DeleteUser(ctx context.Context, orgID string, id string) error

	// Creates a password.
@@ -156,6 +218,9 @@ type UserStore interface {

	CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)

	// Get root user by org.
	GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*User, error)

	// Transaction
	RunInTx(ctx context.Context, cb func(ctx context.Context) error) error
}
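For orientation, a minimal caller-side sketch of how the new guard composes with the store API above. The deleteUser helper, the package name, and the import paths are illustrative assumptions; only the ErrIfRoot and DeleteUser signatures are taken from this diff.

package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// deleteUser is a hypothetical caller (not part of this diff): it checks
// ErrIfRoot before the destructive call, so a provisioned root user can
// never be removed through this path.
func deleteUser(ctx context.Context, store types.UserStore, orgID valuer.UUID, user *types.User) error {
	if err := user.ErrIfRoot(); err != nil {
		// Typed error carrying ErrCodeRootUserOperationUnsupported.
		return err
	}
	// Assumes valuer.UUID implements fmt.Stringer.
	return store.DeleteUser(ctx, orgID.String(), user.ID.String())
}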
91
templates/email/invitation.gotmpl
Normal file
@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <title>You're Invited to Join SigNoz</title>
</head>

<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
  <table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
    <tr>
      <td align="center" style="padding:0">
        <table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
          {{ if .format.Header.Enabled }}
          <tr>
            <td align="center" style="padding:16px 20px 16px">
              <img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
            </td>
          </tr>
          {{ end }}
          <tr>
            <td style="padding:16px 20px 16px">
              <p style="margin:0 0 16px;font-size:16px;color:#333">
                Hi there,
              </p>
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                You've been invited by <strong>{{.inviter_email}}</strong> to join their SigNoz organization.
              </p>
              <p style="margin:0 0 12px;font-size:16px;color:#333;line-height:1.6">
                A new account has been created for you with the following details:
              </p>
              <table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
                <tr>
                  <td style="padding:20px;background:#f5f5f5;border-radius:6px;border-left:4px solid #4E74F8">
                    <p style="margin:0;font-size:15px;color:#333;line-height:1.6">
                      <strong>Email:</strong> {{.to}}
                    </p>
                  </td>
                </tr>
              </table>
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                Accept the invitation to get started.
              </p>
              <table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
                <tr>
                  <td align="center">
                    <a href="{{.link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
                      Accept Invitation
                    </a>
                  </td>
                </tr>
              </table>
              <p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
                Button not working? Copy and paste this link into your browser:
              </p>
              <p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
                <a href="{{.link}}" style="color:#4E74F8;text-decoration:none">
                  {{.link}}
                </a>
              </p>
              {{ if .format.Help.Enabled }}
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
              </p>
              {{ end }}
              <p style="margin:0;font-size:16px;color:#333;line-height:1.6">
                Thanks,<br><strong>The SigNoz Team</strong>
              </p>
            </td>
          </tr>
          {{ if .format.Footer.Enabled }}
          <tr>
            <td align="center" style="padding:8px 16px 8px">
              <p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
                <a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
              </p>
              <p style="margin:0;font-size:12px;color:#999;line-height:1.5">
                © 2026 SigNoz Inc.
              </p>
            </td>
          </tr>
          {{ end }}
        </table>
      </td>
    </tr>
  </table>
</body>

</html>
@@ -1,14 +0,0 @@
<!DOCTYPE html>
<html>
<body>
<p>Hi {{.CustomerName}},</p>
<p>You have been invited to join SigNoz project by {{.InviterName}} ({{.InviterEmail}}).</p>
<p>Please click on the following button to accept the invitation:</p>
<a href="{{.Link}}" style="background-color: #000000; color: white; padding: 14px 20px; text-align: center; text-decoration: none; display: inline-block;">Accept Invitation</a>
<p>Button not working? Paste the following link into your browser:</p>
<p>{{.Link}}</p>
<p>Follow docs here 👉 to <a href="https://signoz.io/docs/cloud/">Get Started with SigNoz Cloud</a></p>
<p>Thanks,</p>
<p>SigNoz Team</p>
</body>
</html>
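A quick way to preview what the new invitation template renders: execute it with Go's html/template and the keys it references (.inviter_email, .to, .link, and the nested .format flags). The map shape below is inferred from the template body alone; the production renderer is not shown in this diff.

package main

import (
	"html/template"
	"os"
)

// Illustrative render of templates/email/invitation.gotmpl. Key names are
// read straight from the template above; all values are placeholders.
func main() {
	tmpl := template.Must(template.ParseFiles("templates/email/invitation.gotmpl"))
	data := map[string]any{
		"inviter_email": "admin@example.com",
		"to":            "new.user@example.com",
		"link":          "https://signoz.example.com/accept-invite?token=abc123",
		"format": map[string]any{
			"Header": map[string]any{"Enabled": true, "LogoURL": "https://example.com/logo.png"},
			"Help":   map[string]any{"Enabled": true, "Email": "support@example.com"},
			"Footer": map[string]any{"Enabled": true},
		},
	}
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}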
91
templates/email/reset_password.gotmpl
Normal file
@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <title>{{.subject}}</title>
</head>

<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
  <table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
    <tr>
      <td align="center" style="padding:0">
        <table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
          {{ if .format.Header.Enabled }}
          <tr>
            <td align="center" style="padding:16px 20px 16px">
              <img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
            </td>
          </tr>
          {{ end }}
          <tr>
            <td style="padding:16px 20px 16px">
              <p style="margin:0 0 16px;font-size:16px;color:#333">
                Hi there,
              </p>
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                A password reset was requested for your SigNoz account.
              </p>
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                Click the button below to reset your password:
              </p>
              <table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
                <tr>
                  <td align="center">
                    <a href="{{.Link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
                      Reset Password
                    </a>
                  </td>
                </tr>
              </table>
              <p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
                Button not working? Copy and paste this link into your browser:
              </p>
              <p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
                <a href="{{.Link}}" style="color:#4E74F8;text-decoration:none">
                  {{.Link}}
                </a>
              </p>
              <table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
                <tr>
                  <td style="padding:16px;background:#fff4e6;border-radius:6px;border-left:4px solid #ff9800">
                    <p style="margin:0;font-size:14px;color:#333;line-height:1.6">
                      <strong>⏱️ This link will expire in {{.Expiry}}.</strong>
                    </p>
                  </td>
                </tr>
              </table>
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                If you didn't request this password reset, please ignore this email. Your password will remain unchanged.
              </p>
              {{ if .format.Help.Enabled }}
              <p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
                Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
              </p>
              {{ end }}
              <p style="margin:0;font-size:16px;color:#333;line-height:1.6">
                Thanks,<br><strong>The SigNoz Team</strong>
              </p>
            </td>
          </tr>
          {{ if .format.Footer.Enabled }}
          <tr>
            <td align="center" style="padding:8px 16px 8px">
              <p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
                <a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
              </p>
              <p style="margin:0;font-size:12px;color:#999;line-height:1.5">
                © 2026 SigNoz Inc.
              </p>
            </td>
          </tr>
          {{ end }}
        </table>
      </td>
    </tr>
  </table>
</body>

</html>
@@ -1,13 +0,0 @@
<!DOCTYPE html>
<html>
<body>
<p>Hello {{.Name}},</p>
<p>You requested a password reset for your SigNoz account.</p>
<p>Click the link below to reset your password:</p>
<a href="{{.Link}}">Reset Password</a>
<p>This link will expire in {{.Expiry}}.</p>
<p>If you didn't request this, please ignore this email. Your password will remain unchanged.</p>
<br>
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>
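Note that the new reset template's key set differs in casing from the invitation's: .Link and .Expiry are capitalized while .subject and .format are lowercase, so a map-based payload must match that casing exactly. A sketch of the expected shape, inferred from the template body rather than from any actual SigNoz API:

package example

// resetPasswordData sketches the payload reset_password.gotmpl expects.
// Key names come from the template above; all values are placeholders.
func resetPasswordData(link, expiry string) map[string]any {
	return map[string]any{
		"subject": "Reset your SigNoz password", // rendered as the <title>
		"Link":    link,                         // the reset URL
		"Expiry":  expiry,                       // human-readable lifetime, e.g. "6h"
		"format": map[string]any{
			"Header": map[string]any{"Enabled": false, "LogoURL": ""},
			"Help":   map[string]any{"Enabled": false, "Email": ""},
			"Footer": map[string]any{"Enabled": true},
		},
	}
}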
@@ -1,21 +0,0 @@
<!DOCTYPE html>
<html>
<body>
Hi {{.CustomerName}},<br>
Your role in <strong>SigNoz</strong> has been updated by {{.UpdatedByEmail}}.
<p>
<strong>Previous Role:</strong> {{.OldRole}}<br>
<strong>New Role:</strong> {{.NewRole}}
</p>
{{if eq .OldRole "Admin"}}
<p>
If you were not expecting this change or have any questions, please contact us at <a href="mailto:support@signoz.io">support@signoz.io</a>.
</p>
{{else}}
<p>
If you were not expecting this change or have any questions, please reach out to your administrator.
</p>
{{end}}
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>