Compare commits

..

2 Commits

Author            SHA1        Message                                             Date
Srikanth Chekuri  25e0c19ddb  Merge branch 'main' into remove-v4-support-rules    2026-02-14 23:51:21 +05:30
srikanthccv       8a6abb2f09  chore: remove support for older versions in rules   2026-02-14 23:48:09 +05:30
93 changed files with 2240 additions and 6510 deletions

.github/CODEOWNERS (vendored, 6 changed lines)
View File

@@ -43,12 +43,6 @@
/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25
# Emailing Owners
/pkg/emailing/ @vikrantgupta25
/pkg/types/emailtypes/ @vikrantgupta25
/templates/email/ @vikrantgupta25
# Querier Owners
/pkg/querier/ @srikanthccv

View File

@@ -14,8 +14,5 @@
},
"[sql]": {
"editor.defaultFormatter": "adpyke.vscode-sql-formatter"
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
}
}

View File

@@ -193,15 +193,6 @@ emailing:
templates:
# The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
directory: /opt/signoz/conf/templates/email
format:
header:
enabled: false
logo_url: ""
help:
enabled: false
email: ""
footer:
enabled: false
smtp:
# The SMTP server address.
address: localhost:25

View File

@@ -5,23 +5,16 @@ import (
"encoding/json"
"fmt"
"log/slog"
"math"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
@@ -32,6 +25,8 @@ import (
querierV5 "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -48,17 +43,12 @@ type AnomalyRule struct {
reader interfaces.Reader
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
// querierV5 is the query builder v5 querier used for all alert rule evaluation
querierV5 querierV5.Querier
provider anomaly.Provider
providerV2 anomalyV2.Provider
version string
logger *slog.Logger
logger *slog.Logger
seasonality anomaly.Seasonality
}
@@ -102,34 +92,6 @@ func NewAnomalyRule(
logger.Info("using seasonality", "seasonality", t.seasonality.String())
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
)
}
if t.seasonality == anomaly.SeasonalityHourly {
t.providerV2 = anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
@@ -148,7 +110,7 @@ func NewAnomalyRule(
}
t.querierV5 = querierV5
t.version = p.Version
t.reader = reader
t.logger = logger
return &t, nil
}
@@ -157,36 +119,9 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)
st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()
compositeQuery := r.Condition().CompositeQuery
if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}
func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
r.logger.InfoContext(ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -215,60 +150,6 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
if err != nil {
return nil, err
}
var queryResult *v3.Result
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
queryResult = result
break
}
}
hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
for _, series := range queryResult.AnomalyScores {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}
func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
Params: *params,
@@ -290,20 +171,25 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
}
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)
var anomalyScores []*qbtypes.TimeSeries
if qbResult != nil {
for _, bucket := range qbResult.Aggregations {
anomalyScores = append(anomalyScores, bucket.AnomalyScores...)
}
}
hasData := len(queryResult.AnomalyScores) > 0
hasData := len(anomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
scoresJSON, _ := json.Marshal(anomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.AnomalyScores
seriesToProcess := anomalyScores
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -316,7 +202,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -337,16 +223,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
}
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
if err != nil {
return 0, err
}

View File

@@ -8,28 +8,27 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
// mockAnomalyProviderV2 is a mock implementation of anomalyV2.Provider for testing.
type mockAnomalyProviderV2 struct {
responses []*anomalyV2.AnomaliesResponse
callCount int
}
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
func (m *mockAnomalyProviderV2) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomalyV2.AnomaliesRequest) (*anomalyV2.AnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
return &anomalyV2.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
@@ -82,11 +81,11 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
},
}
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
},
},
}
@@ -129,8 +128,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
)
require.NoError(t, err)
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseNoData},
}
alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -190,11 +189,11 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
},
}
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
},
},
}
@@ -228,16 +227,22 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)
responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseWithData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{
Aggregations: []*qbtypes.AggregationBucket{
{
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
AnomalyScores: []*qbtypes.TimeSeries{
{
Labels: []*qbtypes.Label{
{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
},
},
@@ -252,8 +257,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseWithData, responseNoData},
}
alertsFound1, err := rule.Eval(context.Background(), t1)

View File

@@ -55,7 +55,6 @@
"@signozhq/icons": "0.1.0",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
"@signozhq/radio-group": "0.0.2",
"@signozhq/resizable": "0.0.0",
"@signozhq/sonner": "0.1.0",
"@signozhq/table": "0.3.7",

View File

@@ -73,7 +73,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'time_series',
data: { results: [timeSeries] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('time_series', [
@@ -156,7 +156,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('scalar', [
@@ -239,7 +239,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('scalar', [

View File

@@ -388,7 +388,6 @@ export function convertV5ResponseToLegacy(
warnings: v5Data?.data?.warning || [],
},
warning: v5Data?.warning || undefined,
meta: v5Data?.meta,
},
warning: v5Data?.warning || undefined,
};
@@ -407,7 +406,6 @@ export function convertV5ResponseToLegacy(
payload: {
data: convertedData,
warning: v5Response.payload?.data?.warning || undefined,
meta: v5Data?.meta,
},
};

View File

@@ -21,7 +21,6 @@ import '@signozhq/design-tokens';
import '@signozhq/icons';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/radio-group';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/table';

View File

@@ -78,10 +78,12 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
let user: ReturnType<typeof userEvent.setup>;
let mockOnValueUpdate: jest.Mock;
let mockSetVariablesToGetUpdated: jest.Mock;
beforeEach(() => {
user = userEvent.setup();
mockOnValueUpdate = jest.fn();
mockSetVariablesToGetUpdated = jest.fn();
jest.clearAllMocks();
});
@@ -100,6 +102,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -145,6 +150,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -187,6 +195,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -236,6 +247,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -258,6 +272,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -291,6 +308,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -324,6 +344,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -346,6 +369,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -379,6 +405,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -432,6 +461,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -476,6 +508,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -513,6 +548,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -544,6 +582,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);

View File

@@ -9,15 +9,11 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
import './DashboardVariableSelection.styles.scss';
@@ -26,6 +22,8 @@ function DashboardVariableSelection(): JSX.Element | null {
const {
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
} = useDashboard();
const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -57,14 +55,11 @@ function DashboardVariableSelection(): JSX.Element | null {
[dependencyData?.order],
);
// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
// Trigger refetch when dependency order changes or global time changes
useEffect(() => {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
@@ -126,14 +121,29 @@ function DashboardVariableSelection(): JSX.Element | null {
return prev;
});
// Cascade: enqueue query-type descendants for refetching
enqueueDescendantsOfVariable(name);
if (dependencyData) {
const updatedVariables: string[] = [];
onUpdateVariableNode(
name,
dependencyData.graph,
dependencyData.order,
(node) => updatedVariables.push(node),
);
setVariablesToGetUpdated((prev) => [
...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
]);
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
@@ -148,6 +158,9 @@ function DashboardVariableSelection(): JSX.Element | null {
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
);
})}
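
For context on the cascade wired up in onValueUpdate above: a minimal TypeScript sketch of how the descendant set might be collected, assuming dependencyData.graph maps each variable name to its direct children and dependencyData.order is a topological ordering of the variables. The real traversal is onUpdateVariableNode from ./util; collectDescendants and the example graph (derived by inverting the k8s_* parentDependencyGraph fixture used in the tests in this change) are illustrative only.

type VariableGraph = Record<string, string[]>;

// Collect every variable downstream of `changed` in one pass: topological
// order guarantees a parent is visited before its children, so marking the
// children of already-reachable nodes is sufficient.
function collectDescendants(
  changed: string,
  graph: VariableGraph,
  order: string[],
): string[] {
  const reachable = new Set<string>([changed]);
  for (const node of order) {
    if (!reachable.has(node)) continue;
    for (const child of graph[node] ?? []) {
      reachable.add(child);
    }
  }
  reachable.delete(changed); // the changed variable itself is not refetched
  return [...reachable];
}

// Example values (hypothetical, modeled on the test fixtures):
const exampleGraph: VariableGraph = {
  k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
  k8s_node_name: ['k8s_namespace_name'],
  k8s_namespace_name: [],
};
const exampleOrder = ['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'];
console.log(collectDescendants('k8s_cluster_name', exampleGraph, exampleOrder));
// -> ['k8s_node_name', 'k8s_namespace_name']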

View File

@@ -2,25 +2,18 @@ import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { SuccessResponseV2 } from 'types/api';
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import {
buildExistingDynamicVariableQuery,
extractErrorMessage,
getOptionsForDynamicVariable,
mergeUniqueStrings,
settleVariableFetch,
} from './util';
import { getOptionsForDynamicVariable } from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
@@ -31,6 +24,7 @@ type DynamicVariableInputProps = Pick<
'variableData' | 'onValueUpdate' | 'existingVariables'
>;
// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
variableData,
onValueUpdate,
@@ -61,8 +55,14 @@ function DynamicVariableInput({
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => mergeUniqueStrings(optionsData, relatedValues),
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
[optionsData, relatedValues],
);
@@ -104,24 +104,67 @@ function DynamicVariableInput({
(state) => state.globalTime,
);
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
// existing query is the query made from the other dynamic variables around this one with their current values
// e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
// eslint-disable-next-line sonarjs/cognitive-complexity
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const existingQuery = useMemo(
() =>
buildExistingDynamicVariableQuery(
existingVariables,
variableData.id,
!!variableData.dynamicVariablesAttribute,
),
[existingVariables, variableData.id, variableData.dynamicVariablesAttribute],
);
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((val) => val !== null && val !== undefined && val !== '')
.map((val) => val.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((val) => {
// Check if value is a number
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${val.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
const handleSelectDropdownVisibilityChange = useCallback(
@@ -139,73 +182,6 @@ function DynamicVariableInput({
[onDropdownVisibleChange, optionsData, originalRelatedValues],
);
const handleQuerySuccess = useCallback(
(data: SuccessResponseV2<FieldValueResponse>): void => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Sync temp selection with latest API values when ALL is active and dropdown is open
if (variableData.allSelected && isDropdownOpen) {
const latestValues = mergeUniqueStrings(
newNormalizedValues,
newRelatedValues,
);
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
applyDefaultIfNeeded(
mergeUniqueStrings(newNormalizedValues, newRelatedValues),
);
}
settleVariableFetch(variableData.name, 'complete');
},
[
debouncedApiSearchText,
variableData.allSelected,
variableData.name,
isDropdownOpen,
tempSelection,
setTempSelection,
applyDefaultIfNeeded,
],
);
const handleQueryError = useCallback(
(error: { message?: string } | null): void => {
if (error) {
setErrorMessage(extractErrorMessage(error));
setIsRetryableError(checkIfRetryableError(error));
}
settleVariableFetch(variableData.name, 'failure');
},
[variableData.name],
);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
@@ -216,22 +192,13 @@ function DynamicVariableInput({
debouncedApiSearchText,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
variableFetchCycleId,
],
{
/*
* enabled if
* - we have dynamic variable source and attribute defined (ALWAYS)
* - AND
* - we're either still fetching variable options
* - OR
* - if variable is in idle state and we have already fetched options for it
**/
enabled:
variableData.type === 'DYNAMIC' &&
!!variableData.dynamicVariablesSource &&
!!variableData.dynamicVariablesAttribute &&
(isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
queryFn: ({ signal }) =>
!!variableData.dynamicVariablesAttribute,
queryFn: () =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
@@ -244,10 +211,70 @@ function DynamicVariableInput({
minTime,
maxTime,
existingQuery,
signal,
),
onSuccess: handleQuerySuccess,
onError: handleQueryError,
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
const allNewOptions = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
applyDefaultIfNeeded(allNewOptions);
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
},
);
@@ -309,8 +336,6 @@ function DynamicVariableInput({
showRetryButton={isRetryableError}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
onSearch={handleSearch}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}
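
The inlined existingQuery logic above replaces the removed buildExistingDynamicVariableQuery helper. Below is a minimal, self-contained sketch of the same filter-building rules; SiblingVariable, buildExistingQuery, and formatValue are hypothetical names used only for illustration.

interface SiblingVariable {
  id: string;
  type: string;
  dynamicVariablesAttribute?: string;
  selectedValue?: string | number | (string | number)[];
  showALLOption?: boolean;
  allSelected?: boolean;
}

// Keep numeric values bare; quote strings and escape embedded single quotes.
function formatValue(val: string): string {
  const num = Number(val);
  if (!Number.isNaN(num) && Number.isFinite(num)) {
    return val;
  }
  return `'${val.replace(/'/g, "\\'")}'`;
}

function buildExistingQuery(
  variables: SiblingVariable[],
  currentId: string,
): string {
  const parts: string[] = [];
  for (const v of variables) {
    if (v.id === currentId) continue; // skip the variable being fetched
    if (v.type !== 'DYNAMIC' || !v.dynamicVariablesAttribute) continue;
    if (v.showALLOption && v.allSelected) continue; // ALL selected means no filter
    const raw: (string | number | undefined)[] = Array.isArray(v.selectedValue)
      ? v.selectedValue
      : [v.selectedValue];
    const values = raw
      .filter((val): val is string | number => val !== undefined && val !== '')
      .map((val) => val.toString());
    if (values.length === 0) continue;
    const formatted = values.map(formatValue);
    parts.push(
      formatted.length === 1
        ? `${v.dynamicVariablesAttribute} = ${formatted[0]}`
        : `${v.dynamicVariablesAttribute} IN [${formatted.join(', ')}]`,
    );
  }
  return parts.join(' AND ');
}

// e.g. two sibling DYNAMIC variables selecting ['zeus', 'gene'] on
// k8s.namespace.name and 'test' on doc_op_type would produce:
//   k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'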

View File

@@ -3,9 +3,8 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isEmpty, isString } from 'lodash-es';
import { isArray, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -13,18 +12,26 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, settleVariableFetch } from './util';
import { areArraysEqual, checkAPIInvocation } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';
type QueryVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'existingVariables' | 'onValueUpdate'
| 'variableData'
| 'existingVariables'
| 'onValueUpdate'
| 'variablesToGetUpdated'
| 'setVariablesToGetUpdated'
| 'dependencyData'
>;
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
@@ -36,15 +43,6 @@ function QueryVariableInput({
(state) => state.globalTime,
);
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const {
tempSelection,
setTempSelection,
@@ -62,6 +60,16 @@ function QueryVariableInput({
strategy: queryVariableSelectStrategy,
});
const validVariableUpdate = useCallback((): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
}, [variableData.name, variablesToGetUpdated]);
const getOptions = useCallback(
// eslint-disable-next-line sonarjs/cognitive-complexity
(variablesRes: VariableResponseProps | null): void => {
@@ -95,24 +103,18 @@ function QueryVariableInput({
valueNotInList = true;
}
if (variableData.name && (valueNotInList || variableData.allSelected)) {
// variableData.allSelected is added for the case where, on change of options, we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(
variableData.name,
variableData.id,
newOptionsData,
true,
);
}
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
@@ -130,11 +132,7 @@ function QueryVariableInput({
newOptionsData.every((option) => selectedValue.includes(option));
}
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
@@ -143,6 +141,10 @@ function QueryVariableInput({
setOptionsData(newOptionsData);
// Apply default if no value is selected (e.g., new variable, first load)
applyDefaultIfNeeded(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
@@ -155,6 +157,8 @@ function QueryVariableInput({
onValueUpdate,
tempSelection,
setTempSelection,
validVariableUpdate,
setVariablesToGetUpdated,
applyDefaultIfNeeded,
],
);
@@ -165,28 +169,27 @@ function QueryVariableInput({
variableData.name || '',
`${minTime}`,
`${maxTime}`,
variableFetchCycleId,
JSON.stringify(dependencyData?.order),
],
{
/*
* enabled if
* - we're either still fetching variable options
* - OR
* - if variable is in idle state and we have already fetched options for it
**/
enabled: isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
queryFn: ({ signal }) =>
dashboardVariablesQuery(
{
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
},
signal,
enabled:
variableData &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
settleVariableFetch(variableData.name, 'complete');
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
@@ -203,7 +206,9 @@ function QueryVariableInput({
}
setErrorMessage(message);
}
settleVariableFetch(variableData.name, 'failure');
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
@@ -237,8 +242,6 @@ function QueryVariableInput({
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}
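
The validVariableUpdate guard above is what keeps query variables updating in dependency order: a variable only rewrites its stored value while it sits at the head of the variablesToGetUpdated queue, and the onSuccess/onError handlers then remove it so the next dependent becomes eligible. A standalone sketch of that check (names in the example calls are illustrative):

// Mirror of the head-of-queue check used by QueryVariableInput above.
function validVariableUpdate(
  name: string | undefined,
  variablesToGetUpdated: string[],
): boolean {
  if (!name) {
    return false;
  }
  return (
    variablesToGetUpdated.length > 0 && variablesToGetUpdated[0] === name
  );
}

console.log(
  validVariableUpdate('k8s_node_name', ['k8s_node_name', 'k8s_namespace_name']),
); // true: at the head, apply the update
console.log(
  validVariableUpdate('k8s_namespace_name', ['k8s_node_name', 'k8s_namespace_name']),
); // false: waits until the variable ahead of it settles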

View File

@@ -28,8 +28,6 @@ interface SelectVariableInputProps {
showRetryButton?: boolean;
showIncompleteDataMessage?: boolean;
onSearch?: (searchTerm: string) => void;
waiting?: boolean;
waitingMessage?: string;
}
const MAX_TAG_DISPLAY_VALUES = 10;
@@ -67,7 +65,6 @@ function SelectVariableInput({
showRetryButton,
showIncompleteDataMessage,
onSearch,
waitingMessage,
}: SelectVariableInputProps): JSX.Element {
const commonProps = useMemo(
() => ({
@@ -81,6 +78,7 @@ function SelectVariableInput({
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
@@ -88,8 +86,6 @@ function SelectVariableInput({
'data-testid': 'variable-select',
onChange,
loading,
waitingMessage,
style: SelectItemStyle,
options,
errorMessage,
onRetry,
@@ -105,7 +101,6 @@ function SelectVariableInput({
defaultValue,
onChange,
loading,
waitingMessage,
options,
value,
errorMessage,

View File

@@ -47,6 +47,14 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -61,6 +69,14 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -76,6 +92,14 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -109,6 +133,14 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -131,6 +163,14 @@ describe('VariableItem', () => {
variableData={customVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -145,6 +185,14 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);

View File

@@ -1,6 +1,7 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import CustomVariableInput from './CustomVariableInput';
@@ -20,12 +21,18 @@ export interface VariableItemProps {
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function VariableItem({
variableData,
onValueUpdate,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const { name, description, type: variableType } = variableData;
@@ -58,6 +65,9 @@ function VariableItem({
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
)}
{variableType === 'DYNAMIC' && (
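
The three props added above are threaded from DashboardVariableSelection down to the query and dynamic inputs. A sketch of their assumed shapes, with IDependencyData inferred from the dependencyData defaults in the tests earlier in this diff (the real interface lives in providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes and may carry additional fields); VariableFetchOrchestrationProps is a hypothetical name used only for illustration.

import type { Dispatch, SetStateAction } from 'react';

type VariableGraph = Record<string, string[]>;

// Assumed shape, taken from the dependencyData test defaults in this change.
interface IDependencyData {
  order: string[];
  graph: VariableGraph;
  parentDependencyGraph: VariableGraph;
  hasCycle: boolean;
}

// The three props added to VariableItemProps and passed through to
// QueryVariableInput / DynamicVariableInput.
interface VariableFetchOrchestrationProps {
  variablesToGetUpdated: string[];
  setVariablesToGetUpdated: Dispatch<SetStateAction<string[]>>;
  dependencyData: IDependencyData | null;
}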

View File

@@ -7,19 +7,6 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DynamicVariableInput';
// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));
// Don't mock the components - use real ones
// Mock for useQuery
@@ -230,10 +217,9 @@ describe('DynamicVariableInput Component', () => {
'',
'Traces',
'service.name',
0, // variableFetchCycleId
],
expect.objectContaining({
enabled: true, // isVariableFetching is true from mock
enabled: true, // Type is 'DYNAMIC'
queryFn: expect.any(Function),
onSuccess: expect.any(Function),
onError: expect.any(Function),

View File

@@ -8,6 +8,14 @@ import '@testing-library/jest-dom/extend-expect';
import VariableItem from '../VariableItem';
const mockOnValueUpdate = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
const baseDependencyData = {
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
};
const TEST_VARIABLE_ID = 'test_variable';
const VARIABLE_SELECT_TESTID = 'variable-select';
@@ -23,6 +31,9 @@ const renderVariableItem = (
variableData={variableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={baseDependencyData}
/>
</MockQueryClientProvider>,
);

View File

@@ -2,12 +2,14 @@ import {
buildDependencies,
buildDependencyGraph,
buildParentDependencyGraph,
checkAPIInvocation,
onUpdateVariableNode,
VariableGraph,
} from '../util';
import {
buildDependenciesMock,
buildGraphMock,
checkAPIInvocationMock,
onUpdateVariableNodeMock,
} from './mock';
@@ -70,6 +72,97 @@ describe('dashboardVariables - utilities and processors', () => {
});
});
describe('checkAPIInvocation', () => {
const {
variablesToGetUpdated,
variableData,
parentDependencyGraph,
} = checkAPIInvocationMock;
const mockRootElement = {
name: 'deployment_environment',
key: '036a47cd-9ffc-47de-9f27-0329198964a8',
id: '036a47cd-9ffc-47de-9f27-0329198964a8',
modificationUUID: '5f71b591-f583-497c-839d-6a1590c3f60f',
selectedValue: 'production',
type: 'QUERY',
// ... other properties omitted for brevity
} as any;
describe('edge cases', () => {
it('should return false when variableData is empty', () => {
expect(
checkAPIInvocation(
variablesToGetUpdated,
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return false when parentDependencyGraph is empty', () => {
expect(
checkAPIInvocation(variablesToGetUpdated, variableData, {}),
).toBeFalsy();
});
});
describe('variable sequences', () => {
it('should return true for valid sequence', () => {
expect(
checkAPIInvocation(
['k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return false for invalid sequence', () => {
expect(
checkAPIInvocation(
['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return false when variableData is not in sequence', () => {
expect(
checkAPIInvocation(
['deployment_environment', 'service_name', 'endpoint'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
});
describe('root element behavior', () => {
it('should return true for valid root element sequence', () => {
expect(
checkAPIInvocation(
[
'deployment_environment',
'service_name',
'endpoint',
'http_status_code',
],
mockRootElement,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return true for empty variablesToGetUpdated array', () => {
expect(
checkAPIInvocation([], mockRootElement, parentDependencyGraph),
).toBeTruthy();
});
});
});
describe('Graph Building Utilities', () => {
const { graph } = buildGraphMock;
const { variables } = buildDependenciesMock;
@@ -130,86 +223,10 @@ describe('dashboardVariables - utilities and processors', () => {
},
hasCycle: false,
cycleNodes: undefined,
transitiveDescendants: {
deployment_environment: ['service_name', 'endpoint', 'http_status_code'],
endpoint: ['http_status_code'],
environment: [],
http_status_code: [],
k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
k8s_namespace_name: [],
k8s_node_name: ['k8s_namespace_name'],
service_name: ['endpoint', 'http_status_code'],
},
};
expect(buildDependencyGraph(graph)).toEqual(expected);
});
it('should return empty transitiveDescendants for an empty graph', () => {
const result = buildDependencyGraph({});
expect(result.transitiveDescendants).toEqual({});
expect(result.order).toEqual([]);
expect(result.hasCycle).toBe(false);
});
it('should compute transitiveDescendants for a linear chain (a -> b -> c)', () => {
const linearGraph: VariableGraph = {
a: ['b'],
b: ['c'],
c: [],
};
const result = buildDependencyGraph(linearGraph);
expect(result.transitiveDescendants).toEqual({
a: ['b', 'c'],
b: ['c'],
c: [],
});
});
it('should compute transitiveDescendants for a diamond dependency (a -> b, a -> c, b -> d, c -> d)', () => {
const diamondGraph: VariableGraph = {
a: ['b', 'c'],
b: ['d'],
c: ['d'],
d: [],
};
const result = buildDependencyGraph(diamondGraph);
expect(result.transitiveDescendants.a).toEqual(
expect.arrayContaining(['b', 'c', 'd']),
);
expect(result.transitiveDescendants.a).toHaveLength(3);
expect(result.transitiveDescendants.b).toEqual(['d']);
expect(result.transitiveDescendants.c).toEqual(['d']);
expect(result.transitiveDescendants.d).toEqual([]);
});
it('should handle disconnected components in transitiveDescendants', () => {
const disconnectedGraph: VariableGraph = {
a: ['b'],
b: [],
x: ['y'],
y: [],
};
const result = buildDependencyGraph(disconnectedGraph);
expect(result.transitiveDescendants.a).toEqual(['b']);
expect(result.transitiveDescendants.b).toEqual([]);
expect(result.transitiveDescendants.x).toEqual(['y']);
expect(result.transitiveDescendants.y).toEqual([]);
});
it('should return empty transitiveDescendants for all leaf nodes', () => {
const leafOnlyGraph: VariableGraph = {
a: [],
b: [],
c: [],
};
const result = buildDependencyGraph(leafOnlyGraph);
expect(result.transitiveDescendants).toEqual({
a: [],
b: [],
c: [],
});
});
});
describe('buildDependencies', () => {

View File

@@ -1,3 +1,36 @@
/* eslint-disable sonarjs/no-duplicate-string */
export const checkAPIInvocationMock = {
variablesToGetUpdated: [],
variableData: {
name: 'k8s_node_name',
key: '4d71d385-beaf-4434-8dbf-c62be68049fc',
allSelected: false,
customValue: '',
description: '',
id: '4d71d385-beaf-4434-8dbf-c62be68049fc',
modificationUUID: '77233d3c-96d7-4ccb-aa9d-11b04d563068',
multiSelect: false,
order: 6,
queryValue:
"SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}\nGROUP BY k8s_node_name",
selectedValue: 'gke-signoz-saas-si-consumer-bsc-e2sd4-a6d430fa-gvm2',
showALLOption: false,
sort: 'DISABLED',
textboxValue: '',
type: 'QUERY',
},
parentDependencyGraph: {
deployment_environment: [],
service_name: ['deployment_environment'],
endpoint: ['deployment_environment', 'service_name'],
http_status_code: ['endpoint'],
k8s_cluster_name: [],
environment: [],
k8s_node_name: ['k8s_cluster_name'],
k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
},
} as any;
export const onUpdateVariableNodeMock = {
nodeToUpdate: 'deployment_environment',
graph: {

View File

@@ -1,16 +1,9 @@
import { OptionData } from 'components/NewSelect/types';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isEmpty } from 'lodash-es';
import { isEmpty, isNull } from 'lodash-es';
import {
IDashboardVariables,
IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export function areArraysEqual(
@@ -52,16 +45,30 @@ const getDependentVariablesBasedOnVariableName = (
}
return variables
.map((variable) => {
?.map((variable: any) => {
if (variable.type === 'QUERY') {
// Combined pattern for all formats
// {{.variable_name}} - original format
// $variable_name - dollar prefix format
// [[variable_name]] - square bracket format
// {{variable_name}} - without dot format
const patterns = [
`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
`\\$${variableName}\\b`, // $var
`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
];
const combinedRegex = new RegExp(patterns.join('|'));
const queryValue = variable.queryValue || '';
if (textContainsVariableReference(queryValue, variableName)) {
const dependVarReMatch = queryValue.match(combinedRegex);
if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
return variable.name;
}
}
return null;
})
.filter((val): val is string => val !== null);
.filter((val: string | null) => !isNull(val));
};
export type VariableGraph = Record<string, string[]>;
@@ -239,26 +246,10 @@ export const buildDependencyGraph = (
const hasCycle = topologicalOrder.length !== Object.keys(dependencies)?.length;
// Pre-compute transitive descendants by walking topological order in reverse.
// Each node's transitive descendants = direct children + their transitive descendants.
const transitiveDescendants: VariableGraph = {};
for (let i = topologicalOrder.length - 1; i >= 0; i--) {
const node = topologicalOrder[i];
const desc = new Set<string>();
for (const child of adjList[node] || []) {
desc.add(child);
for (const d of transitiveDescendants[child] || []) {
desc.add(d);
}
}
transitiveDescendants[node] = Array.from(desc);
}
return {
order: topologicalOrder,
graph: adjList,
parentDependencyGraph: buildParentDependencyGraph(adjList),
transitiveDescendants,
hasCycle,
cycleNodes,
};
@@ -293,6 +284,33 @@ export const onUpdateVariableNode = (
});
};
export const checkAPIInvocation = (
variablesToGetUpdated: string[],
variableData: IDashboardVariable,
parentDependencyGraph?: VariableGraph,
): boolean => {
if (isEmpty(variableData.name)) {
return false;
}
if (isEmpty(parentDependencyGraph)) {
return false;
}
// if no dependency then true
const haveDependency =
parentDependencyGraph?.[variableData.name || '']?.length > 0;
if (!haveDependency) {
return true;
}
// if variable is in the list and has dependency then check if it's the top element in the queue then true else false
return (
variablesToGetUpdated.length > 0 &&
variablesToGetUpdated[0] === variableData.name
);
};
export const getOptionsForDynamicVariable = (
normalizedValues: (string | number | boolean)[],
relatedValues: string[],
@@ -357,130 +375,3 @@ export const getSelectValue = (
}
return selectedValue?.toString();
};
/**
* Merges multiple arrays of values into a single deduplicated string array.
*/
export function mergeUniqueStrings(
...arrays: (string | number | boolean)[][]
): string[] {
return [...new Set(arrays.flatMap((arr) => arr.map((v) => v.toString())))];
}
function isEligibleFilterVariable(
variable: IDashboardVariable,
currentVariableId: string,
): boolean {
if (variable.id === currentVariableId) {
return false;
}
if (variable.type !== 'DYNAMIC') {
return false;
}
if (!variable.dynamicVariablesAttribute) {
return false;
}
if (!variable.selectedValue || isEmpty(variable.selectedValue)) {
return false;
}
return !(variable.showALLOption && variable.allSelected);
}
function formatQueryValue(val: string): string {
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val;
}
return `'${val.replace(/'/g, "\\'")}'`;
}
function buildQueryPart(attribute: string, values: string[]): string {
const formatted = values.map(formatQueryValue);
if (formatted.length === 1) {
return `${attribute} = ${formatted[0]}`;
}
return `${attribute} IN [${formatted.join(', ')}]`;
}
/**
* Builds a filter query string from sibling dynamic variables' selected values.
* e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
*/
export function buildExistingDynamicVariableQuery(
existingVariables: IDashboardVariables | null,
currentVariableId: string,
hasDynamicAttribute: boolean,
): string {
if (!existingVariables || !hasDynamicAttribute) {
return '';
}
const queryParts: string[] = [];
for (const variable of Object.values(existingVariables)) {
// Skip the current variable being processed
if (!isEligibleFilterVariable(variable, currentVariableId)) {
continue;
}
const rawValues = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = rawValues
.filter(
(val): val is string | number | boolean =>
val !== null && val !== undefined && val !== '',
)
.map((val) => val.toString());
if (validValues.length > 0 && variable.dynamicVariablesAttribute) {
queryParts.push(
buildQueryPart(variable.dynamicVariablesAttribute, validValues),
);
}
}
return queryParts.join(' AND ');
}
function isVariableInActiveFetchState(state: string | undefined): boolean {
return state === 'loading' || state === 'revalidating';
}
/**
* Completes or fails a variable's fetch state machine transition.
* No-ops if the variable is not currently in an active fetch state.
*/
export function settleVariableFetch(
name: string | undefined,
outcome: 'complete' | 'failure',
): void {
if (!name) {
return;
}
const currentState = variableFetchStore.getSnapshot().states[name];
if (!isVariableInActiveFetchState(currentState)) {
return;
}
if (outcome === 'complete') {
onVariableFetchComplete(name);
} else {
onVariableFetchFailure(name);
}
}
export function extractErrorMessage(
error: { message?: string } | null,
): string {
if (!error) {
return SOMETHING_WENT_WRONG;
}
return (
error.message ||
'Please make sure configuration is valid and you have required setup and permissions'
);
}
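
A usage sketch for checkAPIInvocation above, reusing the parentDependencyGraph from the checkAPIInvocationMock fixture in this change: a variable with no parents may always fetch, while a dependent variable fetches only when it is at the head of the update queue. Import paths follow the ones used elsewhere in this diff; the partial variable objects are cast for brevity, as the test mocks do.

import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { checkAPIInvocation, VariableGraph } from './util';

// Child -> parents map, borrowed from the checkAPIInvocationMock fixture.
const parentDependencyGraph: VariableGraph = {
  deployment_environment: [],
  k8s_cluster_name: [],
  k8s_node_name: ['k8s_cluster_name'],
  k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
};

const rootVariable = ({ name: 'deployment_environment' } as Partial<IDashboardVariable>) as IDashboardVariable;
const childVariable = ({ name: 'k8s_node_name' } as Partial<IDashboardVariable>) as IDashboardVariable;

// No parents: the variable may always fetch.
console.log(checkAPIInvocation([], rootVariable, parentDependencyGraph)); // true

// Has a parent and sits at the head of the update queue: fetch now.
console.log(
  checkAPIInvocation(
    ['k8s_node_name', 'k8s_namespace_name'],
    childVariable,
    parentDependencyGraph,
  ),
); // true

// Has a parent but is not at the head yet: wait for k8s_cluster_name.
console.log(
  checkAPIInvocation(
    ['k8s_cluster_name', 'k8s_node_name'],
    childVariable,
    parentDependencyGraph,
  ),
); // false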

View File

@@ -1,31 +1,4 @@
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
variableFetchStore: {
getSnapshot: jest.fn(),
},
onVariableFetchComplete: jest.fn(),
onVariableFetchFailure: jest.fn(),
}));
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
areArraysEqual,
buildExistingDynamicVariableQuery,
extractErrorMessage,
mergeUniqueStrings,
onUpdateVariableNode,
settleVariableFetch,
VariableGraph,
} from './util';
// ────────────────────────────────────────────────────────────────
// Existing tests
// ────────────────────────────────────────────────────────────────
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
describe('areArraysEqual', () => {
it('should return true for equal arrays with same order', () => {
@@ -176,348 +149,3 @@ describe('onUpdateVariableNode', () => {
expect(visited).toEqual(['namespace', 'service', 'pod']);
});
});
// ────────────────────────────────────────────────────────────────
// New tests for functions added in recent commits
// ────────────────────────────────────────────────────────────────
function makeDynamicVar(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'DYNAMIC',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
allSelected: false,
dynamicVariablesAttribute: 'attr',
selectedValue: 'some-value',
...overrides,
} as IDashboardVariable;
}
describe('mergeUniqueStrings', () => {
it('should merge two arrays and deduplicate', () => {
expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toEqual(['a', 'b', 'c']);
});
it('should convert numbers and booleans to strings', () => {
expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toEqual([
'1',
'true',
'hello',
'2',
'false',
]);
});
it('should deduplicate when number and its string form both appear', () => {
expect(mergeUniqueStrings([42], ['42'])).toEqual(['42']);
});
it('should handle a single array', () => {
expect(mergeUniqueStrings(['x', 'y', 'x'])).toEqual(['x', 'y']);
});
it('should handle three or more arrays', () => {
expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toEqual([
'a',
'b',
'c',
]);
});
it('should return empty array when no arrays are provided', () => {
expect(mergeUniqueStrings()).toEqual([]);
});
it('should return empty array when all input arrays are empty', () => {
expect(mergeUniqueStrings([], [], [])).toEqual([]);
});
it('should preserve order of first occurrence', () => {
expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toEqual(['c', 'a', 'b']);
});
});
describe('buildExistingDynamicVariableQuery', () => {
// --- Guard clauses ---
it('should return empty string when existingVariables is null', () => {
expect(buildExistingDynamicVariableQuery(null, 'v1', true)).toBe('');
});
it('should return empty string when hasDynamicAttribute is false', () => {
const variables = { v2: makeDynamicVar({ id: 'v2' }) };
expect(buildExistingDynamicVariableQuery(variables, 'v1', false)).toBe('');
});
// --- Eligibility filtering ---
it('should skip the current variable (same id)', () => {
const variables = {
v1: makeDynamicVar({
id: 'v1',
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip non-DYNAMIC variables', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables without dynamicVariablesAttribute', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: undefined,
selectedValue: 'val',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with null selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: null }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with empty string selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: '' }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with empty array selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: [] }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables where showALLOption and allSelected are both true', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: true,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should include variable with showALLOption true but allSelected false', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: false,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"ns = 'prod'",
);
});
// --- Value formatting ---
it('should quote string values in the query', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: 'zeus',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name = 'zeus'",
);
});
it('should leave numeric values unquoted', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: '200',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
'http.status_code = 200',
);
});
it('should escape single quotes in string values', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'user.name',
selectedValue: "O'Brien",
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"user.name = 'O\\'Brien'",
);
});
it('should build an IN clause for array selectedValue with multiple items', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene']",
);
});
it('should handle mix of numeric and string values in IN clause', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: ['200', 'unknown'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"http.status_code IN [200, 'unknown']",
);
});
it('should filter out empty string values from array', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'region',
selectedValue: ['us-east', '', 'eu-west'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"region IN ['us-east', 'eu-west']",
);
});
// --- Multiple siblings ---
it('should join multiple sibling variables with AND', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
v3: makeDynamicVar({
id: 'v3',
dynamicVariablesAttribute: 'doc_op_type',
selectedValue: 'test',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'",
);
});
it('should return empty string when no variables are eligible', () => {
const variables = {
v1: makeDynamicVar({ id: 'v1' }), // same as current — skipped
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }), // not DYNAMIC
v3: makeDynamicVar({ id: 'v3', selectedValue: null }), // no value
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
});
describe('settleVariableFetch', () => {
const mockGetSnapshot = variableFetchStore.getSnapshot as jest.Mock;
const mockComplete = onVariableFetchComplete as jest.Mock;
const mockFailure = onVariableFetchFailure as jest.Mock;
beforeEach(() => {
jest.clearAllMocks();
});
it('should no-op when name is undefined', () => {
settleVariableFetch(undefined, 'complete');
expect(mockGetSnapshot).not.toHaveBeenCalled();
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
});
it.each(['idle', 'waiting', 'error', undefined] as const)(
'should no-op when variable state is %s',
(state) => {
mockGetSnapshot.mockReturnValue({ states: { myVar: state } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
},
);
it('should call onVariableFetchComplete when state is loading and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});
it('should call onVariableFetchComplete when state is revalidating and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});
it('should call onVariableFetchFailure when state is loading and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
it('should call onVariableFetchFailure when state is revalidating and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
});
describe('extractErrorMessage', () => {
const FALLBACK_MESSAGE =
'Please make sure configuration is valid and you have required setup and permissions';
it('should return SOMETHING_WENT_WRONG when error is null', () => {
expect(extractErrorMessage(null)).toBe('Something went wrong');
});
it('should return the error message when it exists', () => {
expect(extractErrorMessage({ message: 'Query timeout' })).toBe(
'Query timeout',
);
});
it('should return fallback when error object has no message property', () => {
expect(extractErrorMessage({})).toBe(FALLBACK_MESSAGE);
});
it('should return fallback when error.message is empty string', () => {
expect(extractErrorMessage({ message: '' })).toBe(FALLBACK_MESSAGE);
});
it('should return fallback when error.message is undefined', () => {
expect(extractErrorMessage({ message: undefined })).toBe(FALLBACK_MESSAGE);
});
});

View File

@@ -1,11 +1,17 @@
import { VariableItemProps } from '../VariableItem';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: VariableItemProps['variableData'];
onValueUpdate: VariableItemProps['onValueUpdate'];
variableData: IDashboardVariable;
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}

View File

@@ -17,19 +17,6 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';
// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));
// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
getFieldValues: jest.fn(),
@@ -108,7 +95,7 @@ describe('Dynamic Variable Default Behavior', () => {
}
}
if (queryFn) {
queryFn({ signal: undefined });
queryFn();
}
}
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
@@ -247,7 +234,6 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});
@@ -501,7 +487,6 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});

View File

@@ -49,11 +49,15 @@ const mockDashboard = {
// Mock the dashboard provider with stable functions to prevent infinite loops
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): any => ({
selectedDashboard: mockDashboard,
setSelectedDashboard: mockSetSelectedDashboard,
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
variablesToGetUpdated: ['env'], // Stable initial value
setVariablesToGetUpdated: mockSetVariablesToGetUpdated,
}),
}));

View File

@@ -1,117 +0,0 @@
import { AlignedData } from 'uplot';
import { getInitialStackedBands, stack } from '../stackUtils';
describe('stackUtils', () => {
describe('stack', () => {
const neverOmit = (): boolean => false;
it('preserves time axis as first row', () => {
const data: AlignedData = [
[100, 200, 300],
[1, 2, 3],
[4, 5, 6],
];
const { data: result } = stack(data, neverOmit);
expect(result[0]).toEqual([100, 200, 300]);
});
it('stacks value series cumulatively (last = raw, first = total)', () => {
// Time, then 3 value series. Stack order: last series stays raw, then we add upward.
const data: AlignedData = [
[0, 1, 2],
[1, 2, 3], // series 1
[4, 5, 6], // series 2
[7, 8, 9], // series 3
];
const { data: result } = stack(data, neverOmit);
// result[1] = s1+s2+s3, result[2] = s2+s3, result[3] = s3
expect(result[1]).toEqual([12, 15, 18]); // 1+4+7, 2+5+8, 3+6+9
expect(result[2]).toEqual([11, 13, 15]); // 4+7, 5+8, 6+9
expect(result[3]).toEqual([7, 8, 9]);
});
it('treats null values as 0 when stacking', () => {
const data: AlignedData = [
[0, 1],
[1, null],
[null, 10],
];
const { data: result } = stack(data, neverOmit);
expect(result[1]).toEqual([1, 10]); // total
expect(result[2]).toEqual([0, 10]); // last series with null→0
});
it('copies omitted series as-is without accumulating', () => {
// Omit series 2 (index 2); series 1 and 3 are stacked.
const data: AlignedData = [
[0, 1],
[10, 20], // series 1
[100, 200], // series 2 - omitted
[1, 2], // series 3
];
const omitSeries2 = (i: number): boolean => i === 2;
const { data: result } = stack(data, omitSeries2);
// series 3 raw: [1, 2]; series 2 omitted: [100, 200] as-is; series 1 stacked with s3: [11, 22]
expect(result[1]).toEqual([11, 22]); // 10+1, 20+2
expect(result[2]).toEqual([100, 200]); // copied, not stacked
expect(result[3]).toEqual([1, 2]);
});
it('returns bands between consecutive visible series when none omitted', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
[3, 4],
[5, 6],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([{ series: [1, 2] }, { series: [2, 3] }]);
});
it('returns bands only between visible series when some are omitted', () => {
// 4 value series; omit index 2. Visible: 1, 3, 4. Bands: [1,3], [3,4]
const data: AlignedData = [[0], [1], [2], [3], [4]];
const omitSeries2 = (i: number): boolean => i === 2;
const { bands } = stack(data, omitSeries2);
expect(bands).toEqual([{ series: [1, 3] }, { series: [3, 4] }]);
});
it('returns empty bands when only one value series', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([]);
});
});
describe('getInitialStackedBands', () => {
it('returns one band between each consecutive pair for seriesCount 3', () => {
expect(getInitialStackedBands(3)).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
]);
});
it('returns empty array for seriesCount 0 or 1', () => {
expect(getInitialStackedBands(0)).toEqual([]);
expect(getInitialStackedBands(1)).toEqual([]);
});
it('returns single band for seriesCount 2', () => {
expect(getInitialStackedBands(2)).toEqual([{ series: [1, 2] }]);
});
it('returns bands [1,2], [2,3], ..., [n-1, n] for seriesCount n', () => {
const bands = getInitialStackedBands(5);
expect(bands).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
{ series: [3, 4] },
{ series: [4, 5] },
]);
});
});
});

View File

@@ -1,116 +0,0 @@
import uPlot, { AlignedData } from 'uplot';
/**
* Stack data cumulatively (top-down: first series = top, last = bottom).
* When `omit(seriesIndex)` returns true, that series is excluded from stacking.
*/
export function stack(
data: AlignedData,
omit: (seriesIndex: number) => boolean,
): { data: AlignedData; bands: uPlot.Band[] } {
const timeAxis = data[0];
const pointCount = timeAxis.length;
const valueSeriesCount = data.length - 1; // exclude time axis
const stackedSeries = buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
});
const bands = buildFillBands(valueSeriesCount + 1, omit); // +1 for 1-based series indices
return {
data: [timeAxis, ...stackedSeries] as AlignedData,
bands,
};
}
interface BuildStackedSeriesParams {
data: AlignedData;
valueSeriesCount: number;
pointCount: number;
omit: (seriesIndex: number) => boolean;
}
/**
* Accumulate from last series upward: last series = raw values, first = total.
* Omitted series are copied as-is (no accumulation).
*/
function buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
}: BuildStackedSeriesParams): (number | null)[][] {
const stackedSeries: (number | null)[][] = Array(valueSeriesCount);
const cumulativeSums = Array(pointCount).fill(0) as number[];
for (let seriesIndex = valueSeriesCount; seriesIndex >= 1; seriesIndex--) {
const rawValues = data[seriesIndex] as (number | null)[];
if (omit(seriesIndex)) {
stackedSeries[seriesIndex - 1] = rawValues;
} else {
stackedSeries[seriesIndex - 1] = rawValues.map((rawValue, pointIndex) => {
const numericValue = rawValue == null ? 0 : Number(rawValue);
return (cumulativeSums[pointIndex] += numericValue);
});
}
}
return stackedSeries;
}
/**
* Bands define fill between consecutive visible series for stacked appearance.
* uPlot format: [upperSeriesIdx, lowerSeriesIdx].
*/
function buildFillBands(
seriesLength: number,
omit: (seriesIndex: number) => boolean,
): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesLength; seriesIndex++) {
if (omit(seriesIndex)) {
continue;
}
const nextVisibleSeriesIndex = findNextVisibleSeriesIndex(
seriesLength,
seriesIndex,
omit,
);
if (nextVisibleSeriesIndex !== -1) {
bands.push({ series: [seriesIndex, nextVisibleSeriesIndex] });
}
}
return bands;
}
function findNextVisibleSeriesIndex(
seriesLength: number,
afterIndex: number,
omit: (seriesIndex: number) => boolean,
): number {
for (let i = afterIndex + 1; i < seriesLength; i++) {
if (!omit(i)) {
return i;
}
}
return -1;
}
/**
* Returns band indices for initial stacked state (no series omitted).
* Top-down: first series at top, band fills between consecutive series.
* uPlot band format: [upperSeriesIdx, lowerSeriesIdx].
*/
export function getInitialStackedBands(seriesCount: number): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesCount; seriesIndex++) {
bands.push({ series: [seriesIndex, seriesIndex + 1] });
}
return bands;
}
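As a worked illustration of the cumulative, top-down stacking described above (numbers chosen arbitrarily):
// Time axis plus two value series.
const input: AlignedData = [
	[0, 1, 2], // timestamps
	[1, 2, 3], // series 1
	[4, 5, 6], // series 2
];
const { data, bands } = stack(input, () => false);
// data[1] = [5, 7, 9]  (series 1 + series 2: totals drawn at the top)
// data[2] = [4, 5, 6]  (last series stays raw at the bottom)
// bands   = [{ series: [1, 2] }]  (fill between consecutive visible series)
// getInitialStackedBands(2) yields the same bands when nothing is hidden.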

View File

@@ -1,313 +0,0 @@
import { renderHook } from '@testing-library/react';
import uPlot from 'uplot';
import type { UseBarChartStackingParams } from '../useBarChartStacking';
import { useBarChartStacking } from '../useBarChartStacking';
type MockConfig = { addHook: jest.Mock };
function asConfig(c: MockConfig): UseBarChartStackingParams['config'] {
return (c as unknown) as UseBarChartStackingParams['config'];
}
function createMockConfig(): {
config: MockConfig;
invokeSetData: (plot: uPlot) => void;
invokeSetSeries: (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
) => void;
removeSetData: jest.Mock;
removeSetSeries: jest.Mock;
} {
let setDataHandler: ((plot: uPlot) => void) | null = null;
let setSeriesHandler:
| ((plot: uPlot, seriesIndex: number | null, opts: uPlot.Series) => void)
| null = null;
const removeSetData = jest.fn();
const removeSetSeries = jest.fn();
const addHook = jest.fn(
(
hookName: string,
handler: (plot: uPlot, ...args: unknown[]) => void,
): (() => void) => {
if (hookName === 'setData') {
setDataHandler = handler as (plot: uPlot) => void;
return removeSetData;
}
if (hookName === 'setSeries') {
setSeriesHandler = handler as (
plot: uPlot,
seriesIndex: number | null,
opts: uPlot.Series,
) => void;
return removeSetSeries;
}
return jest.fn();
},
);
const config: MockConfig = { addHook };
const invokeSetData = (plot: uPlot): void => {
setDataHandler?.(plot);
};
const invokeSetSeries = (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
): void => {
setSeriesHandler?.(plot, seriesIndex, opts as uPlot.Series);
};
return {
config,
invokeSetData,
invokeSetSeries,
removeSetData,
removeSetSeries,
};
}
function createMockPlot(overrides: Partial<uPlot> = {}): uPlot {
return ({
data: [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
],
series: [{ show: true }, { show: true }, { show: true }],
delBand: jest.fn(),
addBand: jest.fn(),
setData: jest.fn(),
...overrides,
} as unknown) as uPlot;
}
describe('useBarChartStacking', () => {
it('returns data as-is when isStackedBarChart is false', () => {
const data: uPlot.AlignedData = [
[100, 200],
[1, 2],
[3, 4],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: null,
}),
);
expect(result.current).toBe(data);
});
it('returns data as-is when config is null and isStackedBarChart is true', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[4, 5],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
// Still returns stacked data (computed in useMemo); no hooks registered
expect(result.current[0]).toEqual([0, 1]);
expect(result.current[1]).toEqual([5, 7]); // stacked
expect(result.current[2]).toEqual([4, 5]);
});
it('returns stacked data when isStackedBarChart is true and multiple value series', () => {
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current[0]).toEqual([0, 1, 2]);
expect(result.current[1]).toEqual([12, 15, 18]); // s1+s2+s3
expect(result.current[2]).toEqual([11, 13, 15]); // s2+s3
expect(result.current[3]).toEqual([7, 8, 9]);
});
it('returns data as-is when only one value series (no stacking needed)', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current).toEqual(data);
});
it('registers setData and setSeries hooks when isStackedBarChart and config provided', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
expect(config.addHook).toHaveBeenCalledWith('setData', expect.any(Function));
expect(config.addHook).toHaveBeenCalledWith(
'setSeries',
expect.any(Function),
);
});
it('does not register hooks when isStackedBarChart is false', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: asConfig(config),
}),
);
expect(config.addHook).not.toHaveBeenCalled();
});
it('calls cleanup when unmounted', () => {
const { config, removeSetData, removeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const { unmount } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
unmount();
expect(removeSetData).toHaveBeenCalled();
expect(removeSetSeries).toHaveBeenCalled();
});
it('re-stacks and updates plot when setData hook is invoked', () => {
const { config, invokeSetData } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
];
const plot = createMockPlot({
data: [
[0, 1, 2],
[5, 7, 9],
[4, 5, 6],
],
});
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
invokeSetData(plot);
expect(plot.delBand).toHaveBeenCalledWith(null);
expect(plot.addBand).toHaveBeenCalled();
expect(plot.setData).toHaveBeenCalledWith(
expect.arrayContaining([
[0, 1, 2],
expect.any(Array), // stacked row 1
expect.any(Array), // stacked row 2
]),
);
});
it('re-stacks when setSeries hook is invoked (e.g. legend toggle)', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[10, 20],
[5, 10],
];
// Plot data must match unstacked length so canApplyStacking passes
const plot = createMockPlot({
data: [
[0, 1],
[15, 30],
[5, 10],
],
});
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
invokeSetSeries(plot, 1, { show: false });
expect(plot.setData).toHaveBeenCalled();
});
it('does not re-stack when setSeries is called with focus option', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const plot = createMockPlot();
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
(plot.setData as jest.Mock).mockClear();
invokeSetSeries(plot, 1, { focus: true } as uPlot.Series);
expect(plot.setData).not.toHaveBeenCalled();
});
});

View File

@@ -11,7 +11,6 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { get } from 'lodash-es';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -78,12 +77,6 @@ export function prepareBarPanelConfig({
builder.setBands(getInitialStackedBands(seriesCount));
}
const stepIntervals: Record<string, number> = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const seriesList: QueryData[] = apiResponse?.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(
@@ -96,8 +89,6 @@ export function prepareBarPanelConfig({
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
const currentStepInterval = get(stepIntervals, series.queryName, undefined);
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
@@ -110,7 +101,6 @@ export function prepareBarPanelConfig({
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
stepInterval: currentStepInterval,
});
});

View File

@@ -14,6 +14,11 @@ export interface GraphVisibilityState {
dataIndex: SeriesVisibilityItem[];
}
export interface SeriesVisibilityState {
labels: string[];
visibility: boolean[];
}
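For clarity, a small example of the new parallel-array shape next to the per-item shape it replaces; the values are invented, and SeriesVisibilityItem is assumed to be the { label, show } pair used elsewhere in this diff.
// Old shape: one object per series.
const items: SeriesVisibilityItem[] = [
	{ label: 'CPU', show: true },
	{ label: 'Memory', show: false },
];
// New shape: labels and visibility kept in index-aligned parallel arrays.
const state: SeriesVisibilityState = {
	labels: ['CPU', 'Memory'],
	visibility: [true, false],
};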
/**
* Context in which a panel is rendered. Used to vary behavior (e.g. persistence,
* interactions) per context.

View File

@@ -62,10 +62,10 @@ describe('legendVisibilityUtils', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual([
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
expect(result).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('returns visibility by index including duplicate labels', () => {
@@ -85,11 +85,10 @@ describe('legendVisibilityUtils', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual([
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
{ label: 'Memory', show: false },
]);
expect(result).toEqual({
labels: ['CPU', 'CPU', 'Memory'],
visibility: [true, false, false],
});
});
it('returns null on malformed JSON in localStorage', () => {
@@ -128,10 +127,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('adds a new widget entry when other widgets already exist', () => {
@@ -150,7 +149,7 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-new');
expect(stored).not.toBeNull();
expect(stored).toEqual([{ label: 'CPU', show: false }]);
expect(stored).toEqual({ labels: ['CPU'], visibility: [false] });
});
it('updates existing widget visibility when entry already exists', () => {
@@ -176,10 +175,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'CPU', show: false },
{ label: 'Memory', show: true },
]);
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [false, true],
});
});
it('silently handles malformed existing JSON without throwing', () => {
@@ -202,10 +201,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'x-axis', show: true },
{ label: 'CPU', show: false },
]);
expect(stored).toEqual({
labels: ['x-axis', 'CPU'],
visibility: [true, false],
});
const expected = [
{
name: 'widget-1',
@@ -232,12 +231,14 @@ describe('legendVisibilityUtils', () => {
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual([
{ label: 'A', show: true },
]);
expect(getStoredSeriesVisibility('widget-b')).toEqual([
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual({
labels: ['A'],
visibility: [true],
});
expect(getStoredSeriesVisibility('widget-b')).toEqual({
labels: ['B'],
visibility: [true],
});
});
it('calls setItem with storage key and stringified visibility states', () => {

View File

@@ -1,6 +1,10 @@
import { LOCALSTORAGE } from 'constants/localStorage';
import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
import {
GraphVisibilityState,
SeriesVisibilityItem,
SeriesVisibilityState,
} from '../types';
/**
* Retrieves the stored series visibility for a specific widget from localStorage by index.
@@ -10,7 +14,7 @@ import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
*/
export function getStoredSeriesVisibility(
widgetId: string,
): SeriesVisibilityItem[] | null {
): SeriesVisibilityState | null {
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);
@@ -25,7 +29,10 @@ export function getStoredSeriesVisibility(
return null;
}
return widgetState.dataIndex;
return {
labels: widgetState.dataIndex.map((item) => item.label),
visibility: widgetState.dataIndex.map((item) => item.show),
};
} catch (error) {
if (error instanceof SyntaxError) {
// If the stored data is malformed, remove it

View File

@@ -6,7 +6,7 @@ import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
@@ -63,12 +63,8 @@ function QuerySection({
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions={
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
showFunctions
version={ENTITY_VERSION_V5}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>

View File

@@ -6,12 +6,10 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
@@ -55,6 +53,7 @@ function GridCardGraph({
customOnRowClick,
customTimeRangeWindowForCoRelation,
enableDrillDown,
widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
@@ -65,8 +64,8 @@ function GridCardGraph({
toScrollWidgetId,
setToScrollWidgetId,
setDashboardQueryRangeCalled,
variablesToGetUpdated,
} = useDashboard();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -118,25 +117,10 @@ function GridCardGraph({
const updatedQuery = widget?.query;
const referencedVariableNames = useMemo(() => {
if (!variables || !updatedQuery) {
return [];
}
const allNames = Object.values(variables)
.map((v) => v.name)
.filter((name): name is string => !!name);
return getVariableReferencesInQuery(updatedQuery, allNames);
}, [updatedQuery, variables]);
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
const isPanelWaitingOnAnyVariable = useIsPanelWaitingOnVariable(
referencedVariableNames,
);
const queryEnabledCondition =
isVisible && !isEmptyWidget && isQueryEnabled && !isPanelWaitingOnAnyVariable;
const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (widget.panelTypes !== PANEL_TYPES.LIST) {
@@ -193,6 +177,27 @@ function GridCardGraph({
[requestData.query],
);
// Bring back dependency on variable chaining for panels to refetch,
// but only for non-dynamic variables. We derive a stable token from
// the head of the variablesToGetUpdated queue when it's non-dynamic.
const nonDynamicVariableChainToken = useMemo(() => {
if (!variablesToGetUpdated || variablesToGetUpdated.length === 0) {
return undefined;
}
if (!variables) {
return undefined;
}
const headName = variablesToGetUpdated[0];
const variableObj = Object.values(variables).find(
(variable) => variable?.name === headName,
);
if (variableObj && variableObj.type !== 'DYNAMIC') {
return headName;
}
return undefined;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variablesToGetUpdated, variables]);
const queryResponse = useGetQueryRange(
{
...requestData,
@@ -219,7 +224,11 @@ function GridCardGraph({
requestData,
variables
? Object.entries(variables).reduce((acc, [id, variable]) => {
if (variable.name && referencedVariableNames.includes(variable.name)) {
if (
variable.type !== 'DYNAMIC' ||
(widgetsByDynamicVariableId?.[variable.id] &&
widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
) {
return { ...acc, [id]: variable.selectedValue };
}
return acc;
@@ -228,6 +237,9 @@ function GridCardGraph({
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
? [customTimeRange.startTime, customTimeRange.endTime]
: []),
// Include non-dynamic variable chaining token to drive refetches
// only when a non-dynamic variable is at the head of the queue
...(nonDynamicVariableChainToken ? [nonDynamicVariableChainToken] : []),
],
retry(failureCount, error): boolean {
if (
@@ -240,7 +252,7 @@ function GridCardGraph({
return failureCount < 2;
},
keepPreviousData: true,
enabled: queryEnabledCondition,
enabled: queryEnabledCondition && !nonDynamicVariableChainToken,
refetchOnMount: false,
onError: (error) => {
const errorMessage =
@@ -307,7 +319,7 @@ function GridCardGraph({
threshold={threshold}
headerMenuList={menuList}
isFetchingResponse={
queryResponse.isFetching || isPanelWaitingOnAnyVariable
queryResponse.isFetching || variablesToGetUpdated.length > 0
}
setRequestData={setRequestData}
onClickHandler={onClickHandler}

View File

@@ -72,6 +72,7 @@ export interface GridCardGraphProps {
customOnRowClick?: (record: RowData) => void;
customTimeRangeWindowForCoRelation?: string | undefined;
enableDrillDown?: boolean;
widgetsByDynamicVariableId?: Record<string, string[]>;
}
export interface GetGraphVisibilityStateOnLegendClickProps {

View File

@@ -16,6 +16,7 @@ import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -101,6 +102,8 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();
useEffect(() => {
setCurrentPanelMap(panelMap);
}, [panelMap]);
@@ -614,6 +617,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
onDragSelect={onDragSelect}
dataAvailable={checkIfDataExists}
enableDrillDown={enableDrillDown}
widgetsByDynamicVariableId={widgetsByDynamicVariableId}
/>
</Card>
</CardContainer>

View File

@@ -477,67 +477,47 @@
}
}
.observability-tools-radio-container {
display: flex;
flex-wrap: wrap;
gap: 0 12px;
width: 528px;
.observability-tool-radio-item {
display: flex;
align-items: center;
gap: 8px;
height: 32px;
width: calc((528px - 12px) / 2);
flex: 0 0 calc((528px - 12px) / 2);
label {
color: var(--l1-foreground);
font-size: 13px;
cursor: pointer;
}
button[role='radio'] {
&[data-state='unchecked'] {
border-color: var(--l3-border) !important;
border-width: 1px !important;
}
}
&.observability-tool-others-item {
.onboarding-questionaire-other-input {
flex: 1;
}
}
}
}
.migration-timeline-radio-container,
.opentelemetry-radio-container {
display: flex;
flex-wrap: wrap;
gap: 0 12px;
width: 528px;
.migration-timeline-radio-item,
.opentelemetry-radio-item {
display: flex;
align-items: center;
gap: 8px;
height: 32px;
width: calc((528px - 12px) / 2);
flex: 0 0 calc((528px - 12px) / 2);
.opentelemetry-radio-group {
width: 100%;
label {
color: var(--l1-foreground);
font-size: 13px;
cursor: pointer;
.opentelemetry-radio-items-wrapper {
display: flex;
flex-direction: row;
flex-wrap: nowrap;
gap: 12px;
width: 100%;
}
button[role='radio'] {
&[data-state='unchecked'] {
border-color: var(--l3-border) !important;
border-width: 1px !important;
.opentelemetry-radio-item {
display: flex;
align-items: center;
gap: 8px;
height: 32px;
width: calc((528px - 12px) / 2);
min-width: 258px;
flex: 0 0 calc((528px - 12px) / 2);
color: var(--l1-foreground);
font-family: Inter, sans-serif;
font-size: 13px;
font-weight: 400;
line-height: 1;
letter-spacing: -0.065px;
box-sizing: border-box;
.ant-radio {
.ant-radio-inner {
width: 16px;
height: 16px;
border-color: var(--l3-border);
}
&.ant-radio-checked .ant-radio-inner {
border-color: var(--bg-robin-500);
background-color: var(--bg-robin-500);
}
}
}
}
@@ -997,6 +977,27 @@
color: var(--bg-slate-300);
}
.opentelemetry-radio-container {
.opentelemetry-radio-group {
.opentelemetry-radio-items-wrapper {
.opentelemetry-radio-item {
color: var(--l1-foreground);
.ant-radio {
.ant-radio-inner {
border-color: var(--l3-border);
}
&.ant-radio-checked .ant-radio-inner {
border-color: var(--bg-robin-500);
background-color: var(--bg-robin-500);
}
}
}
}
}
}
.onboarding-back-button {
border-color: var(--text-vanilla-300);
color: var(--l3-foreground);

View File

@@ -1,27 +1,34 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Button } from '@signozhq/button';
import { Checkbox } from '@signozhq/checkbox';
import { Input } from '@signozhq/input';
import {
RadioGroup,
RadioGroupItem,
RadioGroupLabel,
} from '@signozhq/radio-group';
import { Typography } from 'antd';
import { Radio, Typography } from 'antd';
import { RadioChangeEvent } from 'antd/es/radio';
import logEvent from 'api/common/logEvent';
import { ArrowRight } from 'lucide-react';
import editOrg from 'api/organization/editOrg';
import { useNotifications } from 'hooks/useNotifications';
import { ArrowRight, Loader2 } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import '../OnboardingQuestionaire.styles.scss';
export interface OrgData {
id: string;
displayName: string;
}
export interface OrgDetails {
organisationName: string;
usesObservability: boolean | null;
observabilityTool: string | null;
otherTool: string | null;
usesOtel: boolean | null;
migrationTimeline: string | null;
}
interface OrgQuestionsProps {
currentOrgData: OrgData | null;
orgDetails: OrgDetails;
onNext: (details: OrgDetails) => void;
}
@@ -38,14 +45,19 @@ const observabilityTools = {
Others: 'Others',
};
const migrationTimelineOptions = {
lessThanMonth: 'Less than a month',
oneToThreeMonths: '1-3 months',
greaterThanThreeMonths: 'Greater than 3 months',
justExploring: 'Just exploring',
};
function OrgQuestions({
currentOrgData,
orgDetails,
onNext,
}: OrgQuestionsProps): JSX.Element {
const { updateOrg } = useAppContext();
const { notifications } = useNotifications();
function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
const { t } = useTranslation(['organizationsettings', 'common']);
const [organisationName, setOrganisationName] = useState<string>(
orgDetails?.organisationName || '',
);
const [observabilityTool, setObservabilityTool] = useState<string | null>(
orgDetails?.observabilityTool || null,
);
@@ -54,33 +66,92 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
);
const [isNextDisabled, setIsNextDisabled] = useState<boolean>(true);
useEffect(() => {
setOrganisationName(orgDetails.organisationName);
}, [orgDetails.organisationName]);
const [isLoading, setIsLoading] = useState<boolean>(false);
const [usesOtel, setUsesOtel] = useState<boolean | null>(orgDetails.usesOtel);
const [migrationTimeline, setMigrationTimeline] = useState<string | null>(
orgDetails?.migrationTimeline || null,
);
const showMigrationQuestion =
observabilityTool !== null && observabilityTool !== 'None';
const handleNext = (): void => {
const handleOrgNameUpdate = async (): Promise<void> => {
const usesObservability =
!observabilityTool?.includes('None') && observabilityTool !== null;
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
usesOtel,
migrationTimeline,
});
		/* Bail out early when orgData is missing, the organisation name is empty, or the name is unchanged from the one already in orgData */
if (
!currentOrgData ||
!organisationName ||
organisationName === '' ||
orgDetails.organisationName === organisationName
) {
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
onNext({
usesObservability,
observabilityTool,
otherTool,
usesOtel,
migrationTimeline,
});
onNext({
organisationName,
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
return;
}
try {
setIsLoading(true);
const { statusCode, error } = await editOrg({
displayName: organisationName,
orgId: currentOrgData.id,
});
if (statusCode === 204) {
updateOrg(currentOrgData?.id, organisationName);
logEvent('Org Onboarding: Org Name Updated', {
organisationName,
});
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
onNext({
organisationName,
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
} else {
logEvent('Org Onboarding: Org Name Update Failed', {
organisationName: orgDetails.organisationName,
});
notifications.error({
message:
error ||
t('something_went_wrong', {
ns: 'common',
}),
});
}
setIsLoading(false);
} catch (error) {
setIsLoading(false);
notifications.error({
message: t('something_went_wrong', {
ns: 'common',
}),
});
}
};
const isValidUsesObservability = (): boolean => {
@@ -102,29 +173,22 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
useEffect(() => {
const isValidObservability = isValidUsesObservability();
const isMigrationValid = !showMigrationQuestion || migrationTimeline !== null;
if (usesOtel !== null && isValidObservability && isMigrationValid) {
if (organisationName !== '' && usesOtel !== null && isValidObservability) {
setIsNextDisabled(false);
} else {
setIsNextDisabled(true);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
usesOtel,
observabilityTool,
otherTool,
migrationTimeline,
showMigrationQuestion,
]);
}, [organisationName, usesOtel, observabilityTool, otherTool]);
const handleObservabilityToolChange = (value: string): void => {
setObservabilityTool(value);
if (value !== 'Others') {
setOtherTool('');
}
if (value === 'None') {
setMigrationTimeline(null);
const createObservabilityToolHandler = (tool: string) => (
checked: boolean,
): void => {
if (checked) {
setObservabilityTool(tool);
} else if (observabilityTool === tool) {
setObservabilityTool(null);
}
};
@@ -132,6 +196,10 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
setUsesOtel(value === 'yes');
};
const handleOnNext = (): void => {
handleOrgNameUpdate();
};
return (
<div className="questions-container">
<div className="onboarding-header-section">
@@ -146,24 +214,40 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
<div className="questions-form-container">
<div className="questions-form">
<div className="form-group">
<label className="question" htmlFor="organisationName">
Name of your company
</label>
<Input
type="text"
name="organisationName"
id="organisationName"
placeholder="e.g. Simpsonville"
autoComplete="off"
value={organisationName}
onChange={(e): void => setOrganisationName(e.target.value)}
/>
</div>
<div className="form-group">
<label className="question" htmlFor="observabilityTool">
Which observability tool do you currently use?
</label>
<RadioGroup
value={observabilityTool || ''}
onValueChange={handleObservabilityToolChange}
className="observability-tools-radio-container"
>
<div className="observability-tools-checkbox-container">
{Object.entries(observabilityTools).map(([tool, label]) => {
if (tool === 'Others') {
return (
<div
key={tool}
className="radio-item observability-tool-radio-item observability-tool-others-item"
className="checkbox-item observability-tool-checkbox-item observability-tool-others-item"
>
<RadioGroupItem value={tool} id={`radio-${tool}`} />
{observabilityTool === 'Others' ? (
<Checkbox
id={`checkbox-${tool}`}
checked={observabilityTool === tool}
onCheckedChange={createObservabilityToolHandler(tool)}
labelName={observabilityTool === 'Others' ? '' : label}
/>
{observabilityTool === 'Others' && (
<Input
type="text"
className="onboarding-questionaire-other-input"
@@ -172,60 +256,55 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
autoFocus
onChange={(e): void => setOtherTool(e.target.value)}
/>
) : (
<RadioGroupLabel htmlFor={`radio-${tool}`}>{label}</RadioGroupLabel>
)}
</div>
);
}
return (
<div key={tool} className="radio-item observability-tool-radio-item">
<RadioGroupItem value={tool} id={`radio-${tool}`} />
<RadioGroupLabel htmlFor={`radio-${tool}`}>{label}</RadioGroupLabel>
<div
key={tool}
className="checkbox-item observability-tool-checkbox-item"
>
<Checkbox
id={`checkbox-${tool}`}
checked={observabilityTool === tool}
onCheckedChange={createObservabilityToolHandler(tool)}
labelName={label}
/>
</div>
);
})}
</RadioGroup>
</div>
{showMigrationQuestion && (
<div className="form-group">
<div className="question">
What is your timeline for migrating to SigNoz?
</div>
<RadioGroup
value={migrationTimeline || ''}
onValueChange={setMigrationTimeline}
className="migration-timeline-radio-container"
>
{Object.entries(migrationTimelineOptions).map(([key, label]) => (
<div key={key} className="radio-item migration-timeline-radio-item">
<RadioGroupItem value={key} id={`radio-migration-${key}`} />
<RadioGroupLabel htmlFor={`radio-migration-${key}`}>
{label}
</RadioGroupLabel>
</div>
))}
</RadioGroup>
</div>
)}
</div>
<div className="form-group">
<div className="question">Do you already use OpenTelemetry?</div>
<RadioGroup
value={usesOtel === true ? 'yes' : usesOtel === false ? 'no' : ''}
onValueChange={handleOtelChange}
className="opentelemetry-radio-container"
>
<div className="radio-item opentelemetry-radio-item">
<RadioGroupItem value="yes" id="radio-otel-yes" />
<RadioGroupLabel htmlFor="radio-otel-yes">Yes</RadioGroupLabel>
</div>
<div className="radio-item opentelemetry-radio-item">
<RadioGroupItem value="no" id="radio-otel-no" />
<RadioGroupLabel htmlFor="radio-otel-no">No</RadioGroupLabel>
</div>
</RadioGroup>
<div className="opentelemetry-radio-container">
<Radio.Group
value={((): string | undefined => {
if (usesOtel === true) {
return 'yes';
}
if (usesOtel === false) {
return 'no';
}
return undefined;
})()}
onChange={(e: RadioChangeEvent): void =>
handleOtelChange(e.target.value)
}
className="opentelemetry-radio-group"
>
<div className="opentelemetry-radio-items-wrapper">
<Radio value="yes" className="opentelemetry-radio-item">
Yes
</Radio>
<Radio value="no" className="opentelemetry-radio-item">
No
</Radio>
</div>
</Radio.Group>
</div>
</div>
</div>
@@ -233,9 +312,15 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
variant="solid"
color="primary"
className={`onboarding-next-button ${isNextDisabled ? 'disabled' : ''}`}
onClick={handleNext}
onClick={handleOnNext}
disabled={isNextDisabled}
suffixIcon={<ArrowRight size={12} />}
suffixIcon={
isLoading ? (
<Loader2 className="animate-spin" size={12} />
) : (
<ArrowRight size={12} />
)
}
>
Next
</Button>

View File

@@ -69,7 +69,7 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
expect(screen.getByText(/welcome to signoz cloud/i)).toBeInTheDocument();
expect(screen.getByLabelText(/name of your company/i)).toBeInTheDocument();
expect(
screen.getByText(/which observability tool do you currently use/i),
).toBeInTheDocument();
@@ -86,12 +86,15 @@ describe('OnboardingQuestionaire Component', () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
const datadogCheckbox = screen.getByLabelText(/datadog/i);
await user.click(datadogCheckbox);
const otelYes = screen.getByRole('radio', { name: /yes/i });
await user.click(otelYes);
await user.click(screen.getByLabelText(/just exploring/i));
const nextButton = await screen.findByRole('button', { name: /next/i });
expect(nextButton).not.toBeDisabled();
@@ -109,38 +112,15 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});
it('shows migration timeline options only when specific observability tools are selected', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
// Initially not visible
expect(
screen.queryByText(/What is your timeline for migrating to SigNoz/i),
).not.toBeInTheDocument();
const datadogCheckbox = screen.getByLabelText(/datadog/i);
await user.click(datadogCheckbox);
expect(
await screen.findByText(/What is your timeline for migrating to SigNoz/i),
).toBeInTheDocument();
// Not visible when None is selected
const noneCheckbox = screen.getByLabelText(/none\/starting fresh/i);
await user.click(noneCheckbox);
expect(
screen.queryByText(/What is your timeline for migrating to SigNoz/i),
).not.toBeInTheDocument();
});
it('proceeds to step 2 when next is clicked', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
const nextButton = screen.getByRole('button', { name: /next/i });
await user.click(nextButton);
@@ -157,10 +137,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -176,10 +157,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
await waitFor(() => {
@@ -193,10 +175,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -220,10 +203,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -248,10 +232,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate through steps 1 and 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -282,10 +267,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 3
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -304,4 +290,40 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});
});
describe('Error Handling', () => {
it('handles organization update error gracefully', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.put(EDIT_ORG_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(500),
ctx.json({
error: {
code: 'INTERNAL_ERROR',
message: 'Failed to update organization',
},
}),
),
),
);
render(<OnboardingQuestionaire />);
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
const nextButton = screen.getByRole('button', { name: /next/i });
await user.click(nextButton);
// Component should still be functional
await waitFor(() => {
expect(nextButton).not.toBeDisabled();
});
});
});
});
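The step-one navigation sequence repeated across the tests above could be captured once; a minimal sketch, assuming the same Testing Library setup used in this file (the helper name is hypothetical and not part of this diff):

import { screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';

type User = ReturnType<typeof userEvent.setup>;

// Hypothetical helper: fills step 1 the same way the tests above do and advances to step 2.
async function completeStepOne(user: User): Promise<void> {
  const orgNameInput = screen.getByLabelText(/name of your company/i);
  await user.clear(orgNameInput);
  await user.type(orgNameInput, 'Test Company');
  await user.click(screen.getByLabelText(/datadog/i));
  await user.click(screen.getByRole('radio', { name: /yes/i }));
  await user.click(screen.getByLabelText(/just exploring/i));
  await user.click(screen.getByRole('button', { name: /next/i }));
}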

View File

@@ -23,7 +23,7 @@ import InviteTeamMembers from './InviteTeamMembers/InviteTeamMembers';
import OptimiseSignozNeeds, {
OptimiseSignozDetails,
} from './OptimiseSignozNeeds/OptimiseSignozNeeds';
import OrgQuestions, { OrgDetails } from './OrgQuestions/OrgQuestions';
import OrgQuestions, { OrgData, OrgDetails } from './OrgQuestions/OrgQuestions';
import './OnboardingQuestionaire.styles.scss';
@@ -37,11 +37,11 @@ export const showErrorNotification = (
};
const INITIAL_ORG_DETAILS: OrgDetails = {
organisationName: '',
usesObservability: true,
observabilityTool: '',
otherTool: '',
usesOtel: null,
migrationTimeline: null,
};
const INITIAL_SIGNOZ_DETAILS: SignozDetails = {
@@ -79,11 +79,25 @@ function OnboardingQuestionaire(): JSX.Element {
InviteTeamMembersProps[] | null
>(null);
const [currentOrgData, setCurrentOrgData] = useState<OrgData | null>(null);
const [
updatingOrgOnboardingStatus,
setUpdatingOrgOnboardingStatus,
] = useState<boolean>(false);
useEffect(() => {
if (org) {
setCurrentOrgData(org[0]);
setOrgDetails({
...orgDetails,
organisationName: org[0].displayName,
});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [org]);
useEffect(() => {
logEvent('Org Onboarding: Started', {
org_id: org?.[0]?.id,
@@ -161,7 +175,6 @@ function OnboardingQuestionaire(): JSX.Element {
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
@@ -195,6 +208,7 @@ function OnboardingQuestionaire(): JSX.Element {
<div className="onboarding-questionaire-content">
{currentStep === 1 && (
<OrgQuestions
currentOrgData={currentOrgData}
orgDetails={{
...orgDetails,
usesOtel: orgDetails.usesOtel ?? null,

View File

@@ -1,11 +1,11 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import BarPanel from 'container/DashboardContainer/visualization/panels/BarPanel/BarPanel';
import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
import HistogramPanelWrapper from './HistogramPanelWrapper';
import ListPanelWrapper from './ListPanelWrapper';
import PiePanelWrapper from './PiePanelWrapper';
import TablePanelWrapper from './TablePanelWrapper';
import UplotPanelWrapper from './UplotPanelWrapper';
import ValuePanelWrapper from './ValuePanelWrapper';
export const PanelTypeVsPanelWrapper = {
@@ -16,7 +16,7 @@ export const PanelTypeVsPanelWrapper = {
[PANEL_TYPES.TRACE]: null,
[PANEL_TYPES.EMPTY_WIDGET]: null,
[PANEL_TYPES.PIE]: PiePanelWrapper,
[PANEL_TYPES.BAR]: BarPanel,
[PANEL_TYPES.BAR]: UplotPanelWrapper,
[PANEL_TYPES.HISTOGRAM]: HistogramPanelWrapper,
};

View File

@@ -5,12 +5,16 @@ import { PanelMode } from 'container/DashboardContainer/visualization/panels/typ
import { WidgetGraphComponentProps } from 'container/GridCardLayout/GridCard/types';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';
export type PanelWrapperProps = {
queryResponse: UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
widget: Widgets;
setRequestData?: WidgetGraphComponentProps['setRequestData'];
isFullViewMode?: boolean;
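A wrapper consuming this prop type reads the series list off the react-query result; a small sketch, assuming MetricRangePayloadProps keeps the payload.data.result shape used by the existing QueryData-based wrappers (the helper itself is hypothetical):

// Sketch only: the nested payload.data.result path is an assumption, not something introduced by this diff.
function getSeriesList(
  queryResponse: PanelWrapperProps['queryResponse'],
): QueryData[] {
  return queryResponse.data?.payload?.data?.result ?? [];
}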

View File

@@ -1,316 +0,0 @@
import { act, renderHook } from '@testing-library/react';
import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { useIsPanelWaitingOnVariable } from '../useVariableFetchState';
function makeVariable(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'QUERY',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
...overrides,
};
}
function resetStores(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
function setFetchStates(states: Record<string, VariableFetchState>): void {
variableFetchStore.set(() => ({
states,
lastUpdated: {},
cycleIds: {},
}));
}
function setDashboardVariables(
overrides: Partial<IDashboardVariablesStoreState>,
): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
...overrides,
}));
}
describe('useIsPanelWaitingOnVariable', () => {
beforeEach(() => {
resetStores();
});
it('should return false when variableNames is empty', () => {
const { result } = renderHook(() => useIsPanelWaitingOnVariable([]));
expect(result.current).toBe(false);
});
it('should return false when all referenced variables are idle', () => {
setFetchStates({ a: 'idle', b: 'idle' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val1' }),
b: makeVariable({ id: 'b', selectedValue: 'val2' }),
},
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
expect(result.current).toBe(false);
});
it('should return true when a variable is loading with empty selectedValue', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return true when a variable is waiting with empty selectedValue', () => {
setFetchStates({ a: 'waiting' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: '' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return true when a variable is revalidating with empty selectedValue', () => {
setFetchStates({ a: 'revalidating' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return false when a variable is loading but has a selectedValue', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'some-value' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return true for DYNAMIC variable with allSelected=true that is loading', () => {
setFetchStates({ dyn: 'loading' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'some-val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should return true for DYNAMIC variable with allSelected=true that is waiting', () => {
setFetchStates({ dyn: 'waiting' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should return false for DYNAMIC variable with allSelected=true that is idle', () => {
setFetchStates({ dyn: 'idle' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(false);
});
it('should return false for non-DYNAMIC variable with allSelected=false and non-empty value that is loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({
id: 'a',
selectedValue: 'val',
allSelected: false,
}),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return true if any one of multiple variables is blocking', () => {
setFetchStates({ a: 'idle', b: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val' }),
b: makeVariable({ id: 'b', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
expect(result.current).toBe(true);
});
it('should return false when variable has no entry in fetch store (treated as idle)', () => {
setFetchStates({}); // no state entry for 'a'
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return false when variable is in error state with empty selectedValue', () => {
setFetchStates({ a: 'error' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should react to store updates', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
// Simulate variable fetch completing
act(() => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
});
expect(result.current).toBe(false);
});
it('should handle DYNAMIC variable with allSelected=false and empty selectedValue as blocking', () => {
setFetchStates({ dyn: 'loading' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: undefined,
allSelected: false,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should handle variable with array selectedValue as non-blocking when loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: ['val1', 'val2'] }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should handle variable with empty array selectedValue as blocking when loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: [] }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
});

View File

@@ -1,153 +0,0 @@
import { useCallback, useMemo, useRef, useSyncExternalStore } from 'react';
import isEmpty from 'lodash-es/isEmpty';
import {
IVariableFetchStoreState,
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { useDashboardVariablesSelector } from './useDashboardVariables';
/**
* Generic selector hook for the variable fetch store.
* Same pattern as useDashboardVariablesSelector.
*/
const useVariableFetchSelector = <T>(
selector: (state: IVariableFetchStoreState) => T,
): T => {
const selectorRef = useRef(selector);
selectorRef.current = selector;
const getSnapshot = useCallback(
() => selectorRef.current(variableFetchStore.getSnapshot()),
[],
);
return useSyncExternalStore(variableFetchStore.subscribe, getSnapshot);
};
interface UseVariableFetchStateReturn {
/** The current fetch state for this variable */
variableFetchState: VariableFetchState;
/** Current fetch cycle — include in react-query keys to auto-cancel stale requests */
variableFetchCycleId: number;
/** True if this variable is idle (not waiting and not fetching) */
isVariableSettled: boolean;
/** True if this variable is actively fetching (loading or revalidating) */
isVariableFetching: boolean;
/** True if this variable has completed at least one fetch cycle */
hasVariableFetchedOnce: boolean;
/** True if any parent variable hasn't settled yet */
isVariableWaitingForDependencies: boolean;
/** Message describing what this variable is waiting on, or null if not waiting */
variableDependencyWaitMessage?: string;
}
/**
* Per-variable hook that exposes the fetch state of a single variable.
* Reusable by both variable input components and panel components.
*
* Subscribes to both variableFetchStore (for states) and
* dashboardVariablesStore (for parent graph) to compute derived values.
*/
export function useVariableFetchState(
variableName: string,
): UseVariableFetchStateReturn {
// This variable's fetch state (loading, waiting, idle, etc.)
const variableFetchState = useVariableFetchSelector(
(s) => s.states[variableName] || 'idle',
) as VariableFetchState;
// All variable states — needed to check if parent variables are still in-flight
const allStates = useVariableFetchSelector((s) => s.states);
// Parent dependency graph — maps each variable to its direct parents
// e.g. { "childVariable": ["parentVariable"] } means "childVariable" depends on "parentVariable"
const parentGraph = useDashboardVariablesSelector(
(s) => s.dependencyData?.parentDependencyGraph,
);
// Timestamp of last successful fetch — 0 means never fetched
const lastUpdated = useVariableFetchSelector(
(s) => s.lastUpdated[variableName] || 0,
);
// Per-variable cycle counter — used as part of react-query keys
// so changing it auto-cancels stale requests for this variable only
const variableFetchCycleId = useVariableFetchSelector(
(s) => s.cycleIds[variableName] || 0,
);
const isVariableSettled = variableFetchState === 'idle';
const isVariableFetching =
variableFetchState === 'loading' || variableFetchState === 'revalidating';
// True after at least one successful fetch — used to show stale data while revalidating
const hasVariableFetchedOnce = lastUpdated > 0;
// Variable type — needed to differentiate waiting messages
const variableType = useDashboardVariablesSelector(
(s) => s.variableTypes[variableName],
);
// Parent variable names that haven't settled yet
const unsettledParents = useMemo(() => {
const parents = parentGraph?.[variableName] || [];
return parents.filter((p) => (allStates[p] || 'idle') !== 'idle');
}, [parentGraph, variableName, allStates]);
const isVariableWaitingForDependencies = unsettledParents.length > 0;
const variableDependencyWaitMessage = useMemo(() => {
if (variableFetchState !== 'waiting') {
return;
}
if (variableType === 'DYNAMIC') {
return 'Waiting for all query variable options to load.';
}
if (unsettledParents.length === 0) {
return;
}
const quoted = unsettledParents.map((p) => `"${p}"`);
const names =
quoted.length > 1
? `${quoted.slice(0, -1).join(', ')} and ${quoted[quoted.length - 1]}`
: quoted[0];
return `Waiting for options of ${names} to load.`;
}, [variableFetchState, variableType, unsettledParents]);
return {
variableFetchState,
isVariableSettled,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
isVariableFetching,
hasVariableFetchedOnce,
variableFetchCycleId,
};
}
export function useIsPanelWaitingOnVariable(variableNames: string[]): boolean {
const states = useVariableFetchSelector((s) => s.states);
const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
const variableTypesMap = useDashboardVariablesSelector((s) => s.variableTypes);
return variableNames.some((name) => {
const variableFetchState = states[name];
const { selectedValue, allSelected } = dashboardVariables?.[name] || {};
const isVariableInFetchingOrWaitingState =
variableFetchState === 'loading' ||
variableFetchState === 'revalidating' ||
variableFetchState === 'waiting';
if (variableTypesMap[name] === 'DYNAMIC' && allSelected) {
return isVariableInFetchingOrWaitingState;
}
return isEmpty(selectedValue) ? isVariableInFetchingOrWaitingState : false;
});
}
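For context on the hook removed above, a consumer would typically gate a panel's rendering (and hence its query) on it; a hypothetical sketch, with the import path and fallback UI assumed:

// Assumed import path; the real location is the file shown above.
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';

function PanelGate({
  variableNames,
  children,
}: {
  variableNames: string[];
  children: JSX.Element;
}): JSX.Element | null {
  // Hold back rendering while any referenced variable is still fetching or waiting
  // without a usable value (the error state counts as settled, per the tests above).
  const isWaiting = useIsPanelWaitingOnVariable(variableNames);
  return isWaiting ? null : children;
}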

View File

@@ -3,7 +3,6 @@ import { TelemetryFieldKey } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { placeWidgetAtBottom } from 'container/NewWidget/utils';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isArray } from 'lodash-es';
import {
Dashboard,
@@ -117,17 +116,10 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
filter.key?.key === variable.dynamicVariablesAttribute &&
(isArray(filter.value)
? filter.value.some(
(v) =>
typeof v === 'string' &&
variable.name &&
textContainsVariableReference(v, variable.name),
)
: typeof filter.value === 'string' &&
textContainsVariableReference(filter.value, variable.name)) &&
((isArray(filter.value) &&
filter.value.includes(`$${variable.name}`)) ||
filter.value === `$${variable.name}`) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -140,12 +132,7 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
queryData.filter?.expression &&
textContainsVariableReference(
queryData.filter.expression,
variable.name,
) &&
queryData.filter?.expression?.includes(`$${variable.name}`) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -162,9 +149,7 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
promqlQuery.query &&
textContainsVariableReference(promqlQuery.query, variable.name) &&
promqlQuery.query?.includes(`$${variable.name}`) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -180,9 +165,7 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
clickhouseQuery.query &&
textContainsVariableReference(clickhouseQuery.query, variable.name) &&
clickhouseQuery.query?.includes(`$${variable.name}`) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);

View File

@@ -10,12 +10,13 @@ import {
GetQueryResultsProps,
} from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { SuccessResponse, Warning } from 'types/api';
import APIError from 'types/api/error';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
type UseGetQueryRangeOptions = UseQueryOptions<
MetricQueryRangeSuccessResponse,
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
APIError | Error
>;
@@ -29,7 +30,10 @@ type UseGetQueryRange = (
widgetIndex: number;
publicDashboardId: string;
},
) => UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
) => UseQueryResult<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
Error
>;
export const useGetQueryRange: UseGetQueryRange = (
requestData,
@@ -141,7 +145,10 @@ export const useGetQueryRange: UseGetQueryRange = (
};
}, [options?.retry]);
return useQuery<MetricQueryRangeSuccessResponse, APIError | Error>({
return useQuery<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
APIError | Error
>({
queryFn: async ({ signal }) =>
GetMetricQueryRange(
modifiedRequestData,

View File

@@ -19,11 +19,7 @@ import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { SuccessResponse, SuccessResponseV2, Warning } from 'types/api';
import {
MetricQueryRangeSuccessResponse,
MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';
import { ExecStats, MetricRangePayloadV5 } from 'types/api/v5/queryRange';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -209,13 +205,13 @@ export async function GetMetricQueryRange(
widgetIndex: number;
publicDashboardId: string;
},
): Promise<MetricQueryRangeSuccessResponse> {
): Promise<SuccessResponse<MetricRangePayloadProps> & { warning?: Warning }> {
let legendMap: Record<string, string>;
let response:
| MetricQueryRangeSuccessResponse
| SuccessResponseV2<MetricRangePayloadV5>;
| SuccessResponse<MetricRangePayloadProps>
| SuccessResponseV2<MetricRangePayloadV5>
| (SuccessResponse<MetricRangePayloadProps> & { warning?: Warning });
let warning: Warning | undefined;
let meta: ExecStats | undefined;
const panelType = props.originalGraphType || props.graphType;
@@ -303,7 +299,6 @@ export async function GetMetricQueryRange(
);
warning = response.payload.warning || undefined;
meta = response.payload.meta || undefined;
} else {
const v5Response = await getQueryRangeV5(
v5Result.queryPayload,
@@ -323,7 +318,6 @@ export async function GetMetricQueryRange(
);
warning = response.payload.warning || undefined;
meta = response.payload.meta || undefined;
}
} else {
const legacyResult = prepareQueryRangePayload(props);
@@ -390,7 +384,6 @@ export async function GetMetricQueryRange(
return {
...response,
warning,
meta,
};
}
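With the widened return type, callers read the warning and series payload directly off the resolved value; a minimal sketch, assuming GetQueryResultsProps and GetMetricQueryRange are imported from this module and that payload.data.result holds the series list (the helper is hypothetical):

import { QueryData } from 'types/api/widgets/getQuery';

// Hypothetical consumer of GetMetricQueryRange after this change.
async function fetchSeries(
  props: GetQueryResultsProps,
  version: string,
): Promise<QueryData[]> {
  const result = await GetMetricQueryRange(props, version);
  if (result.warning) {
    // Surface the warning alongside the chart; handling is app-specific.
  }
  return result.payload?.data?.result ?? [];
}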

View File

@@ -1,4 +1,4 @@
import { SeriesVisibilityItem } from 'container/DashboardContainer/visualization/panels/types';
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
import { getStoredSeriesVisibility } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { thresholdsDrawHook } from 'lib/uPlotV2/hooks/useThresholdsDrawHook';
@@ -238,7 +238,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
/**
* Returns stored series visibility by index from localStorage when preferences source is LOCAL_STORAGE, otherwise null.
*/
private getStoredVisibility(): SeriesVisibilityItem[] | null {
private getStoredVisibility(): SeriesVisibilityState | null {
if (
this.widgetId &&
this.selectionPreferencesSource === SelectionPreferencesSource.LOCAL_STORAGE
@@ -248,98 +248,14 @@ export class UPlotConfigBuilder extends ConfigBuilder<
return null;
}
/**
* Derive visibility resolution state from stored preferences and current series:
* - visibleStoredLabels: labels that should always be visible
* - hiddenStoredLabels: labels that should always be hidden
* - hasActivePreference: whether a "mix" preference applies to new labels
*/
// eslint-disable-next-line sonarjs/cognitive-complexity
private getVisibilityResolutionState(): {
visibleStoredLabels: Set<string>;
hiddenStoredLabels: Set<string>;
hasActivePreference: boolean;
} {
const seriesVisibilityState = this.getStoredVisibility();
if (!seriesVisibilityState || seriesVisibilityState.length === 0) {
return {
visibleStoredLabels: new Set<string>(),
hiddenStoredLabels: new Set<string>(),
hasActivePreference: false,
};
}
// Single pass over stored items to derive:
// - visibleStoredLabels: any label that is ever stored as visible
// - hiddenStoredLabels: labels that are only ever stored as hidden
// - hasMixPreference: there is at least one visible and one hidden entry
const visibleStoredLabels = new Set<string>();
const hiddenStoredLabels = new Set<string>();
let hasAnyVisible = false;
let hasAnyHidden = false;
for (const { label, show } of seriesVisibilityState) {
if (show) {
hasAnyVisible = true;
visibleStoredLabels.add(label);
// If a label is ever visible, it should not be treated as "only hidden"
if (hiddenStoredLabels.has(label)) {
hiddenStoredLabels.delete(label);
}
} else {
hasAnyHidden = true;
// Only track as hidden if we have not already seen it as visible
if (!visibleStoredLabels.has(label)) {
hiddenStoredLabels.add(label);
}
}
}
const hasMixPreference = hasAnyVisible && hasAnyHidden;
// Current series labels in this chart.
const currentSeriesLabels = this.series.map(
(s: UPlotSeriesBuilder) => s.getConfig().label ?? '',
);
// Check if any stored "visible" label exists in the current series list.
const hasVisibleIntersection =
visibleStoredLabels.size > 0 &&
currentSeriesLabels.some((label) => visibleStoredLabels.has(label));
// Active preference only when there is a mix AND at least one visible
// stored label is present in the current series list.
const hasActivePreference = hasMixPreference && hasVisibleIntersection;
// We apply stored visibility in two cases:
// - There is an active preference (mix + intersection), OR
// - There is no mix (all true or all false), so we preserve legacy behavior.
const shouldApplyStoredVisibility = !hasMixPreference || hasActivePreference;
if (!shouldApplyStoredVisibility) {
return {
visibleStoredLabels: new Set<string>(),
hiddenStoredLabels: new Set<string>(),
hasActivePreference,
};
}
return {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
};
}
/**
* Get legend items with visibility state restored from localStorage if available
*/
getLegendItems(): Record<number, LegendItem> {
const {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
} = this.getVisibilityResolutionState();
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
);
return this.series.reduce((acc, s: UPlotSeriesBuilder, index: number) => {
const seriesConfig = s.getConfig();
@@ -347,11 +263,11 @@ export class UPlotConfigBuilder extends ConfigBuilder<
// +1 because uPlot series 0 is x-axis/time; data series are at 1, 2, ... (also matches stored visibility[0]=time, visibility[1]=first data, ...)
const seriesIndex = index + 1;
const show = resolveSeriesVisibility({
seriesIndex,
seriesShow: seriesConfig.show,
seriesLabel: label,
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
});
acc[seriesIndex] = {
@@ -380,23 +296,22 @@ export class UPlotConfigBuilder extends ConfigBuilder<
...DEFAULT_PLOT_CONFIG,
};
const {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
} = this.getVisibilityResolutionState();
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
);
config.series = [
{ value: (): string => '' }, // Base series for timestamp
...this.series.map((s) => {
...this.series.map((s, index) => {
const series = s.getConfig();
// Stored visibility[0] is x-axis/time; data series start at visibility[1]
const visible = resolveSeriesVisibility({
seriesIndex: index + 1,
seriesShow: series.show,
seriesLabel: series.label ?? '',
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
});
return {
...series,

View File

@@ -81,7 +81,6 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
barAlignment,
barMaxWidth,
barWidthFactor,
stepInterval,
} = this.props;
if (pathBuilder) {
return { paths: pathBuilder };
@@ -105,7 +104,6 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
barAlignment,
barMaxWidth,
barWidthFactor,
stepInterval,
});
return pathsBuilder(self, seriesIdx, idx0, idx1);
@@ -211,14 +209,12 @@ function getPathBuilder({
barAlignment = BarAlignment.Center,
barWidthFactor = 0.6,
barMaxWidth = 200,
stepInterval,
}: {
drawStyle: DrawStyle;
lineInterpolation?: LineInterpolation;
barAlignment?: BarAlignment;
barMaxWidth?: number;
barWidthFactor?: number;
stepInterval?: number;
}): Series.PathBuilder {
if (!builders) {
throw new Error('Required uPlot path builders are not available');
@@ -226,13 +222,14 @@ function getPathBuilder({
if (drawStyle === DrawStyle.Bar) {
const pathBuilders = uPlot.paths;
return getBarPathBuilder({
pathBuilders,
barAlignment,
barWidthFactor,
barMaxWidth,
stepInterval,
});
const barsConfigKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
if (!builders[barsConfigKey] && pathBuilders.bars) {
builders[barsConfigKey] = pathBuilders.bars({
size: [barWidthFactor, barMaxWidth],
align: barAlignment,
});
}
return builders[barsConfigKey];
}
if (drawStyle === DrawStyle.Line) {
@@ -250,81 +247,4 @@ function getPathBuilder({
return builders.spline;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
function getBarPathBuilder({
pathBuilders,
barAlignment,
barWidthFactor,
barMaxWidth,
stepInterval,
}: {
pathBuilders: typeof uPlot.paths;
barAlignment: BarAlignment;
barWidthFactor: number;
barMaxWidth: number;
stepInterval?: number;
}): Series.PathBuilder {
if (!builders) {
throw new Error('Required uPlot path builders are not available');
}
const barsPathBuilderFactory = pathBuilders.bars;
// When a stepInterval is provided (in seconds), cap the maximum bar width
// so that a single bar never visually spans more than stepInterval worth
// of time on the x-scale.
if (
typeof stepInterval === 'number' &&
stepInterval > 0 &&
barsPathBuilderFactory
) {
return (
self: uPlot,
seriesIdx: number,
idx0: number,
idx1: number,
): Series.Paths | null => {
let effectiveBarMaxWidth = barMaxWidth;
const xScale = self.scales.x as uPlot.Scale | undefined;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = self.valToPos(start, 'x');
const endPx = self.valToPos(end, 'x');
const intervalPx = Math.abs(endPx - startPx);
if (intervalPx > 0) {
effectiveBarMaxWidth =
typeof barMaxWidth === 'number'
? Math.min(barMaxWidth, intervalPx)
: intervalPx;
}
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
if (builders && !builders[barsCfgKey]) {
builders[barsCfgKey] = barsPathBuilderFactory({
size: [barWidthFactor, effectiveBarMaxWidth],
align: barAlignment,
});
}
return builders && builders[barsCfgKey]
? builders[barsCfgKey](self, seriesIdx, idx0, idx1)
: null;
};
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
if (!builders[barsCfgKey] && barsPathBuilderFactory) {
builders[barsCfgKey] = barsPathBuilderFactory({
size: [barWidthFactor, barMaxWidth],
align: barAlignment,
});
}
return builders[barsCfgKey];
}
export type { SeriesProps };
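After this removal, bar sizing is driven solely by barWidthFactor and barMaxWidth (passed to uPlot as size: [factor, maxPx]), with no per-stepInterval cap; a sketch of the remaining knobs, using the option names from SeriesProps above (DrawStyle, BarAlignment and SeriesProps are the types already imported by this module):

// Sketch: bar series options that still apply after the stepInterval cap was removed.
const barSeriesOptions: Partial<SeriesProps> = {
  drawStyle: DrawStyle.Bar,
  barAlignment: BarAlignment.Center, // how bars align relative to the data point
  barWidthFactor: 0.6, // fraction of the available step width
  barMaxWidth: 200, // hard cap in pixels, regardless of the x-step
};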

View File

@@ -186,10 +186,11 @@ describe('UPlotConfigBuilder', () => {
});
it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'Requests', show: true },
{ label: 'Errors', show: false },
]);
// Index 0 = x-axis/time; indices 1,2 = data series (Requests, Errors). resolveSeriesVisibility matches by seriesIndex + seriesLabel.
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue({
labels: ['x-axis', 'Requests', 'Errors'],
visibility: [true, true, false],
});
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
@@ -201,7 +202,7 @@ describe('UPlotConfigBuilder', () => {
const legendItems = builder.getLegendItems();
// When any series is hidden, visibility is driven by stored label-based preferences
// When any series is hidden, legend visibility is driven by the stored map
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].show).toBe(false);
@@ -212,109 +213,6 @@ describe('UPlotConfigBuilder', () => {
expect(secondSeries?.show).toBe(false);
});
it('hides new series by default when there is a mixed preference and a visible label matches current series', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'Requests', show: true },
{ label: 'Errors', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
builder.addSeries(createSeriesProps({ label: 'Latency' }));
const legendItems = builder.getLegendItems();
// Stored labels: Requests (visible), Errors (hidden).
// New label "Latency" should be hidden because there is a mixed preference
// and "Requests" (a visible stored label) is present in the current series.
expect(legendItems[1].label).toBe('Requests');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('Errors');
expect(legendItems[2].show).toBe(false);
expect(legendItems[3].label).toBe('Latency');
expect(legendItems[3].show).toBe(false);
const config = builder.getConfig();
const [, firstSeries, secondSeries, thirdSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('Requests');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('Errors');
expect(secondSeries?.show).toBe(false);
expect(thirdSeries?.label).toBe('Latency');
expect(thirdSeries?.show).toBe(false);
});
it('shows all series when there is a mixed preference but no visible stored labels match current series', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'StoredVisible', show: true },
{ label: 'StoredHidden', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
// None of these labels intersect with the stored visible label "StoredVisible"
builder.addSeries(createSeriesProps({ label: 'CPU' }));
builder.addSeries(createSeriesProps({ label: 'Memory' }));
const legendItems = builder.getLegendItems();
// Mixed preference exists in storage, but since no visible labels intersect
// with current series, stored preferences are ignored and all are visible.
expect(legendItems[1].label).toBe('CPU');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('Memory');
expect(legendItems[2].show).toBe(true);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('CPU');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('Memory');
expect(secondSeries?.show).toBe(true);
});
it('treats duplicate labels as visible when any stored entry for that label is visible', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-dup',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
// Two series with the same label; both should be visible because at least
// one stored entry for "CPU" is visible.
builder.addSeries(createSeriesProps({ label: 'CPU' }));
builder.addSeries(createSeriesProps({ label: 'CPU' }));
const legendItems = builder.getLegendItems();
expect(legendItems[1].label).toBe('CPU');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('CPU');
expect(legendItems[2].show).toBe(true);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('CPU');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('CPU');
expect(secondSeries?.show).toBe(true);
});
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',

View File

@@ -176,7 +176,6 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
show?: boolean;
spanGaps?: boolean;
isDarkMode?: boolean;
stepInterval?: number;
}
export interface LegendItem {

View File

@@ -1,44 +1,25 @@
/**
* Resolve the visibility of a single series based on:
* - Stored per-series visibility (when applicable)
* - Whether there is an "active preference" (mix of visible/hidden that matches current series)
* - The series' own default show flag
*/
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
export function resolveSeriesVisibility({
seriesIndex,
seriesShow,
seriesLabel,
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
}: {
seriesIndex: number;
seriesShow: boolean | undefined | null;
seriesLabel: string;
visibleStoredLabels: Set<string> | null;
hiddenStoredLabels: Set<string> | null;
hasActivePreference: boolean;
seriesVisibilityState: SeriesVisibilityState | null;
isAnySeriesHidden: boolean;
}): boolean {
const isStoredVisible = !!visibleStoredLabels?.has(seriesLabel);
const isStoredHidden = !!hiddenStoredLabels?.has(seriesLabel);
// If the label is explicitly stored as visible, always show it.
if (isStoredVisible) {
return true;
if (
isAnySeriesHidden &&
seriesVisibilityState?.visibility &&
seriesVisibilityState.labels.length > seriesIndex &&
seriesVisibilityState.labels[seriesIndex] === seriesLabel
) {
return seriesVisibilityState.visibility[seriesIndex] ?? false;
}
// If the label is explicitly stored as hidden (and never stored as visible),
// always hide it.
if (isStoredHidden) {
return false;
}
// "Active preference" means:
// - There is a mix of visible/hidden in storage, AND
// - At least one stored *visible* label exists in the current series list.
// For such a preference, any new/unknown series should be hidden by default.
if (hasActivePreference) {
return false;
}
// Otherwise fall back to the series' own config or show by default.
return seriesShow ?? true;
}
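A worked example of the new resolution rule, using the stored shape exercised by the builder tests elsewhere in this PR (values are illustrative; SeriesVisibilityState is assumed to be the { labels, visibility } pair shown in those tests):

const storedState: SeriesVisibilityState = {
  labels: ['x-axis', 'Requests', 'Errors'],
  visibility: [true, true, false],
};

// Index 2 matches the stored label 'Errors' and at least one series is hidden,
// so the stored value wins over the series' own show flag.
const errorsVisible = resolveSeriesVisibility({
  seriesIndex: 2,
  seriesShow: true,
  seriesLabel: 'Errors',
  seriesVisibilityState: storedState,
  isAnySeriesHidden: true,
}); // false

// A series beyond the stored range (or with a mismatched label) falls back to seriesShow ?? true.
const latencyVisible = resolveSeriesVisibility({
  seriesIndex: 3,
  seriesShow: undefined,
  seriesLabel: 'Latency',
  seriesVisibilityState: storedState,
  isAnySeriesHidden: true,
}); // true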

View File

@@ -84,6 +84,8 @@ const DashboardContext = createContext<IDashboardContext>({
toScrollWidgetId: '',
setToScrollWidgetId: () => {},
updateLocalStorageDashboardVariables: () => {},
variablesToGetUpdated: [],
setVariablesToGetUpdated: () => {},
dashboardQueryRangeCalled: false,
setDashboardQueryRangeCalled: () => {},
selectedRowWidgetId: '',
@@ -181,6 +183,10 @@ export function DashboardProvider({
exact: true,
});
const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
[],
);
const [layouts, setLayouts] = useState<Layout[]>([]);
const [panelMap, setPanelMap] = useState<
@@ -511,6 +517,8 @@ export function DashboardProvider({
updatedTimeRef,
setToScrollWidgetId,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,
@@ -533,6 +541,8 @@ export function DashboardProvider({
toScrollWidgetId,
updateLocalStorageDashboardVariables,
currentDashboard,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,

View File

@@ -1,527 +0,0 @@
import * as dashboardVariablesStore from '../dashboardVariables/dashboardVariablesStore';
import { IDependencyData } from '../dashboardVariables/dashboardVariablesStoreTypes';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
onVariableFetchComplete,
onVariableFetchFailure,
VariableFetchContext,
variableFetchStore,
} from '../variableFetchStore';
const getVariableDependencyContextSpy = jest.spyOn(
dashboardVariablesStore,
'getVariableDependencyContext',
);
function resetStore(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
}
function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
getVariableDependencyContextSpy.mockReturnValue({
doAllVariablesHaveValuesSelected: false,
variableTypes: {},
dynamicVariableOrder: [],
dependencyData: null,
...overrides,
});
}
/**
* Helper to build a dependency data object for tests.
* Only the fields used by the store actions are required.
*/
function buildDependencyData(
overrides: Partial<IDependencyData> = {},
): IDependencyData {
return {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
...overrides,
};
}
describe('variableFetchStore', () => {
beforeEach(() => {
resetStore();
jest.clearAllMocks();
});
// ==================== initializeVariableFetchStore ====================
describe('initializeVariableFetchStore', () => {
it('should initialize new variables to idle', () => {
initializeVariableFetchStore(['a', 'b', 'c']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' });
});
it('should preserve existing states for known variables', () => {
// Pre-set a state
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
initializeVariableFetchStore(['a', 'b']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('idle');
});
it('should clean up stale variables that no longer exist', () => {
variableFetchStore.update((d) => {
d.states.old = 'idle';
d.lastUpdated.old = 100;
d.cycleIds.old = 3;
});
initializeVariableFetchStore(['a']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.old).toBeUndefined();
expect(storeSnapshot.lastUpdated.old).toBeUndefined();
expect(storeSnapshot.cycleIds.old).toBeUndefined();
expect(storeSnapshot.states.a).toBe('idle');
});
it('should handle empty variable names array', () => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
initializeVariableFetchStore([]);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({});
});
});
// ==================== enqueueFetchOfAllVariables ====================
describe('enqueueFetchOfAllVariables', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
expect(variableFetchStore.getSnapshot().states.a).toBe('idle');
});
it('should set root query variables to loading and dependent ones to waiting', () => {
// a is root (no parents), b depends on a
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('waiting');
});
it('should set root query variables to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a'],
parentDependencyGraph: { a: [] },
}),
variableTypes: { a: 'QUERY' },
});
// Pre-set lastUpdated so it appears previously fetched
variableFetchStore.update((d) => {
d.lastUpdated.a = 1000;
});
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('revalidating');
});
it('should bump cycle IDs for all enqueued variables', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.cycleIds.a).toBe(1);
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set dynamic variables to waiting when not all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: false,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should set dynamic variables to loading when all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: true,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should not treat non-QUERY parents as query parents', () => {
// b has a CUSTOM parent — shouldn't cause waiting
mockContext({
dependencyData: buildDependencyData({
order: ['b'],
parentDependencyGraph: { b: ['customVar'] },
}),
variableTypes: { b: 'QUERY', customVar: 'CUSTOM' },
});
initializeVariableFetchStore(['b', 'customVar']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
});
});
// ==================== onVariableFetchComplete ====================
describe('onVariableFetchComplete', () => {
it('should set the completed variable to idle with a lastUpdated timestamp', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
const before = Date.now();
onVariableFetchComplete('a');
const after = Date.now();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.lastUpdated.a).toBeGreaterThanOrEqual(before);
expect(storeSnapshot.lastUpdated.a).toBeLessThanOrEqual(after);
});
it('should unblock waiting query-type children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.states.b).toBe('loading');
});
it('should not unblock non-QUERY children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// dyn1 is DYNAMIC, not QUERY, so it should remain waiting
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables are settled', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: [] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should NOT unlock dynamic variables if a query variable is still in-flight', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
});
// ==================== onVariableFetchFailure ====================
describe('onVariableFetchFailure', () => {
it('should set the failed variable to error', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
});
it('should set query-type transitive descendants to idle', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.c = 'waiting';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
expect(storeSnapshot.states.b).toBe('idle');
expect(storeSnapshot.states.c).toBe('idle');
});
it('should not touch non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables settle via error', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('loading');
});
});
// ==================== enqueueDescendantsOfVariable ====================
describe('enqueueDescendantsOfVariable', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('idle');
});
it('should enqueue query-type descendants with all parents settled', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set descendants to waiting when some parents are not settled', () => {
// b depends on both a and c; c is still loading
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'loading';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('waiting');
});
it('should skip non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
parentDependencyGraph: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('a');
// dyn1 is DYNAMIC, so it should not be touched
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('idle');
});
it('should handle chain of descendants: a -> b -> c', () => {
// a -> b -> c, all QUERY
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
parentDependencyGraph: { b: ['a'], c: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// b's parent (a) is idle/settled → loading
expect(storeSnapshot.states.b).toBe('loading');
// c's parent (b) just moved to loading (not settled) → waiting
expect(storeSnapshot.states.c).toBe('waiting');
});
it('should set descendants to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.lastUpdated.b = 1000;
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
});
});
});

View File

@@ -1,196 +0,0 @@
import {
IVariableFetchStoreState,
VariableFetchState,
} from '../variableFetchStore';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';
describe('variableFetchStoreUtils', () => {
describe('isSettled', () => {
it('should return true for idle state', () => {
expect(isSettled('idle')).toBe(true);
});
it('should return true for error state', () => {
expect(isSettled('error')).toBe(true);
});
it('should return false for loading state', () => {
expect(isSettled('loading')).toBe(false);
});
it('should return false for revalidating state', () => {
expect(isSettled('revalidating')).toBe(false);
});
it('should return false for waiting state', () => {
expect(isSettled('waiting')).toBe(false);
});
it('should return false for undefined', () => {
expect(isSettled(undefined)).toBe(false);
});
});
describe('resolveFetchState', () => {
it('should return "loading" when variable has never been fetched', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: {},
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "loading" when lastUpdated is 0', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 0 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "revalidating" when variable has been fetched before', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 1000 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('revalidating');
});
});
describe('areAllQueryVariablesSettled', () => {
it('should return true when all query variables are idle', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'idle',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when all query variables are in error', () => {
const states: Record<string, VariableFetchState> = {
a: 'error',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true with a mix of idle and error query variables', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return false when any query variable is loading', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'loading',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should return false when any query variable is waiting', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'waiting',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should ignore non-QUERY variable types', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
dynVar: 'loading',
};
const variableTypes = {
a: 'QUERY' as const,
dynVar: 'DYNAMIC' as const,
};
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when there are no QUERY variables', () => {
const states: Record<string, VariableFetchState> = {
dynVar: 'loading',
};
const variableTypes = { dynVar: 'DYNAMIC' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
});
describe('unlockWaitingDynamicVariables', () => {
it('should transition waiting dynamic variables to loading when never fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting', dyn2: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('loading');
expect(draft.states.dyn2).toBe('loading');
});
it('should transition waiting dynamic variables to revalidating when previously fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: { dyn1: 1000 },
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1']);
expect(draft.states.dyn1).toBe('revalidating');
});
it('should not touch dynamic variables that are not in waiting state', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'idle', dyn2: 'loading' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('idle');
expect(draft.states.dyn2).toBe('loading');
});
it('should handle empty dynamic variable order', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, []);
expect(draft.states.dyn1).toBe('waiting');
});
});
});
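Taken together, these helpers implement the settle-then-unlock step used by the fetch actions. A small composition with hand-picked state values, purely for illustration (imports mirror the test file above):

import {
  areAllQueryVariablesSettled,
  resolveFetchState,
  unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';
import { IVariableFetchStoreState } from '../variableFetchStore';

const draft: IVariableFetchStoreState = {
  states: { q1: 'idle', dyn1: 'waiting' },
  lastUpdated: { dyn1: 1000 }, // dyn1 has been fetched before
  cycleIds: {},
};
// Every QUERY variable is settled, so waiting dynamic variables can be unlocked.
areAllQueryVariablesSettled(draft.states, { q1: 'QUERY', dyn1: 'DYNAMIC' }); // true
resolveFetchState(draft, 'dyn1'); // 'revalidating', because lastUpdated.dyn1 > 0
unlockWaitingDynamicVariables(draft, ['dyn1']);
// draft.states.dyn1 is now 'revalidating' rather than 'loading'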

View File

@@ -1,297 +0,0 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
dashboardVariablesStore,
getVariableDependencyContext,
setDashboardVariablesStore,
updateDashboardVariablesStore,
} from '../dashboardVariablesStore';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
function createVariable(
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
return {
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
};
}
function resetStore(): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
describe('dashboardVariablesStore', () => {
beforeEach(() => {
resetStore();
});
describe('setDashboardVariablesStore', () => {
it('should set the dashboard variables and compute derived values', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-1');
expect(storeSnapshot.variables).toEqual(variables);
expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' });
expect(storeSnapshot.sortedVariablesArray).toHaveLength(1);
});
});
describe('updateDashboardVariablesStore', () => {
it('should update variables and recompute derived values', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
},
});
const updatedVariables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
};
updateDashboardVariablesStore({
dashboardId: 'dash-1',
variables: updatedVariables,
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']);
});
it('should replace dashboardId when it does not match', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
'not-there': createVariable({ name: 'not-there', order: 0 }),
},
});
updateDashboardVariablesStore({
dashboardId: 'dash-2',
variables: {
a: createVariable({ name: 'a', order: 0 }),
},
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-2');
expect(storeSnapshot.variableTypes).toEqual({
a: 'QUERY',
});
expect(storeSnapshot.variableTypes).not.toEqual({
'not-there': 'QUERY',
});
});
});
describe('getVariableDependencyContext', () => {
it('should return context with all fields', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
},
});
const {
variableTypes,
dynamicVariableOrder,
dependencyData,
} = getVariableDependencyContext();
expect(variableTypes).toEqual({ env: 'QUERY' });
expect(dynamicVariableOrder).toEqual([]);
expect(dependencyData).not.toBeNull();
});
it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: 'us-east',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: undefined,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: null as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=undefined as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: undefined,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty string selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty array selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: [] as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: false,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
});
});
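The doAllVariablesHaveValuesSelected flag gates whether dynamic variables may start fetching in parallel with query variables. A condensed sketch of the decisive case, reusing the createVariable helper defined in this test file (the values are assumed examples):

import {
  getVariableDependencyContext,
  setDashboardVariablesStore,
} from '../dashboardVariablesStore';

setDashboardVariablesStore({
  dashboardId: 'dash-1',
  variables: {
    env: createVariable({ name: 'env', type: 'QUERY', order: 0, selectedValue: 'prod' }),
    dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1, allSelected: true }),
  },
});
// env has a selected value and dyn1 is a DYNAMIC variable with allSelected=true,
// so dynamic variables do not need to wait for query variables to settle.
getVariableDependencyContext().doAllVariablesHaveValuesSelected; // true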

View File

@@ -1,369 +0,0 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
import {
buildDynamicVariableOrder,
buildSortedVariablesArray,
buildVariableTypesMap,
computeDerivedValues,
} from '../dashboardVariablesStoreUtils';
const createVariable = (
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
});
describe('dashboardVariablesStoreUtils', () => {
describe('buildSortedVariablesArray', () => {
it('should sort variables by order property', () => {
const variables: IDashboardVariables = {
c: createVariable({ name: 'c', order: 3 }),
a: createVariable({ name: 'a', order: 1 }),
b: createVariable({ name: 'b', order: 2 }),
};
const result = buildSortedVariablesArray(variables);
expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']);
});
it('should return empty array for empty variables', () => {
const result = buildSortedVariablesArray({});
expect(result).toEqual([]);
});
it('should create copies of variables (not references)', () => {
const original = createVariable({ name: 'a', order: 0 });
const variables: IDashboardVariables = { a: original };
const result = buildSortedVariablesArray(variables);
expect(result[0]).not.toBe(original);
expect(result[0]).toEqual(original);
});
});
describe('buildVariableTypesMap', () => {
it('should create a name-to-type mapping', () => {
const sorted = [
createVariable({ name: 'env', type: 'QUERY' }),
createVariable({ name: 'region', type: 'CUSTOM' }),
createVariable({ name: 'dynVar', type: 'DYNAMIC' }),
createVariable({ name: 'text', type: 'TEXTBOX' }),
];
const result = buildVariableTypesMap(sorted);
expect(result).toEqual({
env: 'QUERY',
region: 'CUSTOM',
dynVar: 'DYNAMIC',
text: 'TEXTBOX',
});
});
it('should return empty object for empty array', () => {
expect(buildVariableTypesMap([])).toEqual({});
});
});
describe('buildDynamicVariableOrder', () => {
it('should return only DYNAMIC variable names in order', () => {
const sorted = [
createVariable({ name: 'queryVar', type: 'QUERY', order: 0 }),
createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
createVariable({ name: 'customVar', type: 'CUSTOM', order: 2 }),
createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
];
const result = buildDynamicVariableOrder(sorted);
expect(result).toEqual(['dyn1', 'dyn2']);
});
it('should return empty array when no DYNAMIC variables exist', () => {
const sorted = [
createVariable({ name: 'a', type: 'QUERY' }),
createVariable({ name: 'b', type: 'CUSTOM' }),
];
expect(buildDynamicVariableOrder(sorted)).toEqual([]);
});
it('should return empty array for empty input', () => {
expect(buildDynamicVariableOrder([])).toEqual([]);
});
});
describe('computeDerivedValues', () => {
it('should compute all derived values from variables', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 1,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(2);
expect(result.sortedVariablesArray[0].name).toBe('env');
expect(result.sortedVariablesArray[1].name).toBe('dyn1');
expect(result.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
// dependencyData should exist since there are variables
expect(result.dependencyData).not.toBeNull();
});
it('should return null dependencyData for empty variables', () => {
const result = computeDerivedValues({});
expect(result.sortedVariablesArray).toEqual([]);
expect(result.dependencyData).toBeNull();
expect(result.variableTypes).toEqual({});
expect(result.dynamicVariableOrder).toEqual([]);
});
it('should handle all four variable types together', () => {
const variables: IDashboardVariables = {
queryVar: createVariable({
name: 'queryVar',
type: 'QUERY',
order: 0,
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(4);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'queryVar',
'customVar',
'dynVar',
'textVar',
]);
expect(result.variableTypes).toEqual({
queryVar: 'QUERY',
customVar: 'CUSTOM',
dynVar: 'DYNAMIC',
textVar: 'TEXTBOX',
});
expect(result.dynamicVariableOrder).toEqual(['dynVar']);
expect(result.dependencyData).not.toBeNull();
});
it('should sort variables by order regardless of insertion order', () => {
const variables: IDashboardVariables = {
z: createVariable({ name: 'z', type: 'QUERY', order: 4 }),
a: createVariable({ name: 'a', type: 'CUSTOM', order: 0 }),
m: createVariable({ name: 'm', type: 'DYNAMIC', order: 2 }),
b: createVariable({ name: 'b', type: 'TEXTBOX', order: 1 }),
x: createVariable({ name: 'x', type: 'QUERY', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'a',
'b',
'm',
'x',
'z',
]);
});
it('should include multiple dynamic variables in order', () => {
const variables: IDashboardVariables = {
dyn3: createVariable({ name: 'dyn3', type: 'DYNAMIC', order: 5 }),
query1: createVariable({ name: 'query1', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
custom1: createVariable({ name: 'custom1', type: 'CUSTOM', order: 2 }),
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']);
});
it('should build dependency data with query variable order for dependent queries', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
service: createVariable({
name: 'service',
type: 'QUERY',
order: 1,
queryValue: 'SELECT DISTINCT service FROM table WHERE env={{.env}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData } = result;
expect(dependencyData).not.toBeNull();
// env should appear in the dependency order (it's a root QUERY variable)
expect(dependencyData?.order).toContain('env');
// service depends on env, so it should also be in the order
expect(dependencyData?.order).toContain('service');
// env comes before service in topological order
const envIdx = dependencyData?.order.indexOf('env') ?? -1;
const svcIdx = dependencyData?.order.indexOf('service') ?? -1;
expect(envIdx).toBeLessThan(svcIdx);
});
it('should not include non-QUERY variables in dependency order', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.dependencyData).not.toBeNull();
// Only QUERY variables should be in the dependency order
result.dependencyData?.order.forEach((name) => {
expect(result.variableTypes[name]).toBe('QUERY');
});
});
it('should produce transitive descendants in dependency data', () => {
const variables: IDashboardVariables = {
region: createVariable({
name: 'region',
type: 'QUERY',
order: 0,
queryValue: 'SELECT region FROM table',
}),
cluster: createVariable({
name: 'cluster',
type: 'QUERY',
order: 1,
queryValue: 'SELECT cluster FROM table WHERE region={{.region}}',
}),
host: createVariable({
name: 'host',
type: 'QUERY',
order: 2,
queryValue: 'SELECT host FROM table WHERE cluster={{.cluster}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData: depData } = result;
expect(depData).not.toBeNull();
expect(depData?.transitiveDescendants).toBeDefined();
// region's transitive descendants should include cluster and host
expect(depData?.transitiveDescendants['region']).toEqual(
expect.arrayContaining(['cluster', 'host']),
);
});
it('should handle a single variable', () => {
const variables: IDashboardVariables = {
solo: createVariable({
name: 'solo',
type: 'QUERY',
order: 0,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(1);
expect(result.variableTypes).toEqual({ solo: 'QUERY' });
expect(result.dynamicVariableOrder).toEqual([]);
expect(result.dependencyData).not.toBeNull();
expect(result.dependencyData?.order).toEqual(['solo']);
});
it('should handle only non-QUERY variables', () => {
const variables: IDashboardVariables = {
custom1: createVariable({
name: 'custom1',
type: 'CUSTOM',
order: 0,
}),
text1: createVariable({
name: 'text1',
type: 'TEXTBOX',
order: 1,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 2,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(3);
// No QUERY variables, so dependency order should be empty
expect(result.dependencyData?.order).toEqual([]);
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
});
});
});
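For orientation, the dependency data produced for a simple chain has the following shape. The literal below is illustrative only; it mirrors the pre-change IDependencyData interface (including transitiveDescendants) and is not asserted anywhere in this diff:

// region → cluster → host, as in the transitive-descendants test above
const exampleDependencyData = {
  order: ['region', 'cluster', 'host'], // topological: parents before children
  graph: { region: ['cluster'], cluster: ['host'] }, // direct children
  parentDependencyGraph: { cluster: ['region'], host: ['cluster'] }, // direct parents
  transitiveDescendants: { region: ['cluster', 'host'], cluster: ['host'] }, // all reachable descendants
  hasCycle: false,
};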

View File

@@ -1,7 +1,4 @@
import { isEmpty, isUndefined } from 'lodash-es';
import createStore from '../store';
import { VariableFetchContext } from '../variableFetchStore';
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
import {
computeDerivedValues,
@@ -13,8 +10,6 @@ const initialState: IDashboardVariablesStoreState = {
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
};
export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
@@ -60,38 +55,3 @@ export function updateDashboardVariablesStore({
updateDerivedValues(draft);
});
}
/**
* Read current store snapshot as VariableFetchContext.
* Used by components to pass context to variableFetchStore actions
* without creating a circular import.
*/
export function getVariableDependencyContext(): VariableFetchContext {
const state = dashboardVariablesStore.getSnapshot();
// If every variable already has a selectedValue (e.g. persisted from
// localStorage/URL), dynamic variables can start in parallel.
// Otherwise they wait for query vars to settle first.
const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
(variable) => {
if (
variable.type === 'DYNAMIC' &&
(variable.selectedValue === null || isEmpty(variable.selectedValue)) &&
variable.allSelected === true
) {
return true;
}
return (
!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
);
},
);
return {
doAllVariablesHaveValuesSelected,
variableTypes: state.variableTypes,
dynamicVariableOrder: state.dynamicVariableOrder,
dependencyData: state.dependencyData,
};
}
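For context, a minimal sketch of the pre-change wiring that this diff removes, as seen from an assumed call site (function name and import paths are illustrative):

import { setDashboardVariablesStore } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariables } from './dashboardVariables/dashboardVariablesStoreTypes';
import { enqueueFetchOfAllVariables } from './variableFetchStore';

// Load variables into the store, then start a full fetch cycle.
// enqueueFetchOfAllVariables reads getVariableDependencyContext() internally,
// so the two stores never import each other at module level.
function bootstrapDashboardVariables(
  dashboardId: string,
  variables: IDashboardVariables,
): void {
  setDashboardVariablesStore({ dashboardId, variables });
  enqueueFetchOfAllVariables();
}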

View File

@@ -1,18 +1,11 @@
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export type VariableGraph = Record<string, string[]>;
export interface IDependencyData {
order: string[];
// Direct children for each variable
graph: VariableGraph;
// Direct parents for each variable
parentDependencyGraph: VariableGraph;
// Pre-computed transitive descendants for each node (all reachable nodes, not just direct children)
transitiveDescendants: VariableGraph;
hasCycle: boolean;
cycleNodes?: string[];
}
@@ -31,12 +24,6 @@ export interface IDashboardVariablesStoreState {
// Derived: dependency data for QUERY variables
dependencyData: IDependencyData | null;
// Derived: variable name → type mapping
variableTypes: Record<string, TVariableQueryType>;
// Derived: display-ordered list of dynamic variable names
dynamicVariableOrder: string[];
}
export interface IUseDashboardVariablesReturn {

View File

@@ -2,11 +2,9 @@ import {
buildDependencies,
buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -46,7 +44,6 @@ export function buildDependencyData(
order,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
} = buildDependencyGraph(dependencies);
@@ -61,62 +58,49 @@ export function buildDependencyData(
order: queryVariableOrder,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
};
}
/**
* Build a variable name → type mapping from sorted variables array
* Initialize the variable fetch store with the computed dependency data
*/
export function buildVariableTypesMap(
function initializeFetchStore(
sortedVariablesArray: IDashboardVariable[],
): Record<string, TVariableQueryType> {
const types: Record<string, TVariableQueryType> = {};
sortedVariablesArray.forEach((v) => {
if (v.name) {
types[v.name] = v.type;
}
});
return types;
}
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
/**
* Build display-ordered list of dynamic variable names
*/
export function buildDynamicVariableOrder(
sortedVariablesArray: IDashboardVariable[],
): string[] {
return sortedVariablesArray
.filter((v) => v.type === 'DYNAMIC' && v.name)
.map((v) => v.name as string);
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
): Pick<
IDashboardVariablesStoreState,
| 'sortedVariablesArray'
| 'dependencyData'
| 'variableTypes'
| 'dynamicVariableOrder'
'sortedVariablesArray' | 'dependencyData'
> {
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
const variableTypes = buildVariableTypesMap(sortedVariablesArray);
const dynamicVariableOrder = buildDynamicVariableOrder(sortedVariablesArray);
return {
sortedVariablesArray,
dependencyData,
variableTypes,
dynamicVariableOrder,
};
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
}
/**
@@ -128,8 +112,7 @@ export function updateDerivedValues(
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
draft.variableTypes = buildVariableTypesMap(draft.sortedVariablesArray);
draft.dynamicVariableOrder = buildDynamicVariableOrder(
draft.sortedVariablesArray,
);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
}
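After this change the fetch store is seeded as a side effect of computing derived values, so callers no longer pass dependency context around. A short sketch under that assumption (wrapper name and import paths are illustrative):

import { IDashboardVariables } from './dashboardVariablesStoreTypes';
import { computeDerivedValues } from './dashboardVariablesStoreUtils';

function recomputeDashboardVariables(variables: IDashboardVariables) {
  const { sortedVariablesArray, dependencyData } = computeDerivedValues(variables);
  // At this point initializeFetchStore has already seeded variableFetchStore with
  // dependencyData.graph and dependencyData.parentDependencyGraph.
  return { sortedVariablesArray, dependencyData };
}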

View File

@@ -1,12 +1,6 @@
import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from './dashboardVariables/dashboardVariablesStoreTypes';
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import createStore from './store';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from './variableFetchStoreUtils';
// Fetch state for each variable
export type VariableFetchState =
@@ -20,29 +14,19 @@ export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Track last update timestamp per variable
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
lastUpdated: Record<string, number>;
// Per-variable cycle counter — bumped when a variable needs to refetch.
// Used in react-query keys to auto-cancel stale requests for that variable only.
cycleIds: Record<string, number>;
}
/**
* Context from dashboardVariablesStore needed by fetch actions.
* Passed as parameter to avoid circular imports.
*/
export type VariableFetchContext = Pick<
IDashboardVariablesStoreState,
'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
doAllVariablesHaveValuesSelected: boolean;
};
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
cycleIds: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
@@ -52,183 +36,22 @@ export const variableFetchStore = createStore<IVariableFetchStoreState>(
// ============== Actions ==============
/**
* Initialize the store with variable names.
* Called when dashboard variables change — sets up state entries.
* Initialize the store with dependency graphs and set initial states
*/
export function initializeVariableFetchStore(variableNames: string[]): void {
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
variableFetchStore.update((draft) => {
// Initialize all variables to idle, preserving existing states
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving existing ready states
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
// Clean up stale entries for variables that no longer exist
const nameSet = new Set(variableNames);
Object.keys(draft.states).forEach((name) => {
if (!nameSet.has(name)) {
delete draft.states[name];
delete draft.lastUpdated[name];
delete draft.cycleIds[name];
}
});
});
}
/**
* Start a full fetch cycle for all fetchable variables.
* Called on: initial load, time range change, or dependency graph change.
*
* Query variables with no query-type parents start immediately.
* Query variables with query-type parents get 'waiting'.
* Dynamic variables start immediately if all variables already have
* selectedValues (e.g. persisted from localStorage/URL). Otherwise they
* wait for all query variables to settle first.
*/
export function enqueueFetchOfAllVariables(): void {
const {
doAllVariablesHaveValuesSelected,
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { order: queryVariableOrder, parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
// Query variables: root ones start immediately, dependent ones wait
queryVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
const parents = parentDependencyGraph[name] || [];
const hasQueryParents = parents.some((p) => variableTypes[p] === 'QUERY');
if (hasQueryParents) {
draft.states[name] = 'waiting';
} else {
draft.states[name] = resolveFetchState(draft, name);
}
});
// Dynamic variables: start immediately if query variables have values,
// otherwise wait for query variables to settle first
dynamicVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
draft.states[name] = doAllVariablesHaveValuesSelected
? resolveFetchState(draft, name)
: 'waiting';
});
});
}
/**
* Mark a variable as completed. Unblocks waiting query-type children.
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchComplete(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'idle';
draft.lastUpdated[name] = Date.now();
if (!dependencyData) {
return;
}
const { graph } = dependencyData;
// Unblock waiting query-type children
const children = graph[name] || [];
children.forEach((child) => {
if (variableTypes[child] === 'QUERY' && draft.states[child] === 'waiting') {
draft.states[child] = resolveFetchState(draft, child);
}
});
// If all query variables are settled, unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
/**
* Mark a variable as errored. Sets query-type descendants to idle
* (they can't proceed without this parent).
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchFailure(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'error';
if (!dependencyData) {
return;
}
// Set query-type descendants to idle (can't fetch without parent)
const descendants = dependencyData.transitiveDescendants[name] || [];
descendants.forEach((desc) => {
if (variableTypes[desc] === 'QUERY') {
draft.states[desc] = 'idle';
}
});
// If all query variables are settled (error counts), unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
/**
* Cascade a value change to query-type descendants.
* Called when a user changes a variable's value (not from a fetch cycle).
*
* Direct children whose parents are all settled start immediately.
* Deeper descendants wait until their parents complete (BFS order
* ensures parents are set before children within a single update).
*/
export function enqueueDescendantsOfVariable(name: string): void {
const { dependencyData, variableTypes } = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
const descendants = dependencyData.transitiveDescendants[name] || [];
const queryDescendants = descendants.filter(
(desc) => variableTypes[desc] === 'QUERY',
);
queryDescendants.forEach((desc) => {
draft.cycleIds[desc] = (draft.cycleIds[desc] || 0) + 1;
const parents = parentDependencyGraph[desc] || [];
const allParentsSettled = parents.every((p) => isSettled(draft.states[p]));
draft.states[desc] = allParentsSettled
? resolveFetchState(draft, desc)
: 'waiting';
});
});
}
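The per-variable cycleIds counter only pays off at the fetch call site. A minimal sketch of an assumed consumer building a query key from it (helper name, key prefix, and import path are illustrative, not part of this diff):

import { variableFetchStore } from './variableFetchStore';

// Bumping cycleIds[name] changes this key, so an in-flight request keyed on the
// old value becomes stale and can be cancelled for that variable only.
function buildVariableQueryKey(name: string): (string | number)[] {
  const cycleId = variableFetchStore.getSnapshot().cycleIds[name] ?? 0;
  return ['dashboardVariableValues', name, cycleId];
}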

View File

@@ -1,46 +0,0 @@
import { TVariableQueryType } from 'types/api/dashboard/getAll';
import {
IVariableFetchStoreState,
VariableFetchState,
} from './variableFetchStore';
export function isSettled(state: VariableFetchState | undefined): boolean {
return state === 'idle' || state === 'error';
}
/**
* Resolve the next fetch state based on whether the variable has been fetched before.
*/
export function resolveFetchState(
draft: IVariableFetchStoreState,
name: string,
): VariableFetchState {
return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}
/**
* Check if all query variables are settled (idle or error).
*/
export function areAllQueryVariablesSettled(
states: Record<string, VariableFetchState>,
variableTypes: Record<string, TVariableQueryType>,
): boolean {
return Object.entries(variableTypes)
.filter(([, type]) => type === 'QUERY')
.every(([name]) => isSettled(states[name]));
}
/**
* Transition dynamic variables currently in the 'waiting' state to loading/revalidating.
*/
export function unlockWaitingDynamicVariables(
draft: IVariableFetchStoreState,
dynamicVariableOrder: string[],
): void {
dynamicVariableOrder.forEach((dynName) => {
if (draft.states[dynName] === 'waiting') {
draft.states[dynName] = resolveFetchState(draft, dynName);
}
});
}

View File

@@ -47,6 +47,8 @@ export interface IDashboardContext {
allSelected: boolean,
isDynamic?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dashboardQueryRangeCalled: boolean;
setDashboardQueryRangeCalled: (value: boolean) => void;
selectedRowWidgetId: string | null;

View File

@@ -1,14 +1,13 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { EQueryType } from 'types/common/dashboard';
import { SuccessResponse, Warning } from '..';
import { Warning } from '..';
import {
IBuilderFormula,
IBuilderQuery,
IClickHouseQuery,
IPromQLQuery,
} from '../queryBuilder/queryBuilderData';
import { ExecStats } from '../v5/queryRange';
import { QueryData, QueryDataV3 } from '../widgets/getQuery';
export type QueryRangePayload = {
@@ -36,15 +35,8 @@ export interface MetricRangePayloadProps {
newResult: MetricRangePayloadV3;
warnings?: string[];
};
meta?: ExecStats;
}
/** Query range success response including optional warning and meta */
export type MetricQueryRangeSuccessResponse = SuccessResponse<
MetricRangePayloadProps,
unknown
> & { warning?: Warning; meta?: ExecStats };
export interface MetricRangePayloadV3 {
data: {
result: QueryDataV3[];
@@ -52,5 +44,4 @@ export interface MetricRangePayloadV3 {
warnings?: string[];
};
warning?: Warning;
meta?: ExecStats;
}

View File

@@ -7,5 +7,4 @@ export interface UpdateProfileProps {
number_of_services: number;
number_of_hosts: number;
where_did_you_discover_signoz: string;
timeline_for_migrating_to_signoz: string;
}

View File

@@ -334,7 +334,6 @@ export interface ExecStats {
rowsScanned: number;
bytesScanned: number;
durationMs: number;
stepIntervals: Record<string, number>;
}
export interface Label {

View File

@@ -4073,16 +4073,6 @@
"@radix-ui/react-primitive" "1.0.3"
"@radix-ui/react-slot" "1.0.2"
"@radix-ui/react-collection@1.1.7":
version "1.1.7"
resolved "https://registry.yarnpkg.com/@radix-ui/react-collection/-/react-collection-1.1.7.tgz#d05c25ca9ac4695cc19ba91f42f686e3ea2d9aec"
integrity sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==
dependencies:
"@radix-ui/react-compose-refs" "1.1.2"
"@radix-ui/react-context" "1.1.2"
"@radix-ui/react-primitive" "2.1.3"
"@radix-ui/react-slot" "1.2.3"
"@radix-ui/react-compose-refs@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.0.tgz#37595b1f16ec7f228d698590e78eeed18ff218ae"
@@ -4169,11 +4159,6 @@
dependencies:
"@babel/runtime" "^7.13.10"
"@radix-ui/react-direction@1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@radix-ui/react-direction/-/react-direction-1.1.1.tgz#39e5a5769e676c753204b792fbe6cf508e550a14"
integrity sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==
"@radix-ui/react-dismissable-layer@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.0.tgz#35b7826fa262fd84370faef310e627161dffa76b"
@@ -4402,22 +4387,6 @@
dependencies:
"@radix-ui/react-slot" "1.2.4"
"@radix-ui/react-radio-group@^1.3.4":
version "1.3.8"
resolved "https://registry.yarnpkg.com/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz#93f102b5b948d602c2f2adb1bc5c347cbaf64bd9"
integrity sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==
dependencies:
"@radix-ui/primitive" "1.1.3"
"@radix-ui/react-compose-refs" "1.1.2"
"@radix-ui/react-context" "1.1.2"
"@radix-ui/react-direction" "1.1.1"
"@radix-ui/react-presence" "1.1.5"
"@radix-ui/react-primitive" "2.1.3"
"@radix-ui/react-roving-focus" "1.1.11"
"@radix-ui/react-use-controllable-state" "1.2.2"
"@radix-ui/react-use-previous" "1.1.1"
"@radix-ui/react-use-size" "1.1.1"
"@radix-ui/react-roving-focus@1.0.4":
version "1.0.4"
resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz#e90c4a6a5f6ac09d3b8c1f5b5e81aab2f0db1974"
@@ -4434,21 +4403,6 @@
"@radix-ui/react-use-callback-ref" "1.0.1"
"@radix-ui/react-use-controllable-state" "1.0.1"
"@radix-ui/react-roving-focus@1.1.11":
version "1.1.11"
resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz#ef54384b7361afc6480dcf9907ef2fedb5080fd9"
integrity sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==
dependencies:
"@radix-ui/primitive" "1.1.3"
"@radix-ui/react-collection" "1.1.7"
"@radix-ui/react-compose-refs" "1.1.2"
"@radix-ui/react-context" "1.1.2"
"@radix-ui/react-direction" "1.1.1"
"@radix-ui/react-id" "1.1.1"
"@radix-ui/react-primitive" "2.1.3"
"@radix-ui/react-use-callback-ref" "1.1.1"
"@radix-ui/react-use-controllable-state" "1.2.2"
"@radix-ui/react-slot@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.0.0.tgz#7fa805b99891dea1e862d8f8fbe07f4d6d0fd698"
@@ -5121,20 +5075,6 @@
tailwind-merge "^2.5.2"
tailwindcss-animate "^1.0.7"
"@signozhq/radio-group@0.0.2":
version "0.0.2"
resolved "https://registry.yarnpkg.com/@signozhq/radio-group/-/radio-group-0.0.2.tgz#4b13567bfee2645226f2cf41f261bbb288e1be4b"
integrity sha512-ahykmA5hPujOC964CFveMlQ12tWSyut2CUiFRqT1QxRkOLS2R44Qn2hh2psqJJ18JMX/24ZYCAIh9Bdd5XW+7g==
dependencies:
"@radix-ui/react-icons" "^1.3.0"
"@radix-ui/react-radio-group" "^1.3.4"
"@radix-ui/react-slot" "^1.1.0"
class-variance-authority "^0.7.0"
clsx "^2.1.1"
lucide-react "^0.445.0"
tailwind-merge "^2.5.2"
tailwindcss-animate "^1.0.7"
"@signozhq/resizable@0.0.0":
version "0.0.0"
resolved "https://registry.yarnpkg.com/@signozhq/resizable/-/resizable-0.0.0.tgz#a517818b9f9bcdaeafc55ae134be86522bc90e9f"

View File

@@ -10,27 +10,6 @@ type Config struct {
type Templates struct {
Directory string `mapstructure:"directory"`
Format Format `mapstructure:"format"`
}
type Format struct {
Header Header `mapstructure:"header"`
Help Help `mapstructure:"help"`
Footer Footer `mapstructure:"footer"`
}
type Header struct {
Enabled bool `mapstructure:"enabled"`
LogoURL string `mapstructure:"logo_url"`
}
type Help struct {
Enabled bool `mapstructure:"enabled"`
Email string `mapstructure:"email"`
}
type Footer struct {
Enabled bool `mapstructure:"enabled"`
}
type SMTP struct {
@@ -66,19 +45,6 @@ func newConfig() factory.Config {
Enabled: false,
Templates: Templates{
Directory: "/root/templates",
Format: Format{
Header: Header{
Enabled: false,
LogoURL: "",
},
Help: Help{
Enabled: false,
Email: "",
},
Footer: Footer{
Enabled: false,
},
},
},
SMTP: SMTP{
Address: "localhost:25",

View File

@@ -15,7 +15,6 @@ type provider struct {
settings factory.ScopedProviderSettings
store emailtypes.TemplateStore
client *client.Client
config emailing.Config
}
func NewFactory() factory.ProviderFactory[emailing.Emailing, emailing.Config] {
@@ -56,12 +55,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
return nil, err
}
return &provider{
settings: settings,
store: store,
client: client,
config: config,
}, nil
return &provider{settings: settings, store: store, client: client}, nil
}
func (provider *provider) SendHTML(ctx context.Context, to string, subject string, templateName emailtypes.TemplateName, data map[string]any) error {
@@ -75,19 +69,8 @@ func (provider *provider) SendHTML(ctx context.Context, to string, subject strin
return err
}
// if no data is provided, create an empty map to prevent a panic when we add the format, to, and subject data
if data == nil {
data = make(map[string]any)
}
// the following are overridden if provided in the data map
data["format"] = provider.config.Templates.Format
data["to"] = to
data["subject"] = subject
content, err := emailtypes.NewContent(template, data)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to create email content", "error", err)
return err
}

View File

@@ -259,11 +259,6 @@ func (h *handler) DeleteUser(w http.ResponseWriter, r *http.Request) {
return
}
if claims.UserID == id {
render.Error(w, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "users cannot delete themselves"))
return
}
if err := h.module.DeleteUser(ctx, valuer.MustNewUUID(claims.OrgID), id, claims.UserID); err != nil {
render.Error(w, err)
return

View File

@@ -22,6 +22,8 @@ import (
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
type Module struct {
@@ -144,9 +146,11 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
continue
}
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"inviter_email": creator.Email,
"link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You are invited to join a team in SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"CustomerName": invites[i].Name,
"InviterName": creator.DisplayName,
"InviterEmail": creator.Email,
"Link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
@@ -257,6 +261,18 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
traits["updated_by"] = updatedBy
m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
// if the role is updated then send an email
if existingUser.Role != updatedUser.Role {
if err := m.emailing.SendHTML(ctx, existingUser.Email.String(), "Your Role Has Been Updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{
"CustomerName": existingUser.DisplayName,
"UpdatedByEmail": requestor.Email,
"OldRole": cases.Title(language.English).String(strings.ToLower(existingUser.Role.String())),
"NewRole": cases.Title(language.English).String(strings.ToLower(updatedUser.Role.String())),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
}
if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
return nil, err
}
@@ -378,9 +394,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
if err := module.emailing.SendHTML(
ctx,
user.Email.String(),
"A Password Reset Was Requested for SigNoz",
"Reset your SigNoz password",
emailtypes.TemplateNameResetPassword,
map[string]any{
"Name": user.DisplayName,
"Link": resetLink,
"Expiry": humanizedTokenLifetime,
},

View File

@@ -625,7 +625,7 @@ func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) (map[string][]s
// FilterNewSeries filters out items that are too new based on metadata first_seen timestamps.
// Returns the filtered series (old ones) excluding new series that are still within the grace period.
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*qbtypes.TimeSeries) ([]*qbtypes.TimeSeries, error) {
// Extract metric names and groupBy keys
metricToGroupedFields, err := r.extractMetricAndGroupBys(ctx)
if err != nil {
@@ -642,7 +642,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
seriesIdxToLookupKeys := make(map[int][]telemetrytypes.MetricMetadataLookupKey) // series index -> lookup keys
for i := 0; i < len(series); i++ {
metricLabelMap := series[i].Labels
metricLabelMap := series[i].LabelsMap()
// Collect groupBy attribute-value pairs for this series
seriesKeys := make([]telemetrytypes.MetricMetadataLookupKey, 0)
@@ -689,7 +689,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
}
// Filter series based on first_seen + delay
filteredSeries := make([]*v3.Series, 0, len(series))
filteredSeries := make([]*qbtypes.TimeSeries, 0, len(series))
evalTimeMs := ts.UnixMilli()
newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()
@@ -727,7 +727,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
// Check if first_seen + delay has passed
if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
// Still within grace period, skip this series
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].Labels)
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].LabelsMap())
continue
}

View File

@@ -26,33 +26,33 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
)
// createTestSeries creates a *v3.Series with the given labels and optional points
// createTestSeries creates a *qbtypes.TimeSeries with the given labels and optional values
// the points are not strictly needed: only the labels determine whether a series is new or old
// the labels form the lookup key for the series, and that key's first_seen timestamp is checked in the metadata table
func createTestSeries(labels map[string]string, points []v3.Point) *v3.Series {
func createTestSeries(labels map[string]string, points []*qbtypes.TimeSeriesValue) *qbtypes.TimeSeries {
if points == nil {
points = []v3.Point{}
points = []*qbtypes.TimeSeriesValue{}
}
return &v3.Series{
Labels: labels,
Points: points,
lbls := make([]*qbtypes.Label, 0, len(labels))
for k, v := range labels {
lbls = append(lbls, &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: k}, Value: v})
}
return &qbtypes.TimeSeries{
Labels: lbls,
Values: points,
}
}
// seriesEqual compares two v3.Series by their labels
// seriesEqual compares two *qbtypes.TimeSeries by their labels
// Returns true if the series have the same labels (order doesn't matter)
func seriesEqual(s1, s2 *v3.Series) bool {
if s1 == nil && s2 == nil {
return true
}
if s1 == nil || s2 == nil {
func seriesEqual(s1, s2 *qbtypes.TimeSeries) bool {
m1 := s1.LabelsMap()
m2 := s2.LabelsMap()
if len(m1) != len(m2) {
return false
}
if len(s1.Labels) != len(s2.Labels) {
return false
}
for k, v := range s1.Labels {
if s2.Labels[k] != v {
for k, v := range m1 {
if m2[k] != v {
return false
}
}
@@ -149,11 +149,11 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
type filterNewSeriesTestCase struct {
name string
compositeQuery *v3.CompositeQuery
series []*v3.Series
series []*qbtypes.TimeSeries
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectedFiltered []*qbtypes.TimeSeries // series that should be in the final filtered result (old enough)
expectError bool
}
@@ -193,7 +193,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
@@ -205,7 +205,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
}, // svc-old and svc-missing should be included; svc-new is filtered out
@@ -227,7 +227,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
},
@@ -237,7 +237,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
expectedFiltered: []*qbtypes.TimeSeries{}, // all should be filtered out (new series)
},
{
name: "all old series - ClickHouse query",
@@ -254,7 +254,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
},
@@ -264,7 +264,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
}, // all should be included (old series)
@@ -292,13 +292,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -322,13 +322,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -358,13 +358,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"status": "200"}, nil),
}, // series included as we can't decide if it's new or old
},
@@ -385,7 +385,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
},
@@ -393,7 +393,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
}, // both should be included - svc-old is old, svc-no-metadata can't be decided
@@ -413,7 +413,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
},
// Only provide metadata for service_name, not env
@@ -423,7 +423,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
}, // has some metadata, uses max first_seen which is old
},
@@ -453,11 +453,11 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{},
series: []*qbtypes.TimeSeries{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
expectedFiltered: []*qbtypes.TimeSeries{},
},
{
name: "zero delay - Builder",
@@ -485,13 +485,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // with zero delay, all series pass
},
@@ -526,7 +526,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: mergeFirstSeenMaps(
@@ -535,7 +535,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
},
@@ -565,7 +565,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
// service_name is old, env is new - should use max (new)
@@ -575,7 +575,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
expectedFiltered: []*qbtypes.TimeSeries{}, // max first_seen is new, so should be filtered out
},
{
name: "Logs query - should skip filtering and return empty skip indexes",
@@ -600,14 +600,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Logs queries should return early, no filtering - all included
@@ -635,14 +635,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Traces queries should return early, no filtering - all included
@@ -724,14 +724,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// Build a map to count occurrences of each unique label combination in expected series
expectedCounts := make(map[string]int)
for _, expected := range tt.expectedFiltered {
key := labelsKey(expected.Labels)
key := labelsKey(expected.LabelsMap())
expectedCounts[key]++
}
// Build a map to count occurrences of each unique label combination in filtered series
actualCounts := make(map[string]int)
for _, filtered := range filteredSeries {
key := labelsKey(filtered.Labels)
key := labelsKey(filtered.LabelsMap())
actualCounts[key]++
}

View File

@@ -12,19 +12,18 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/formatter"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
)
type PromRule struct {
*BaseRule
version string
prometheus prometheus.Prometheus
}
@@ -48,7 +47,6 @@ func NewPromRule(
p := PromRule{
BaseRule: baseRule,
version: postableRule.Version,
prometheus: prometheus,
}
p.logger = logger
@@ -83,48 +81,30 @@ func (r *PromRule) GetSelectedQuery() string {
}
func (r *PromRule) getPqlQuery() (string, error) {
if r.version == "v5" {
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule setup")
}
if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
if len(r.ruleCondition.CompositeQuery.PromQueries) > 0 {
selectedQuery := r.GetSelectedQuery()
if promQuery, ok := r.ruleCondition.CompositeQuery.PromQueries[selectedQuery]; ok {
query := promQuery.Query
if query == "" {
return query, fmt.Errorf("a promquery needs to be set for this rule to function")
}
return query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule query")
return "", fmt.Errorf("invalid promql rule setup")
}
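For reference, a minimal sketch of the v5 composite-query shape the simplified getPqlQuery now expects: a queries envelope carrying a PromQuery spec. The metric name is illustrative, and the v3/qbtypes imports are assumed to match the ones used elsewhere in this diff.

cq := &v3.CompositeQuery{
	QueryType: v3.QueryTypePromQL,
	Queries: []qbtypes.QueryEnvelope{
		{
			Type: qbtypes.QueryTypePromQL,
			Spec: qbtypes.PromQuery{Name: "A", Query: "sum(rate(http_requests_total[5m]))"},
		},
	},
}
// getPqlQuery walks cq.Queries, matches the envelope whose Name equals the
// selected query ("A" here), and returns its Query string; any other setup
// falls through to the "invalid promql rule setup" error.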
func (r *PromRule) matrixToV3Series(res promql.Matrix) []*v3.Series {
v3Series := make([]*v3.Series, 0, len(res))
func matrixToTimeSeries(res promql.Matrix) []*qbtypes.TimeSeries {
result := make([]*qbtypes.TimeSeries, 0, len(res))
for _, series := range res {
commonSeries := toCommonSeries(series)
v3Series = append(v3Series, &commonSeries)
result = append(result, promSeriesToTimeSeries(series))
}
return v3Series
return result
}
func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
@@ -143,31 +123,31 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
return nil, err
}
matrixToProcess := r.matrixToV3Series(res)
seriesToProcess := matrixToTimeSeries(res)
hasData := len(matrixToProcess) > 0
hasData := len(seriesToProcess) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
// Filter out new series if newGroupEvalDelay is configured
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
} else {
matrixToProcess = filteredSeries
seriesToProcess = filteredSeries
}
}
var resultVector ruletypes.Vector
for _, series := range matrixToProcess {
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(
ctx, "not enough data points to evaluate series, skipping",
"rule_id", r.ID(), "num_points", len(series.Points), "required_points", r.Condition().RequiredNumPoints,
"rule_id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
)
continue
}
@@ -454,26 +434,25 @@ func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time
}
}
func toCommonSeries(series promql.Series) v3.Series {
commonSeries := v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0),
func promSeriesToTimeSeries(series promql.Series) *qbtypes.TimeSeries {
ts := &qbtypes.TimeSeries{
Labels: make([]*qbtypes.Label, 0, len(series.Metric)),
Values: make([]*qbtypes.TimeSeriesValue, 0, len(series.Floats)),
}
for _, lbl := range series.Metric {
commonSeries.Labels[lbl.Name] = lbl.Value
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
lbl.Name: lbl.Value,
ts.Labels = append(ts.Labels, &qbtypes.Label{
Key: telemetrytypes.TelemetryFieldKey{Name: lbl.Name},
Value: lbl.Value,
})
}
for _, f := range series.Floats {
commonSeries.Points = append(commonSeries.Points, v3.Point{
ts.Values = append(ts.Values, &qbtypes.TimeSeriesValue{
Timestamp: f.T,
Value: f.F,
})
}
return commonSeries
return ts
}
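A small illustrative check of the new conversion helper; the label and sample values are made up, and the Prometheus model/labels package is assumed to be imported as labels.

in := promql.Series{
	Metric: labels.FromStrings("service_name", "svc1"),
	Floats: []promql.FPoint{{T: 1000, F: 0.5}, {T: 2000, F: 0.7}},
}
out := promSeriesToTimeSeries(in)
// out.Labels holds one entry keyed "service_name" with value "svc1";
// out.Values holds two entries carrying the same millisecond timestamps
// and float values as the prom samples.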

View File

@@ -20,6 +20,7 @@ import (
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -47,9 +48,13 @@ func TestPromRuleEval(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "dummy_query", // This is not used in the test
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "dummy_query", // This is not used in the test
},
},
},
},
@@ -62,7 +67,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp string
matchType string
target float64
expectedAlertSample v3.Point
expectedAlertSample float64
expectedVectorValues []float64 // Expected values in result vector
}{
// Test cases for Equals Always
@@ -80,7 +85,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
expectedVectorValues: []float64{0.0},
},
{
@@ -145,7 +150,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
expectedVectorValues: []float64{0.0},
},
{
@@ -162,7 +167,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
},
{
values: pql.Series{
@@ -178,7 +183,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
},
{
values: pql.Series{
@@ -211,7 +216,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "2", // Always
target: 1.5,
expectedAlertSample: v3.Point{Value: 2.0},
expectedAlertSample: 2.0,
expectedVectorValues: []float64{2.0},
},
{
@@ -228,7 +233,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Above
matchType: "2", // Always
target: 2.0,
expectedAlertSample: v3.Point{Value: 3.0},
expectedAlertSample: 3.0,
},
{
values: pql.Series{
@@ -244,7 +249,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Below
matchType: "2", // Always
target: 13.0,
expectedAlertSample: v3.Point{Value: 12.0},
expectedAlertSample: 12.0,
},
{
values: pql.Series{
@@ -276,7 +281,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "1", // Once
target: 4.5,
expectedAlertSample: v3.Point{Value: 10.0},
expectedAlertSample: 10.0,
expectedVectorValues: []float64{10.0},
},
{
@@ -339,7 +344,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -371,7 +376,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -402,7 +407,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -418,7 +423,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
// Test cases for Less Than Always
{
@@ -435,7 +440,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "2", // Always
target: 4,
expectedAlertSample: v3.Point{Value: 1.5},
expectedAlertSample: 1.5,
},
{
values: pql.Series{
@@ -467,7 +472,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "1", // Once
target: 4,
expectedAlertSample: v3.Point{Value: 2.5},
expectedAlertSample: 2.5,
},
{
values: pql.Series{
@@ -499,7 +504,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "3", // OnAverage
target: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -530,7 +535,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -561,7 +566,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -577,7 +582,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "3", // OnAverage
target: 12.0,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
// Test cases for InTotal
{
@@ -594,7 +599,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: v3.Point{Value: 30.0},
expectedAlertSample: 30.0,
},
{
values: pql.Series{
@@ -621,7 +626,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "4", // InTotal
target: 9.0,
expectedAlertSample: v3.Point{Value: 10.0},
expectedAlertSample: 10.0,
},
{
values: pql.Series{
@@ -645,7 +650,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "4", // InTotal
target: 10.0,
expectedAlertSample: v3.Point{Value: 20.0},
expectedAlertSample: 20.0,
},
{
values: pql.Series{
@@ -670,7 +675,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: v3.Point{Value: 20.0},
expectedAlertSample: 20.0,
},
{
values: pql.Series{
@@ -708,7 +713,7 @@ func TestPromRuleEval(t *testing.T) {
assert.NoError(t, err)
}
resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
resultVectors, err := rule.Threshold.Eval(*promSeriesToTimeSeries(c.values), rule.Unit(), ruletypes.EvalData{})
assert.NoError(t, err)
// Compare full result vector with expected vector
@@ -724,12 +729,12 @@ func TestPromRuleEval(t *testing.T) {
if len(resultVectors) > 0 {
found := false
for _, sample := range resultVectors {
if sample.V == c.expectedAlertSample.Value {
if sample.V == c.expectedAlertSample {
found = true
break
}
}
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues)
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample, idx, actualValues)
}
} else {
assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
@@ -754,9 +759,13 @@ func TestPromRuleUnitCombinations(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1013,9 +1022,13 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1124,9 +1137,13 @@ func TestMultipleThresholdPromRule(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1361,8 +1378,11 @@ func TestPromRule_NoData(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1486,8 +1506,11 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1635,8 +1658,11 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
},

View File

@@ -1,38 +1,24 @@
package rules
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log/slog"
"math"
"net/url"
"reflect"
"text/template"
"time"
"github.com/SigNoz/signoz/pkg/contextlinks"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/formatter"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -42,23 +28,9 @@ import (
type ThresholdRule struct {
*BaseRule
// Ever since we introduced the new metrics query builder, the version is "v4"
// for all the rules
// if the version is "v3", then we use the old querier
// if the version is "v4", then we use the new querierV2
version string
// querier is used for alerts created before the introduction of new metrics query builder
querier interfaces.Querier
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
// querierV5 is the query builder v5 querier used for all alert rule evaluation
querierV5 querierV5.Querier
// used for attribute metadata enrichment for logs and traces
logsKeys map[string]v3.AttributeKey
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
@@ -82,25 +54,10 @@ func NewThresholdRule(
}
t := ThresholdRule{
BaseRule: baseRule,
version: p.Version,
BaseRule: baseRule,
querierV5: querierV5,
}
querierOption := querier.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querier = querier.NewQuerier(querierOption)
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.querierV5 = querierV5
t.reader = reader
return &t, nil
}
@@ -120,169 +77,9 @@ func (r *ThresholdRule) Type() ruletypes.RuleType {
return ruletypes.RuleTypeThreshold
}
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
params := &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: &v3.CompositeQuery{
QueryType: r.ruleCondition.CompositeQuery.QueryType,
PanelType: r.ruleCondition.CompositeQuery.PanelType,
BuilderQueries: make(map[string]*v3.BuilderQuery),
ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
PromQueries: make(map[string]*v3.PromQuery),
Unit: r.ruleCondition.CompositeQuery.Unit,
},
Variables: make(map[string]interface{}),
NoCache: true,
}
querytemplate.AssignReservedVarsV3(params)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
return nil, err
}
var query bytes.Buffer
err = tmpl.Execute(&query, params.Variables)
if err != nil {
return nil, err
}
params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
Query: query.String(),
Disabled: chQuery.Disabled,
Legend: chQuery.Legend,
}
}
return params, nil
}
if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
q.StepInterval = minStep
}
q.SetShiftByFromFunc()
if q.DataSource == v3.DataSourceMetrics {
// if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes
// if the time range is greater than 1 week, set the step interval to be multiple of 30 mins
if end-start >= 24*time.Hour.Milliseconds() && end-start < 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/300)) * 300
} else if end-start >= 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/1800)) * 1800
}
}
}
}
if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: r.ruleCondition.CompositeQuery,
Variables: make(map[string]interface{}),
NoCache: true,
}, nil
}
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToLogsV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceLogs {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)
return contextlinks.PrepareLinksToLogs(start, end, filterItems)
}
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToTracesV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceTraces {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)
return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}
func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
@@ -302,10 +99,10 @@ func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (
return req, nil
}
func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
@@ -342,10 +139,10 @@ func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time,
return contextlinks.PrepareLinksToLogsV5(start, end, whereClause)
}
func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
@@ -391,115 +188,6 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
hasTracesQuery := false
for _, query := range params.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceLogs {
hasLogsQuery = true
}
if query.DataSource == v3.DataSourceTraces {
hasTracesQuery = true
}
}
if hasLogsQuery {
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(params) {
logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if apiErr != nil {
return nil, apiErr.ToError()
}
logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
r.logsKeys = logsKeys
logsv3.Enrich(params, logsKeys)
}
}
if hasTracesQuery {
spanKeys, err := r.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if err != nil {
return nil, err
}
r.spansKeys = spanKeys
tracesV4.Enrich(params, spanKeys)
}
}
var results []*v3.Result
var queryErrors map[string]error
if r.version == "v4" {
results, queryErrors, err = r.querierV2.QueryRange(ctx, orgID, params)
} else {
results, queryErrors, err = r.querier.QueryRange(ctx, orgID, params)
}
if err != nil {
r.logger.ErrorContext(ctx, "failed to get alert query range result", "rule_name", r.Name(), "error", err, "query_errors", queryErrors)
return nil, fmt.Errorf("internal error while querying")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
results, err = postprocess.PostProcessResult(results, params)
if err != nil {
r.logger.ErrorContext(ctx, "failed to post process result", "rule_name", r.Name(), "error", err)
return nil, fmt.Errorf("internal error while post processing")
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
if queryResult == nil {
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
return resultVector, nil
}
for _, series := range queryResult.Series {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, resultSeries...)
}
return resultVector, nil
}
func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
var results []*v3.Result
v5Result, err := r.querierV5.QueryRange(ctx, orgID, params)
if err != nil {
@@ -507,26 +195,24 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
return nil, fmt.Errorf("internal error while querying")
}
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok {
results = append(results, transition.ConvertV5TimeSeriesDataToV4Result(tsData))
} else {
// NOTE: should not happen but just to ensure we don't miss it if it happens for some reason
r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got", "item_type", reflect.TypeOf(item))
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
var queryResult *qbtypes.TimeSeriesData
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok && tsData.QueryName == selectedQuery {
queryResult = tsData
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
var allSeries []*qbtypes.TimeSeries
if queryResult != nil {
for _, bucket := range queryResult.Aggregations {
allSeries = append(allSeries, bucket.Series...)
}
}
hasData := len(allSeries) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
@@ -539,7 +225,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
}
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.Series
seriesToProcess := allSeries
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -552,7 +238,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -573,16 +259,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
}
res, err := r.buildAndRunQuery(ctx, r.orgID, ts)
if err != nil {
return 0, err


File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -167,6 +167,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
)
}

View File

@@ -0,0 +1,194 @@
package sqlmigration
import (
"context"
"database/sql"
"encoding/json"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type migrateRulesV4ToV5 struct {
store sqlstore.SQLStore
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
func NewMigrateRulesV4ToV5Factory(
store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("migrate_rules_v4_to_v5"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &migrateRulesV4ToV5{
store: store,
telemetryStore: telemetryStore,
logger: ps.Logger,
}, nil
})
}
func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT name
FROM (
SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
INTERSECT
SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
)
ORDER BY name
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT tagKey
FROM signoz_traces.distributed_span_attributes_keys
WHERE tagType IN ('tag', 'resource')
GROUP BY tagKey
HAVING COUNT(DISTINCT tagType) > 1
ORDER BY tagKey
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
logsKeys, err := migration.getLogDuplicateKeys(ctx)
if err != nil {
return err
}
tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
if err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var rules []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err = tx.NewSelect().
Table("rule").
Column("id", "data").
Scan(ctx, &rules)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, rule := range rules {
version, _ := rule.Data["version"].(string)
if version == "v5" {
continue
}
migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)
// Check if the queries envelope already exists and is non-empty
hasQueriesEnvelope := false
if condition, ok := rule.Data["condition"].(map[string]any); ok {
if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
hasQueriesEnvelope = true
}
}
}
if hasQueriesEnvelope {
// Case 2: Already has queries envelope, just bump version
migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
rule.Data["version"] = "v5"
} else {
// Case 1: Old format, run full migration
migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
alertsMigrator.Migrate(ctx, rule.Data)
// Force version to v5 regardless of Migrate return value
rule.Data["version"] = "v5"
}
dataJSON, err := json.Marshal(rule.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("rule").
Set("data = ?", string(dataJSON)).
Where("id = ?", rule.ID).
Exec(ctx)
if err != nil {
return err
}
}
return tx.Commit()
}
func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
return nil
}
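To make the two branches above concrete, a minimal sketch of the data shapes involved; the field values are illustrative, and the actual rewrite of old-format rules is delegated to transition.NewAlertMigrateV5.

// Case 2: compositeQuery already carries a non-empty "queries" envelope,
// so Up only bumps the version.
alreadyEnveloped := map[string]any{
	"version": "v4",
	"condition": map[string]any{
		"compositeQuery": map[string]any{
			"queries": []any{map[string]any{"type": "promql"}},
		},
	},
}
// After Up: alreadyEnveloped["version"] == "v5"; nothing else is touched.
//
// Case 1: no queries envelope, so alertsMigrator.Migrate rewrites the rule
// data into the v5 shape and the version is then forced to "v5".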

View File

@@ -1,102 +0,0 @@
package transition
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// ConvertV5TimeSeriesDataToV4Result converts v5 TimeSeriesData to v4 Result
func ConvertV5TimeSeriesDataToV4Result(v5Data *qbtypes.TimeSeriesData) *v3.Result {
if v5Data == nil {
return nil
}
result := &v3.Result{
QueryName: v5Data.QueryName,
Series: make([]*v3.Series, 0),
}
toV4Series := func(ts *qbtypes.TimeSeries) *v3.Series {
series := &v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0, len(ts.Values)),
}
for _, label := range ts.Labels {
valueStr := fmt.Sprintf("%v", label.Value)
series.Labels[label.Key.Name] = valueStr
}
if len(series.Labels) > 0 {
series.LabelsArray = append(series.LabelsArray, series.Labels)
}
for _, tsValue := range ts.Values {
if tsValue.Partial {
continue
}
point := v3.Point{
Timestamp: tsValue.Timestamp,
Value: tsValue.Value,
}
series.Points = append(series.Points, point)
}
return series
}
for _, aggBucket := range v5Data.Aggregations {
for _, ts := range aggBucket.Series {
result.Series = append(result.Series, toV4Series(ts))
}
if len(aggBucket.AnomalyScores) != 0 {
result.AnomalyScores = make([]*v3.Series, 0)
for _, ts := range aggBucket.AnomalyScores {
result.AnomalyScores = append(result.AnomalyScores, toV4Series(ts))
}
}
if len(aggBucket.PredictedSeries) != 0 {
result.PredictedSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.PredictedSeries {
result.PredictedSeries = append(result.PredictedSeries, toV4Series(ts))
}
}
if len(aggBucket.LowerBoundSeries) != 0 {
result.LowerBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.LowerBoundSeries {
result.LowerBoundSeries = append(result.LowerBoundSeries, toV4Series(ts))
}
}
if len(aggBucket.UpperBoundSeries) != 0 {
result.UpperBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.UpperBoundSeries {
result.UpperBoundSeries = append(result.UpperBoundSeries, toV4Series(ts))
}
}
}
return result
}
// ConvertV5TimeSeriesDataSliceToV4Results converts a slice of v5 TimeSeriesData to v4 QueryRangeResponse
func ConvertV5TimeSeriesDataSliceToV4Results(v5DataSlice []*qbtypes.TimeSeriesData) *v3.QueryRangeResponse {
response := &v3.QueryRangeResponse{
ResultType: "matrix", // Time series data is typically "matrix" type
Result: make([]*v3.Result, 0, len(v5DataSlice)),
}
for _, v5Data := range v5DataSlice {
if result := ConvertV5TimeSeriesDataToV4Result(v5Data); result != nil {
response.Result = append(response.Result, result)
}
}
return response
}

View File

@@ -12,12 +12,13 @@ import (
var (
// Templates is a list of all the templates that are supported by the emailing service.
// This list should be updated whenever a new template is added.
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameResetPassword}
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameUpdateRole, TemplateNameResetPassword}
)
var (
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password")}
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation_email")}
TemplateNameUpdateRole = TemplateName{valuer.NewString("update_role")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password_email")}
)
type TemplateName struct{ valuer.String }
@@ -26,6 +27,8 @@ func NewTemplateName(name string) (TemplateName, error) {
switch name {
case TemplateNameInvitationEmail.StringValue():
return TemplateNameInvitationEmail, nil
case TemplateNameUpdateRole.StringValue():
return TemplateNameUpdateRole, nil
case TemplateNameResetPassword.StringValue():
return TemplateNameResetPassword, nil
default:
@@ -37,7 +40,7 @@ func NewContent(template *template.Template, data map[string]any) ([]byte, error
buf := bytes.NewBuffer(nil)
err := template.Execute(buf, data)
if err != nil {
return nil, err
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute template")
}
return buf.Bytes(), nil
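Illustrative use of the renamed and newly added template names, written as if inside the same package; the old string values (e.g. "invitation") presumably now fall through to the default error branch.

name, err := NewTemplateName("update_role")
// name == TemplateNameUpdateRole, err == nil
_, err = NewTemplateName("invitation") // old value, no longer matched
_, _ = name, err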

View File

@@ -76,6 +76,33 @@ type TimeSeries struct {
Values []*TimeSeriesValue `json:"values"`
}
// LabelsMap converts the label slice to a map[string]string for use in
// alert evaluation helpers that operate on flat label maps.
func (ts *TimeSeries) LabelsMap() map[string]string {
if ts == nil {
return nil
}
m := make(map[string]string, len(ts.Labels))
for _, l := range ts.Labels {
m[l.Key.Name] = fmt.Sprintf("%v", l.Value)
}
return m
}
// NonPartialValues returns only the values where Partial is false.
func (ts *TimeSeries) NonPartialValues() []*TimeSeriesValue {
if ts == nil {
return nil
}
result := make([]*TimeSeriesValue, 0, len(ts.Values))
for _, v := range ts.Values {
if !v.Partial {
result = append(result, v)
}
}
return result
}
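A quick illustrative use of the two new helpers; the label and sample values are made up.

ts := &TimeSeries{
	Labels: []*Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service_name"}, Value: "svc1"}},
	Values: []*TimeSeriesValue{
		{Timestamp: 1000, Value: 0.5},
		{Timestamp: 2000, Value: 0.7, Partial: true},
	},
}
m := ts.LabelsMap()           // map[string]string{"service_name": "svc1"}
vals := ts.NonPartialValues() // keeps only the first value; the partial one is dropped
_, _ = m, vals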
type Label struct {
Key telemetrytypes.TelemetryFieldKey `json:"key"`
Value any `json:"value"`

View File

@@ -203,11 +203,11 @@ func (rc *RuleCondition) IsValid() bool {
}
// ShouldEval checks if the further series should be evaluated at all for alerts.
func (rc *RuleCondition) ShouldEval(series *v3.Series) bool {
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
if rc == nil {
return true
}
return !rc.RequireMinPoints || len(series.Points) >= rc.RequiredNumPoints
return !rc.RequireMinPoints || len(series.NonPartialValues()) >= rc.RequiredNumPoints
}
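Illustrative behaviour of the updated check (values made up): partial points no longer count toward the required minimum.

rc := &RuleCondition{RequireMinPoints: true, RequiredNumPoints: 2}
ok := rc.ShouldEval(&qbtypes.TimeSeries{
	Values: []*qbtypes.TimeSeriesValue{
		{Timestamp: 1000, Value: 1.0},
		{Timestamp: 2000, Value: 2.0, Partial: true},
	},
})
// ok == false: only one non-partial value, two required.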
// QueryType is a shorthand method to get query type

View File

@@ -355,6 +355,14 @@ func (r *PostableRule) validate() error {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
}
if r.Version != "" && r.Version != "v5" {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
}
if r.Version == "v5" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) == 0 {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "queries envelope is required in compositeQuery"))
}
if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
}
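For clarity, a sketch (within the ruletypes package, field values illustrative) of payloads the two new checks reject:

// A non-empty version other than "v5":
r1 := PostableRule{Version: "v4", RuleCondition: &RuleCondition{CompositeQuery: &v3.CompositeQuery{}}}
// validate() appends: only version v5 is supported, got "v4"

// Version "v5" without a queries envelope in the composite query:
r2 := PostableRule{
	Version:       "v5",
	RuleCondition: &RuleCondition{CompositeQuery: &v3.CompositeQuery{}},
}
// validate() appends: queries envelope is required in compositeQuery
_, _ = r1, r2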

View File

@@ -8,6 +8,8 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestIsAllQueriesDisabled(t *testing.T) {
@@ -621,9 +623,9 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
}
// Test that threshold can evaluate properly
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: map[string]string{"test": "label"},
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -698,9 +700,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}
// Test with a value that should trigger both WARNING and CRITICAL thresholds
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: map[string]string{"service": "test"},
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -708,9 +710,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
assert.Equal(t, 2, len(vector))
vector, err = threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: map[string]string{"service": "test"},
vector, err = threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -723,7 +725,7 @@ func TestAnomalyNegationEval(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series v3.Series
series qbtypes.TimeSeries
shouldAlert bool
expectedValue float64
}{
@@ -751,9 +753,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
@@ -785,9 +787,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
@@ -818,9 +820,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
@@ -852,9 +854,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
@@ -885,9 +887,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below and all the times
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
@@ -920,9 +922,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
@@ -954,10 +956,10 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(8) >= 7, alert
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
@@ -988,9 +990,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
@@ -1022,9 +1024,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 60.0}, // below, should alert
{Timestamp: 2000, Value: 90.0},
},

View File

@@ -8,8 +8,8 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -83,7 +83,7 @@ func (eval EvalData) HasActiveAlert(sampleLabelFp uint64) bool {
type RuleThreshold interface {
// Eval runs the given series through the threshold rules
// using the given EvalData and returns the matching series
Eval(series v3.Series, unit string, evalData EvalData) (Vector, error)
Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error)
GetRuleReceivers() []RuleReceivers
}
@@ -122,10 +122,11 @@ func (r BasicRuleThresholds) Validate() error {
return errors.Join(errs...)
}
func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalData) (Vector, error) {
func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
var resultVector Vector
thresholds := []BasicRuleThreshold(r)
sortThresholds(thresholds)
seriesLabels := series.LabelsMap()
for _, threshold := range thresholds {
smpl, shouldAlert := threshold.shouldAlert(series, unit)
if shouldAlert {
@@ -137,15 +138,15 @@ func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalDa
resultVector = append(resultVector, smpl)
continue
} else if evalData.SendUnmatched {
// Sanitise the series points to remove any NaN or Inf values
series.Points = removeGroupinSetPoints(series)
if len(series.Points) == 0 {
// Sanitise the series values to remove any NaN, Inf, or partial values
values := filterValidValues(series.Values)
if len(values) == 0 {
continue
}
// prepare the sample with the first point of the series
// prepare the sample with the first value of the series
smpl := Sample{
Point: Point{T: series.Points[0].Timestamp, V: series.Points[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Point: Point{T: values[0].Timestamp, V: values[0].Value},
Metric: PrepareSampleLabelsForRule(seriesLabels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
}
@@ -160,7 +161,7 @@ func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalDa
if threshold.RecoveryTarget == nil {
continue
}
sampleLabels := PrepareSampleLabelsForRule(series.Labels, threshold.Name)
sampleLabels := PrepareSampleLabelsForRule(seriesLabels, threshold.Name)
alertHash := sampleLabels.Hash()
// check if alert is active and then check if recovery threshold matches
if evalData.HasActiveAlert(alertHash) {
@@ -255,18 +256,23 @@ func (b BasicRuleThreshold) Validate() error {
return errors.Join(errs...)
}
func (b BasicRuleThreshold) matchesRecoveryThreshold(series v3.Series, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) matchesRecoveryThreshold(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.recoveryTarget(ruleUnit))
}
func (b BasicRuleThreshold) shouldAlert(series v3.Series, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlert(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.target(ruleUnit))
}
func removeGroupinSetPoints(series v3.Series) []v3.Point {
var result []v3.Point
for _, s := range series.Points {
if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
result = append(result, s)
// filterValidValues returns only the values that are valid for alert evaluation:
// non-partial, non-NaN, non-Inf, and with non-negative timestamps.
func filterValidValues(values []*qbtypes.TimeSeriesValue) []*qbtypes.TimeSeriesValue {
var result []*qbtypes.TimeSeriesValue
for _, v := range values {
if v.Partial {
continue
}
if v.Timestamp >= 0 && !math.IsNaN(v.Value) && !math.IsInf(v.Value, 0) {
result = append(result, v)
}
}
return result
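Illustrative input/output for the new helper; the sample values are made up.

in := []*qbtypes.TimeSeriesValue{
	{Timestamp: 1000, Value: 1.5},
	{Timestamp: 2000, Value: math.NaN()},
	{Timestamp: 3000, Value: math.Inf(1)},
	{Timestamp: 4000, Value: 2.5, Partial: true},
}
out := filterValidValues(in)
// out keeps only the first entry: NaN, Inf, and partial samples are dropped.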
@@ -284,15 +290,15 @@ func PrepareSampleLabelsForRule(seriesLabels map[string]string, thresholdName st
return lb.Labels()
}
func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float64) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, target float64) (Sample, bool) {
var shouldAlert bool
var alertSmpl Sample
lbls := PrepareSampleLabelsForRule(series.Labels, b.Name)
lbls := PrepareSampleLabelsForRule(series.LabelsMap(), b.Name)
series.Points = removeGroupinSetPoints(series)
values := filterValidValues(series.Values)
// nothing to evaluate
if len(series.Points) == 0 {
if len(values) == 0 {
return alertSmpl, false
}
@@ -300,7 +306,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case AtleastOnce:
// If any sample matches the condition, the rule is firing.
if b.CompareOp == ValueIsAbove {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value > target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -308,7 +314,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -316,7 +322,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value == target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -324,7 +330,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value != target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -332,7 +338,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range series.Points {
for _, smpl := range values {
if math.Abs(smpl.Value) >= target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -345,7 +351,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
shouldAlert = true
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value <= target {
shouldAlert = false
break
@@ -354,7 +360,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
// use min value from the series
if shouldAlert {
var minValue float64 = math.Inf(1)
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value < minValue {
minValue = smpl.Value
}
@@ -362,7 +368,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value >= target {
shouldAlert = false
break
@@ -370,7 +376,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
if shouldAlert {
var maxValue float64 = math.Inf(-1)
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value > maxValue {
maxValue = smpl.Value
}
@@ -378,14 +384,14 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value != target {
shouldAlert = false
break
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value == target {
shouldAlert = false
break
@@ -393,7 +399,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
// use any non-inf or nan value from the series
if shouldAlert {
for _, smpl := range series.Points {
for _, smpl := range values {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
break
@@ -401,7 +407,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range series.Points {
for _, smpl := range values {
if math.Abs(smpl.Value) < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
@@ -412,7 +418,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case OnAverage:
// If the average of all samples matches the condition, the rule is firing.
var sum, count float64
for _, smpl := range series.Points {
for _, smpl := range values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -446,7 +452,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
// If the sum of all samples matches the condition, the rule is firing.
var sum float64
for _, smpl := range series.Points {
for _, smpl := range values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -477,21 +483,22 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
lastValue := values[len(values)-1].Value
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
if series.Points[len(series.Points)-1].Value > target {
if lastValue > target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsBelow {
if series.Points[len(series.Points)-1].Value < target {
if lastValue < target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsEq {
if series.Points[len(series.Points)-1].Value == target {
if lastValue == target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsNotEq {
if series.Points[len(series.Points)-1].Value != target {
if lastValue != target {
shouldAlert = true
}
}
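Note on the change above: the evaluation now iterates `values` (a `[]*qbtypes.TimeSeriesValue`, each carrying `Value` and `Timestamp`) instead of `series.Points` from `v3.Series`. Below is a minimal standalone sketch of the `AtleastOnce` and `OnAverage` behaviour over such a slice, with a local struct standing in for `qbtypes.TimeSeriesValue` — illustrative only, not the repository's `shouldAlertWithTarget`.

// Illustrative sketch only; a local type stands in for qbtypes.TimeSeriesValue.
package main

import (
	"fmt"
	"math"
)

type tsValue struct { // mirrors the Value/Timestamp shape used in the hunks above
	Value     float64
	Timestamp int64
}

// atleastOnceAbove: fire if any sample is above target.
func atleastOnceAbove(values []*tsValue, target float64) bool {
	for _, smpl := range values {
		if smpl.Value > target {
			return true
		}
	}
	return false
}

// onAverageAbove: fire if the mean of the finite samples is above target,
// skipping NaN/Inf the same way the evaluation loop above does.
func onAverageAbove(values []*tsValue, target float64) bool {
	var sum, count float64
	for _, smpl := range values {
		if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
			continue
		}
		sum += smpl.Value
		count++
	}
	return count > 0 && sum/count > target
}

func main() {
	vals := []*tsValue{{Value: 0.08, Timestamp: 1000}, {Value: 0.15, Timestamp: 2000}}
	fmt.Println(atleastOnceAbove(vals, 0.1), onAverageAbove(vals, 0.1)) // true true
}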

View File

@@ -6,16 +6,24 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
target := 100.0
makeLabel := func(name, value string) *qbtypes.Label {
return &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: name}, Value: value}
}
makeValue := func(value float64, ts int64) *qbtypes.TimeSeriesValue {
return &qbtypes.TimeSeriesValue{Value: value, Timestamp: ts}
}
tests := []struct {
name string
threshold BasicRuleThreshold
series v3.Series
series qbtypes.TimeSeries
ruleUnit string
shouldAlert bool
}{
@@ -28,11 +36,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 150ms in seconds
},
ruleUnit: "s",
shouldAlert: true,
@@ -46,11 +52,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
},
ruleUnit: "s",
shouldAlert: false,
@@ -64,11 +68,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150000, 1000)}, // 150000ms = 150s
},
ruleUnit: "ms",
shouldAlert: true,
@@ -83,11 +85,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 0.15KiB ≈ 153.6 bytes
},
ruleUnit: "kbytes",
shouldAlert: true,
@@ -101,11 +101,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
},
ruleUnit: "mbytes",
shouldAlert: true,
@@ -120,11 +118,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
},
ruleUnit: "s",
shouldAlert: true,
@@ -138,12 +134,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: OnAverage,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.08, Timestamp: 1000}, // 80ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.08, 1000), // 80ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
},
},
ruleUnit: "s",
@@ -158,12 +154,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: InTotal,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.04, Timestamp: 1000}, // 40MB
{Value: 0.05, Timestamp: 2000}, // 50MB
{Value: 0.03, Timestamp: 3000}, // 30MB
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.04, 1000), // 40MB
makeValue(0.05, 2000), // 50MB
makeValue(0.03, 3000), // 30MB
},
},
ruleUnit: "decgbytes",
@@ -178,12 +174,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AllTheTimes,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.11, Timestamp: 1000}, // 110ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.11, 1000), // 110ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
},
},
ruleUnit: "s",
@@ -198,11 +194,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: Last,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150kB
{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.15, 1000), // 150kB
makeValue(0.05, 2000), // 50kB (last value)
},
},
ruleUnit: "decmbytes",
@@ -218,11 +214,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
},
ruleUnit: "KBs",
shouldAlert: true,
@@ -237,11 +231,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150ms
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150ms
},
ruleUnit: "ms",
shouldAlert: true,
@@ -256,11 +248,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150 (unitless)
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150 (unitless)
},
ruleUnit: "",
shouldAlert: true,
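The hunks above only touch the fixture types; the loop that consumes `tests` is outside the diff. For context, the usual table-driven shape would look roughly like the sketch below — `evalThreshold` is a placeholder name, not an API confirmed by this diff:

// Hypothetical harness: evalThreshold stands in for whatever entry point the
// suite actually calls (not shown in the hunks); the table-driven loop is the point.
for _, tt := range tests {
	t.Run(tt.name, func(t *testing.T) {
		got := evalThreshold(tt.threshold, tt.series, tt.ruleUnit) // placeholder call
		assert.Equal(t, tt.shouldAlert, got)
	})
}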

View File

@@ -1,91 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>You're Invited to Join SigNoz</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
You've been invited by <strong>{{.inviter_email}}</strong> to join their SigNoz organization.
</p>
<p style="margin:0 0 12px;font-size:16px;color:#333;line-height:1.6">
A new account has been created for you with the following details:
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:20px;background:#f5f5f5;border-radius:6px;border-left:4px solid #4E74F8">
<p style="margin:0;font-size:15px;color:#333;line-height:1.6">
<strong>Email:</strong> {{.to}}
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Accept the invitation to get started.
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Accept Invitation
</a>
</td>
</tr>
</table>
<p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
Button not working? Copy and paste this link into your browser:
</p>
<p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
<a href="{{.link}}" style="color:#4E74F8;text-decoration:none">
{{.link}}
</a>
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
&#169; 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html>
<body>
<p>Hi {{.CustomerName}},</p>
<p>You have been invited to join the SigNoz project by {{.InviterName}} ({{.InviterEmail}}).</p>
<p>Please click on the following button to accept the invitation:</p>
<a href="{{.Link}}" style="background-color: #000000; color: white; padding: 14px 20px; text-align: center; text-decoration: none; display: inline-block;">Accept Invitation</a>
<p>Button not working? Paste the following link into your browser:</p>
<p>{{.Link}}</p>
<p>Follow the docs 👉 <a href="https://signoz.io/docs/cloud/">Get Started with SigNoz Cloud</a></p>
<p>Thanks,</p>
<p>SigNoz Team</p>
</body>
</html>
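For reference, this simplified invite body is a standard Go html/template; a minimal rendering sketch using the fields it references is shown below (the file name and the way pkg/emailing actually loads and sends templates are assumptions, not shown in this diff).

// Generic html/template sketch; file name and delivery wiring are assumed.
package main

import (
	"html/template"
	"os"
)

func main() {
	tmpl := template.Must(template.ParseFiles("invite_user.gotmpl")) // assumed file name
	data := map[string]any{
		"CustomerName": "Jane",
		"InviterName":  "Srikanth",
		"InviterEmail": "srikanth@example.com",
		"Link":         "https://example.signoz.io/invite/abc123",
	}
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}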

View File

@@ -1,91 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>{{.subject}}</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
A password reset was requested for your SigNoz account.
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Click the button below to reset your password:
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.Link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Reset Password
</a>
</td>
</tr>
</table>
<p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
Button not working? Copy and paste this link into your browser:
</p>
<p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
<a href="{{.Link}}" style="color:#4E74F8;text-decoration:none">
{{.Link}}
</a>
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:16px;background:#fff4e6;border-radius:6px;border-left:4px solid #ff9800">
<p style="margin:0;font-size:14px;color:#333;line-height:1.6">
<strong>⏱️ This link will expire in {{.Expiry}}.</strong>
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
If you didn't request this password reset, please ignore this email. Your password will remain unchanged.
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
&#169; 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html>
<body>
<p>Hello {{.Name}},</p>
<p>You requested a password reset for your SigNoz account.</p>
<p>Click the link below to reset your password:</p>
<a href="{{.Link}}">Reset Password</a>
<p>This link will expire in {{.Expiry}}.</p>
<p>If you didn't request this, please ignore this email. Your password will remain unchanged.</p>
<br>
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>

View File

@@ -0,0 +1,21 @@
<!DOCTYPE html>
<html>
<body>
Hi {{.CustomerName}},<br>
Your role in <strong>SigNoz</strong> has been updated by {{.UpdatedByEmail}}.
<p>
<strong>Previous Role:</strong> {{.OldRole}}<br>
<strong>New Role:</strong> {{.NewRole}}
</p>
{{if eq .OldRole "Admin"}}
<p>
If you were not expecting this change or have any questions, please contact us at <a href="mailto:support@signoz.io">support@signoz.io</a>.
</p>
{{else}}
<p>
If you were not expecting this change or have any questions, please reach out to your administrator.
</p>
{{end}}
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>
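The {{if eq .OldRole "Admin"}} branch above switches the support contact based on the member's previous role; the sketch below exercises both branches with the standard library (file name and data values are made up for illustration).

// Renders the role-update template twice to show the Admin vs non-Admin branch.
package main

import (
	"html/template"
	"os"
)

func main() {
	tmpl := template.Must(template.ParseFiles("update_role.gotmpl")) // assumed file name
	for _, oldRole := range []string{"Admin", "Editor"} {
		data := map[string]any{
			"CustomerName":   "Jane",
			"UpdatedByEmail": "admin@example.com",
			"OldRole":        oldRole,
			"NewRole":        "Viewer",
		}
		if err := tmpl.Execute(os.Stdout, data); err != nil {
			panic(err)
		}
	}
}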