Compare commits


5 Commits

Author SHA1 Message Date
Naman Verma
da7f62a5d3 Merge branch 'main' into nv/10282 2026-02-16 11:48:33 +05:30
Abhi kumar
8f38398863 test: added tests for barpanel hooks + utils (#10295)
* chore: refactored the config builder and added base config builder

* chore: added a common chart wrapper

* chore: tsc fix

* fix: pr review changes

* fix: pr review changes

* chore: added different tooltips

* chore: removed dayjs extension

* feat: added new barpanel component

* fix: added fix for pr review changes

* chore: added support for bar alignment configuration

* chore: updated structure for bar panel

* test: added tests for barpanel hooks + utils
2026-02-16 06:18:14 +00:00
Ashwin Bhatkal
eb39772d3c chore: variable fetch state machine (#10290)
* chore: shared utils update + API plumbing

* chore: variable fetch state machine

* chore: add tests

* chore: add tests

* chore: move tests

* chore: fix tests
2026-02-16 11:36:16 +05:30
Naman Verma
052cb01e00 feat: include monotonicity in metrics' properties API responses 2026-02-16 09:16:17 +05:30
Pandey
df72c897f9 feat: change invitation and password reset emails (#10297)
2026-02-15 14:59:54 +00:00
52 changed files with 4648 additions and 1249 deletions

.github/CODEOWNERS
View File

@@ -43,6 +43,12 @@
/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25
# Emailing Owners
/pkg/emailing/ @vikrantgupta25
/pkg/types/emailtypes/ @vikrantgupta25
/templates/email/ @vikrantgupta25
# Querier Owners
/pkg/querier/ @srikanthccv

View File

@@ -14,5 +14,8 @@
},
"[sql]": {
"editor.defaultFormatter": "adpyke.vscode-sql-formatter"
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
}
}

View File

@@ -193,6 +193,15 @@ emailing:
templates:
# The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
directory: /opt/signoz/conf/templates/email
format:
header:
enabled: false
logo_url: ""
help:
enabled: false
email: ""
footer:
enabled: false
smtp:
# The SMTP server address.
address: localhost:25

View File

@@ -5,16 +5,23 @@ import (
"encoding/json"
"fmt"
"log/slog"
"math"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
@@ -25,8 +32,6 @@ import (
querierV5 "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -43,12 +48,17 @@ type AnomalyRule struct {
reader interfaces.Reader
// querierV5 is the query builder v5 querier used for all alert rule evaluation
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
querierV5 querierV5.Querier
provider anomaly.Provider
providerV2 anomalyV2.Provider
logger *slog.Logger
version string
logger *slog.Logger
seasonality anomaly.Seasonality
}
@@ -92,6 +102,34 @@ func NewAnomalyRule(
logger.Info("using seasonality", "seasonality", t.seasonality.String())
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
)
}
if t.seasonality == anomaly.SeasonalityHourly {
t.providerV2 = anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
@@ -110,7 +148,7 @@ func NewAnomalyRule(
}
t.querierV5 = querierV5
t.reader = reader
t.version = p.Version
t.logger = logger
return &t, nil
}
@@ -119,9 +157,36 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
r.logger.InfoContext(ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)
st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()
compositeQuery := r.Condition().CompositeQuery
if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}
func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -150,6 +215,60 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
if err != nil {
return nil, err
}
var queryResult *v3.Result
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
queryResult = result
break
}
}
hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
for _, series := range queryResult.AnomalyScores {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}
func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
Params: *params,
@@ -171,25 +290,20 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
}
var anomalyScores []*qbtypes.TimeSeries
if qbResult != nil {
for _, bucket := range qbResult.Aggregations {
anomalyScores = append(anomalyScores, bucket.AnomalyScores...)
}
}
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)
hasData := len(anomalyScores) > 0
hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(anomalyScores)
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := anomalyScores
seriesToProcess := queryResult.AnomalyScores
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -202,7 +316,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -223,7 +337,16 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
}
if err != nil {
return 0, err
}

View File

@@ -8,27 +8,28 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
// mockAnomalyProviderV2 is a mock implementation of anomalyV2.Provider for testing.
type mockAnomalyProviderV2 struct {
responses []*anomalyV2.AnomaliesResponse
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
callCount int
}
func (m *mockAnomalyProviderV2) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomalyV2.AnomaliesRequest) (*anomalyV2.AnomaliesResponse, error) {
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomalyV2.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
@@ -81,11 +82,11 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
},
}
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
QueryName: "A",
AnomalyScores: []*v3.Series{},
},
},
}
@@ -128,8 +129,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
)
require.NoError(t, err)
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseNoData},
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
}
alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -189,11 +190,11 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
},
}
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
QueryName: "A",
AnomalyScores: []*v3.Series{},
},
},
}
@@ -227,22 +228,16 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)
responseWithData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{
AnomalyScores: []*v3.Series{
{
AnomalyScores: []*qbtypes.TimeSeries{
{
Labels: []*qbtypes.Label{
{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
@@ -257,8 +252,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseWithData, responseNoData},
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
}
alertsFound1, err := rule.Eval(context.Background(), t1)

View File

@@ -53,6 +53,7 @@ describe('VariableItem', () => {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
@@ -75,6 +76,7 @@ describe('VariableItem', () => {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
@@ -98,6 +100,7 @@ describe('VariableItem', () => {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
@@ -139,6 +142,7 @@ describe('VariableItem', () => {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
@@ -169,6 +173,7 @@ describe('VariableItem', () => {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
@@ -191,6 +196,7 @@ describe('VariableItem', () => {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>

View File

@@ -14,6 +14,7 @@ const baseDependencyData = {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
};

View File

@@ -223,6 +223,16 @@ describe('dashboardVariables - utilities and processors', () => {
},
hasCycle: false,
cycleNodes: undefined,
transitiveDescendants: {
deployment_environment: ['service_name', 'endpoint', 'http_status_code'],
endpoint: ['http_status_code'],
environment: [],
http_status_code: [],
k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
k8s_namespace_name: [],
k8s_node_name: ['k8s_namespace_name'],
service_name: ['endpoint', 'http_status_code'],
},
};
expect(buildDependencyGraph(graph)).toEqual(expected);

View File

@@ -246,10 +246,26 @@ export const buildDependencyGraph = (
const hasCycle = topologicalOrder.length !== Object.keys(dependencies)?.length;
// Pre-compute transitive descendants by walking topological order in reverse.
// Each node's transitive descendants = direct children + their transitive descendants.
const transitiveDescendants: VariableGraph = {};
for (let i = topologicalOrder.length - 1; i >= 0; i--) {
const node = topologicalOrder[i];
const desc = new Set<string>();
for (const child of adjList[node] || []) {
desc.add(child);
for (const d of transitiveDescendants[child] || []) {
desc.add(d);
}
}
transitiveDescendants[node] = Array.from(desc);
}
return {
order: topologicalOrder,
graph: adjList,
parentDependencyGraph: buildParentDependencyGraph(adjList),
transitiveDescendants,
hasCycle,
cycleNodes,
};
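
To make the reverse-topological walk concrete, here is a small walkthrough (the graph is hypothetical, not taken from this change):

// Hypothetical adjacency list: a -> b, a -> c, b -> c.
const adjList: Record<string, string[]> = { a: ['b', 'c'], b: ['c'], c: [] };
// Topological order is ['a', 'b', 'c']; walking it in reverse:
//   c: no children                       -> transitiveDescendants.c = []
//   b: child c, plus c's descendants []  -> transitiveDescendants.b = ['c']
//   a: children b and c, plus b's ['c'],
//      which the Set dedupes             -> transitiveDescendants.a = ['b', 'c']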

View File

@@ -0,0 +1,117 @@
import { AlignedData } from 'uplot';
import { getInitialStackedBands, stack } from '../stackUtils';
describe('stackUtils', () => {
describe('stack', () => {
const neverOmit = (): boolean => false;
it('preserves time axis as first row', () => {
const data: AlignedData = [
[100, 200, 300],
[1, 2, 3],
[4, 5, 6],
];
const { data: result } = stack(data, neverOmit);
expect(result[0]).toEqual([100, 200, 300]);
});
it('stacks value series cumulatively (last = raw, first = total)', () => {
// Time, then 3 value series. Stack order: last series stays raw, then we add upward.
const data: AlignedData = [
[0, 1, 2],
[1, 2, 3], // series 1
[4, 5, 6], // series 2
[7, 8, 9], // series 3
];
const { data: result } = stack(data, neverOmit);
// result[1] = s1+s2+s3, result[2] = s2+s3, result[3] = s3
expect(result[1]).toEqual([12, 15, 18]); // 1+4+7, 2+5+8, 3+6+9
expect(result[2]).toEqual([11, 13, 15]); // 4+7, 5+8, 6+9
expect(result[3]).toEqual([7, 8, 9]);
});
it('treats null values as 0 when stacking', () => {
const data: AlignedData = [
[0, 1],
[1, null],
[null, 10],
];
const { data: result } = stack(data, neverOmit);
expect(result[1]).toEqual([1, 10]); // total
expect(result[2]).toEqual([0, 10]); // last series with null→0
});
it('copies omitted series as-is without accumulating', () => {
// Omit series 2 (index 2); series 1 and 3 are stacked.
const data: AlignedData = [
[0, 1],
[10, 20], // series 1
[100, 200], // series 2 - omitted
[1, 2], // series 3
];
const omitSeries2 = (i: number): boolean => i === 2;
const { data: result } = stack(data, omitSeries2);
// series 3 raw: [1, 2]; series 2 omitted: [100, 200] as-is; series 1 stacked with s3: [11, 22]
expect(result[1]).toEqual([11, 22]); // 10+1, 20+2
expect(result[2]).toEqual([100, 200]); // copied, not stacked
expect(result[3]).toEqual([1, 2]);
});
it('returns bands between consecutive visible series when none omitted', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
[3, 4],
[5, 6],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([{ series: [1, 2] }, { series: [2, 3] }]);
});
it('returns bands only between visible series when some are omitted', () => {
// 4 value series; omit index 2. Visible: 1, 3, 4. Bands: [1,3], [3,4]
const data: AlignedData = [[0], [1], [2], [3], [4]];
const omitSeries2 = (i: number): boolean => i === 2;
const { bands } = stack(data, omitSeries2);
expect(bands).toEqual([{ series: [1, 3] }, { series: [3, 4] }]);
});
it('returns empty bands when only one value series', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([]);
});
});
describe('getInitialStackedBands', () => {
it('returns one band between each consecutive pair for seriesCount 3', () => {
expect(getInitialStackedBands(3)).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
]);
});
it('returns empty array for seriesCount 0 or 1', () => {
expect(getInitialStackedBands(0)).toEqual([]);
expect(getInitialStackedBands(1)).toEqual([]);
});
it('returns single band for seriesCount 2', () => {
expect(getInitialStackedBands(2)).toEqual([{ series: [1, 2] }]);
});
it('returns bands [1,2], [2,3], ..., [n-1, n] for seriesCount n', () => {
const bands = getInitialStackedBands(5);
expect(bands).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
{ series: [3, 4] },
{ series: [4, 5] },
]);
});
});
});

View File

@@ -0,0 +1,116 @@
import uPlot, { AlignedData } from 'uplot';
/**
* Stack data cumulatively (top-down: first series = top, last = bottom).
* When `omit(seriesIndex)` returns true, that series is excluded from stacking.
*/
export function stack(
data: AlignedData,
omit: (seriesIndex: number) => boolean,
): { data: AlignedData; bands: uPlot.Band[] } {
const timeAxis = data[0];
const pointCount = timeAxis.length;
const valueSeriesCount = data.length - 1; // exclude time axis
const stackedSeries = buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
});
const bands = buildFillBands(valueSeriesCount + 1, omit); // +1 for 1-based series indices
return {
data: [timeAxis, ...stackedSeries] as AlignedData,
bands,
};
}
interface BuildStackedSeriesParams {
data: AlignedData;
valueSeriesCount: number;
pointCount: number;
omit: (seriesIndex: number) => boolean;
}
/**
* Accumulate from last series upward: last series = raw values, first = total.
* Omitted series are copied as-is (no accumulation).
*/
function buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
}: BuildStackedSeriesParams): (number | null)[][] {
const stackedSeries: (number | null)[][] = Array(valueSeriesCount);
const cumulativeSums = Array(pointCount).fill(0) as number[];
for (let seriesIndex = valueSeriesCount; seriesIndex >= 1; seriesIndex--) {
const rawValues = data[seriesIndex] as (number | null)[];
if (omit(seriesIndex)) {
stackedSeries[seriesIndex - 1] = rawValues;
} else {
stackedSeries[seriesIndex - 1] = rawValues.map((rawValue, pointIndex) => {
const numericValue = rawValue == null ? 0 : Number(rawValue);
return (cumulativeSums[pointIndex] += numericValue);
});
}
}
return stackedSeries;
}
/**
* Bands define fill between consecutive visible series for stacked appearance.
* uPlot format: [upperSeriesIdx, lowerSeriesIdx].
*/
function buildFillBands(
seriesLength: number,
omit: (seriesIndex: number) => boolean,
): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesLength; seriesIndex++) {
if (omit(seriesIndex)) {
continue;
}
const nextVisibleSeriesIndex = findNextVisibleSeriesIndex(
seriesLength,
seriesIndex,
omit,
);
if (nextVisibleSeriesIndex !== -1) {
bands.push({ series: [seriesIndex, nextVisibleSeriesIndex] });
}
}
return bands;
}
function findNextVisibleSeriesIndex(
seriesLength: number,
afterIndex: number,
omit: (seriesIndex: number) => boolean,
): number {
for (let i = afterIndex + 1; i < seriesLength; i++) {
if (!omit(i)) {
return i;
}
}
return -1;
}
/**
* Returns band indices for initial stacked state (no series omitted).
* Top-down: first series at top, band fills between consecutive series.
* uPlot band format: [upperSeriesIdx, lowerSeriesIdx].
*/
export function getInitialStackedBands(seriesCount: number): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesCount; seriesIndex++) {
bands.push({ series: [seriesIndex, seriesIndex + 1] });
}
return bands;
}
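
A minimal usage sketch of the two helpers above (sample data and the import path are illustrative):

import { AlignedData } from 'uplot';
import { getInitialStackedBands, stack } from './stackUtils';

// Time axis plus two value series.
const data: AlignedData = [
  [0, 1, 2], // timestamps
  [1, 2, 3], // series 1 (top)
  [4, 5, 6], // series 2 (bottom, stays raw)
];

const { data: stacked, bands } = stack(data, () => false);
// stacked[1] === [5, 7, 9]  (series 1 + series 2)
// stacked[2] === [4, 5, 6]  (raw)
// bands === [{ series: [1, 2] }], matching getInitialStackedBands(2)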

View File

@@ -0,0 +1,313 @@
import { renderHook } from '@testing-library/react';
import uPlot from 'uplot';
import type { UseBarChartStackingParams } from '../useBarChartStacking';
import { useBarChartStacking } from '../useBarChartStacking';
type MockConfig = { addHook: jest.Mock };
function asConfig(c: MockConfig): UseBarChartStackingParams['config'] {
return (c as unknown) as UseBarChartStackingParams['config'];
}
function createMockConfig(): {
config: MockConfig;
invokeSetData: (plot: uPlot) => void;
invokeSetSeries: (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
) => void;
removeSetData: jest.Mock;
removeSetSeries: jest.Mock;
} {
let setDataHandler: ((plot: uPlot) => void) | null = null;
let setSeriesHandler:
| ((plot: uPlot, seriesIndex: number | null, opts: uPlot.Series) => void)
| null = null;
const removeSetData = jest.fn();
const removeSetSeries = jest.fn();
const addHook = jest.fn(
(
hookName: string,
handler: (plot: uPlot, ...args: unknown[]) => void,
): (() => void) => {
if (hookName === 'setData') {
setDataHandler = handler as (plot: uPlot) => void;
return removeSetData;
}
if (hookName === 'setSeries') {
setSeriesHandler = handler as (
plot: uPlot,
seriesIndex: number | null,
opts: uPlot.Series,
) => void;
return removeSetSeries;
}
return jest.fn();
},
);
const config: MockConfig = { addHook };
const invokeSetData = (plot: uPlot): void => {
setDataHandler?.(plot);
};
const invokeSetSeries = (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
): void => {
setSeriesHandler?.(plot, seriesIndex, opts as uPlot.Series);
};
return {
config,
invokeSetData,
invokeSetSeries,
removeSetData,
removeSetSeries,
};
}
function createMockPlot(overrides: Partial<uPlot> = {}): uPlot {
return ({
data: [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
],
series: [{ show: true }, { show: true }, { show: true }],
delBand: jest.fn(),
addBand: jest.fn(),
setData: jest.fn(),
...overrides,
} as unknown) as uPlot;
}
describe('useBarChartStacking', () => {
it('returns data as-is when isStackedBarChart is false', () => {
const data: uPlot.AlignedData = [
[100, 200],
[1, 2],
[3, 4],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: null,
}),
);
expect(result.current).toBe(data);
});
it('returns data as-is when config is null and isStackedBarChart is true', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[4, 5],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
// Still returns stacked data (computed in useMemo); no hooks registered
expect(result.current[0]).toEqual([0, 1]);
expect(result.current[1]).toEqual([5, 7]); // stacked
expect(result.current[2]).toEqual([4, 5]);
});
it('returns stacked data when isStackedBarChart is true and multiple value series', () => {
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current[0]).toEqual([0, 1, 2]);
expect(result.current[1]).toEqual([12, 15, 18]); // s1+s2+s3
expect(result.current[2]).toEqual([11, 13, 15]); // s2+s3
expect(result.current[3]).toEqual([7, 8, 9]);
});
it('returns data as-is when only one value series (no stacking needed)', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current).toEqual(data);
});
it('registers setData and setSeries hooks when isStackedBarChart and config provided', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
expect(config.addHook).toHaveBeenCalledWith('setData', expect.any(Function));
expect(config.addHook).toHaveBeenCalledWith(
'setSeries',
expect.any(Function),
);
});
it('does not register hooks when isStackedBarChart is false', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: asConfig(config),
}),
);
expect(config.addHook).not.toHaveBeenCalled();
});
it('calls cleanup when unmounted', () => {
const { config, removeSetData, removeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const { unmount } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
unmount();
expect(removeSetData).toHaveBeenCalled();
expect(removeSetSeries).toHaveBeenCalled();
});
it('re-stacks and updates plot when setData hook is invoked', () => {
const { config, invokeSetData } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
];
const plot = createMockPlot({
data: [
[0, 1, 2],
[5, 7, 9],
[4, 5, 6],
],
});
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
invokeSetData(plot);
expect(plot.delBand).toHaveBeenCalledWith(null);
expect(plot.addBand).toHaveBeenCalled();
expect(plot.setData).toHaveBeenCalledWith(
expect.arrayContaining([
[0, 1, 2],
expect.any(Array), // stacked row 1
expect.any(Array), // stacked row 2
]),
);
});
it('re-stacks when setSeries hook is invoked (e.g. legend toggle)', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[10, 20],
[5, 10],
];
// Plot data must match unstacked length so canApplyStacking passes
const plot = createMockPlot({
data: [
[0, 1],
[15, 30],
[5, 10],
],
});
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
invokeSetSeries(plot, 1, { show: false });
expect(plot.setData).toHaveBeenCalled();
});
it('does not re-stack when setSeries is called with focus option', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const plot = createMockPlot();
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
(plot.setData as jest.Mock).mockClear();
invokeSetSeries(plot, 1, { focus: true } as uPlot.Series);
expect(plot.setData).not.toHaveBeenCalled();
});
});

View File

@@ -6,7 +6,7 @@ import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
@@ -63,8 +63,12 @@ function QuerySection({
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions
version={ENTITY_VERSION_V5}
showFunctions={
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>

View File

@@ -0,0 +1,153 @@
import { useCallback, useMemo, useRef, useSyncExternalStore } from 'react';
import isEmpty from 'lodash-es/isEmpty';
import {
IVariableFetchStoreState,
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { useDashboardVariablesSelector } from './useDashboardVariables';
/**
* Generic selector hook for the variable fetch store.
* Same pattern as useDashboardVariablesSelector.
*/
const useVariableFetchSelector = <T>(
selector: (state: IVariableFetchStoreState) => T,
): T => {
const selectorRef = useRef(selector);
selectorRef.current = selector;
const getSnapshot = useCallback(
() => selectorRef.current(variableFetchStore.getSnapshot()),
[],
);
return useSyncExternalStore(variableFetchStore.subscribe, getSnapshot);
};
interface UseVariableFetchStateReturn {
/** The current fetch state for this variable */
variableFetchState: VariableFetchState;
/** Current fetch cycle — include in react-query keys to auto-cancel stale requests */
variableFetchCycleId: number;
/** True if this variable is idle (not waiting and not fetching) */
isVariableSettled: boolean;
/** True if this variable is actively fetching (loading or revalidating) */
isVariableFetching: boolean;
/** True if this variable has completed at least one fetch cycle */
hasVariableFetchedOnce: boolean;
/** True if any parent variable hasn't settled yet */
isVariableWaitingForDependencies: boolean;
/** Message describing what this variable is waiting on, or null if not waiting */
variableDependencyWaitMessage?: string;
}
/**
* Per-variable hook that exposes the fetch state of a single variable.
* Reusable by both variable input components and panel components.
*
* Subscribes to both variableFetchStore (for states) and
* dashboardVariablesStore (for parent graph) to compute derived values.
*/
export function useVariableFetchState(
variableName: string,
): UseVariableFetchStateReturn {
// This variable's fetch state (loading, waiting, idle, etc.)
const variableFetchState = useVariableFetchSelector(
(s) => s.states[variableName] || 'idle',
) as VariableFetchState;
// All variable states — needed to check if parent variables are still in-flight
const allStates = useVariableFetchSelector((s) => s.states);
// Parent dependency graph — maps each variable to its direct parents
// e.g. { "childVariable": ["parentVariable"] } means "childVariable" depends on "parentVariable"
const parentGraph = useDashboardVariablesSelector(
(s) => s.dependencyData?.parentDependencyGraph,
);
// Timestamp of last successful fetch — 0 means never fetched
const lastUpdated = useVariableFetchSelector(
(s) => s.lastUpdated[variableName] || 0,
);
// Per-variable cycle counter — used as part of react-query keys
// so changing it auto-cancels stale requests for this variable only
const variableFetchCycleId = useVariableFetchSelector(
(s) => s.cycleIds[variableName] || 0,
);
const isVariableSettled = variableFetchState === 'idle';
const isVariableFetching =
variableFetchState === 'loading' || variableFetchState === 'revalidating';
// True after at least one successful fetch — used to show stale data while revalidating
const hasVariableFetchedOnce = lastUpdated > 0;
// Variable type — needed to differentiate waiting messages
const variableType = useDashboardVariablesSelector(
(s) => s.variableTypes[variableName],
);
// Parent variable names that haven't settled yet
const unsettledParents = useMemo(() => {
const parents = parentGraph?.[variableName] || [];
return parents.filter((p) => (allStates[p] || 'idle') !== 'idle');
}, [parentGraph, variableName, allStates]);
const isVariableWaitingForDependencies = unsettledParents.length > 0;
const variableDependencyWaitMessage = useMemo(() => {
if (variableFetchState !== 'waiting') {
return;
}
if (variableType === 'DYNAMIC') {
return 'Waiting for all query variable options to load.';
}
if (unsettledParents.length === 0) {
return;
}
const quoted = unsettledParents.map((p) => `"${p}"`);
const names =
quoted.length > 1
? `${quoted.slice(0, -1).join(', ')} and ${quoted[quoted.length - 1]}`
: quoted[0];
return `Waiting for options of ${names} to load.`;
}, [variableFetchState, variableType, unsettledParents]);
return {
variableFetchState,
isVariableSettled,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
isVariableFetching,
hasVariableFetchedOnce,
variableFetchCycleId,
};
}
export function useIsPanelWaitingOnVariable(variableNames: string[]): boolean {
const states = useVariableFetchSelector((s) => s.states);
const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
const variableTypesMap = useDashboardVariablesSelector((s) => s.variableTypes);
return variableNames.some((name) => {
const variableFetchState = states[name];
const { selectedValue, allSelected } = dashboardVariables?.[name] || {};
const isVariableInFetchingOrWaitingState =
variableFetchState === 'loading' ||
variableFetchState === 'revalidating' ||
variableFetchState === 'waiting';
if (variableTypesMap[name] === 'DYNAMIC' && allSelected) {
return isVariableInFetchingOrWaitingState;
}
return isEmpty(selectedValue) ? isVariableInFetchingOrWaitingState : false;
});
}
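
A hedged usage sketch showing how variableFetchCycleId can key a react-query request so a new fetch cycle drops the stale one (import paths and fetchOptions are hypothetical):

import { useQuery, UseQueryResult } from 'react-query';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';

// Hypothetical stand-in for the real options fetcher.
const fetchOptions = async (name: string): Promise<string[]> => [];

function useVariableOptions(variableName: string): UseQueryResult<string[]> {
  const {
    variableFetchCycleId,
    isVariableWaitingForDependencies,
  } = useVariableFetchState(variableName);

  // The cycle id is part of the key, so enqueueing a new cycle produces a
  // new key and react-query abandons the stale in-flight request.
  return useQuery(
    ['variable-options', variableName, variableFetchCycleId],
    () => fetchOptions(variableName),
    { enabled: !isVariableWaitingForDependencies },
  );
}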

View File

@@ -3,6 +3,7 @@ import { TelemetryFieldKey } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { placeWidgetAtBottom } from 'container/NewWidget/utils';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isArray } from 'lodash-es';
import {
Dashboard,
@@ -116,10 +117,17 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
filter.key?.key === variable.dynamicVariablesAttribute &&
((isArray(filter.value) &&
filter.value.includes(`$${variable.name}`)) ||
filter.value === `$${variable.name}`) &&
(isArray(filter.value)
? filter.value.some(
(v) =>
typeof v === 'string' &&
variable.name &&
textContainsVariableReference(v, variable.name),
)
: typeof filter.value === 'string' &&
textContainsVariableReference(filter.value, variable.name)) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -132,7 +140,12 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
queryData.filter?.expression?.includes(`$${variable.name}`) &&
variable.name &&
queryData.filter?.expression &&
textContainsVariableReference(
queryData.filter.expression,
variable.name,
) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -149,7 +162,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
promqlQuery.query?.includes(`$${variable.name}`) &&
variable.name &&
promqlQuery.query &&
textContainsVariableReference(promqlQuery.query, variable.name) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -165,7 +180,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
clickhouseQuery.query?.includes(`$${variable.name}`) &&
variable.name &&
clickhouseQuery.query &&
textContainsVariableReference(clickhouseQuery.query, variable.name) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
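
The textContainsVariableReference helper itself is not part of this diff; a plausible sketch of the boundary-aware matching that replaces the raw includes calls (assumed behavior, not the actual source):

// Sketch only: the real helper may also cover {{name}} and [[name]] syntax.
export function textContainsVariableReference(
  text: string,
  variableName: string,
): boolean {
  // Escape regex metacharacters in the variable name.
  const escaped = variableName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  // Require a non-word character (or end of input) after the name so
  // `$env` does not match inside `$environment`.
  return new RegExp(`\\$${escaped}(?!\\w)`).test(text);
}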

View File

@@ -0,0 +1,527 @@
import * as dashboardVariablesStore from '../dashboardVariables/dashboardVariablesStore';
import { IDependencyData } from '../dashboardVariables/dashboardVariablesStoreTypes';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
onVariableFetchComplete,
onVariableFetchFailure,
VariableFetchContext,
variableFetchStore,
} from '../variableFetchStore';
const getVariableDependencyContextSpy = jest.spyOn(
dashboardVariablesStore,
'getVariableDependencyContext',
);
function resetStore(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
}
function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
getVariableDependencyContextSpy.mockReturnValue({
doAllVariablesHaveValuesSelected: false,
variableTypes: {},
dynamicVariableOrder: [],
dependencyData: null,
...overrides,
});
}
/**
* Helper to build a dependency data object for tests.
* Only the fields used by the store actions are required.
*/
function buildDependencyData(
overrides: Partial<IDependencyData> = {},
): IDependencyData {
return {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
...overrides,
};
}
describe('variableFetchStore', () => {
beforeEach(() => {
resetStore();
jest.clearAllMocks();
});
// ==================== initializeVariableFetchStore ====================
describe('initializeVariableFetchStore', () => {
it('should initialize new variables to idle', () => {
initializeVariableFetchStore(['a', 'b', 'c']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' });
});
it('should preserve existing states for known variables', () => {
// Pre-set a state
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
initializeVariableFetchStore(['a', 'b']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('idle');
});
it('should clean up stale variables that no longer exist', () => {
variableFetchStore.update((d) => {
d.states.old = 'idle';
d.lastUpdated.old = 100;
d.cycleIds.old = 3;
});
initializeVariableFetchStore(['a']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.old).toBeUndefined();
expect(storeSnapshot.lastUpdated.old).toBeUndefined();
expect(storeSnapshot.cycleIds.old).toBeUndefined();
expect(storeSnapshot.states.a).toBe('idle');
});
it('should handle empty variable names array', () => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
initializeVariableFetchStore([]);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({});
});
});
// ==================== enqueueFetchOfAllVariables ====================
describe('enqueueFetchOfAllVariables', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
expect(variableFetchStore.getSnapshot().states.a).toBe('idle');
});
it('should set root query variables to loading and dependent ones to waiting', () => {
// a is root (no parents), b depends on a
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('waiting');
});
it('should set root query variables to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a'],
parentDependencyGraph: { a: [] },
}),
variableTypes: { a: 'QUERY' },
});
// Pre-set lastUpdated so it appears previously fetched
variableFetchStore.update((d) => {
d.lastUpdated.a = 1000;
});
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('revalidating');
});
it('should bump cycle IDs for all enqueued variables', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.cycleIds.a).toBe(1);
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set dynamic variables to waiting when not all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: false,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should set dynamic variables to loading when all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: true,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should not treat non-QUERY parents as query parents', () => {
// b has a CUSTOM parent — shouldn't cause waiting
mockContext({
dependencyData: buildDependencyData({
order: ['b'],
parentDependencyGraph: { b: ['customVar'] },
}),
variableTypes: { b: 'QUERY', customVar: 'CUSTOM' },
});
initializeVariableFetchStore(['b', 'customVar']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
});
});
// ==================== onVariableFetchComplete ====================
describe('onVariableFetchComplete', () => {
it('should set the completed variable to idle with a lastUpdated timestamp', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
const before = Date.now();
onVariableFetchComplete('a');
const after = Date.now();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.lastUpdated.a).toBeGreaterThanOrEqual(before);
expect(storeSnapshot.lastUpdated.a).toBeLessThanOrEqual(after);
});
it('should unblock waiting query-type children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.states.b).toBe('loading');
});
it('should not unblock non-QUERY children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// dyn1 is DYNAMIC, not QUERY, so it should remain waiting
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables are settled', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: [] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should NOT unlock dynamic variables if a query variable is still in-flight', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
});
// ==================== onVariableFetchFailure ====================
describe('onVariableFetchFailure', () => {
it('should set the failed variable to error', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
});
it('should set query-type transitive descendants to idle', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.c = 'waiting';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
expect(storeSnapshot.states.b).toBe('idle');
expect(storeSnapshot.states.c).toBe('idle');
});
it('should not touch non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables settle via error', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('loading');
});
});
// ==================== enqueueDescendantsOfVariable ====================
describe('enqueueDescendantsOfVariable', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('idle');
});
it('should enqueue query-type descendants with all parents settled', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set descendants to waiting when some parents are not settled', () => {
// b depends on both a and c; c is still loading
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'loading';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('waiting');
});
it('should skip non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
parentDependencyGraph: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('a');
// dyn1 is DYNAMIC, so it should not be touched
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('idle');
});
it('should handle chain of descendants: a -> b -> c', () => {
// a -> b -> c, all QUERY
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
parentDependencyGraph: { b: ['a'], c: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// b's parent (a) is idle/settled → loading
expect(storeSnapshot.states.b).toBe('loading');
// c's parent (b) just moved to loading (not settled) → waiting
expect(storeSnapshot.states.c).toBe('waiting');
});
it('should set descendants to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.lastUpdated.b = 1000;
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
});
});
});
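
Taken together, these tests pin down the per-variable state machine; a minimal sketch of the states they exercise (assumed to be the full set):

type VariableFetchState =
  | 'idle' // settled: not waiting and not fetching
  | 'waiting' // blocked on unsettled parents (or, for DYNAMIC, on all query variables)
  | 'loading' // first fetch in flight (lastUpdated is 0 or unset)
  | 'revalidating' // re-fetch in flight after at least one successful fetch
  | 'error'; // last fetch failed; counts as settled for unblocking children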

View File

@@ -0,0 +1,196 @@
import {
IVariableFetchStoreState,
VariableFetchState,
} from '../variableFetchStore';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';
describe('variableFetchStoreUtils', () => {
describe('isSettled', () => {
it('should return true for idle state', () => {
expect(isSettled('idle')).toBe(true);
});
it('should return true for error state', () => {
expect(isSettled('error')).toBe(true);
});
it('should return false for loading state', () => {
expect(isSettled('loading')).toBe(false);
});
it('should return false for revalidating state', () => {
expect(isSettled('revalidating')).toBe(false);
});
it('should return false for waiting state', () => {
expect(isSettled('waiting')).toBe(false);
});
it('should return false for undefined', () => {
expect(isSettled(undefined)).toBe(false);
});
});
describe('resolveFetchState', () => {
it('should return "loading" when variable has never been fetched', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: {},
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "loading" when lastUpdated is 0', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 0 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "revalidating" when variable has been fetched before', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 1000 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('revalidating');
});
});
describe('areAllQueryVariablesSettled', () => {
it('should return true when all query variables are idle', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'idle',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when all query variables are in error', () => {
const states: Record<string, VariableFetchState> = {
a: 'error',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true with a mix of idle and error query variables', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return false when any query variable is loading', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'loading',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should return false when any query variable is waiting', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'waiting',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should ignore non-QUERY variable types', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
dynVar: 'loading',
};
const variableTypes = {
a: 'QUERY' as const,
dynVar: 'DYNAMIC' as const,
};
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when there are no QUERY variables', () => {
const states: Record<string, VariableFetchState> = {
dynVar: 'loading',
};
const variableTypes = { dynVar: 'DYNAMIC' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
});
describe('unlockWaitingDynamicVariables', () => {
it('should transition waiting dynamic variables to loading when never fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting', dyn2: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('loading');
expect(draft.states.dyn2).toBe('loading');
});
it('should transition waiting dynamic variables to revalidating when previously fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: { dyn1: 1000 },
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1']);
expect(draft.states.dyn1).toBe('revalidating');
});
it('should not touch dynamic variables that are not in waiting state', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'idle', dyn2: 'loading' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('idle');
expect(draft.states.dyn2).toBe('loading');
});
it('should handle empty dynamic variable order', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, []);
expect(draft.states.dyn1).toBe('waiting');
});
});
});

View File

@@ -0,0 +1,225 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
dashboardVariablesStore,
getVariableDependencyContext,
setDashboardVariablesStore,
updateDashboardVariablesStore,
} from '../dashboardVariablesStore';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
function createVariable(
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
return {
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
};
}
function resetStore(): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
describe('dashboardVariablesStore', () => {
beforeEach(() => {
resetStore();
});
describe('setDashboardVariablesStore', () => {
it('should set the dashboard variables and compute derived values', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-1');
expect(storeSnapshot.variables).toEqual(variables);
expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' });
expect(storeSnapshot.sortedVariablesArray).toHaveLength(1);
});
});
describe('updateDashboardVariablesStore', () => {
it('should update variables and recompute derived values', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
},
});
const updatedVariables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
};
updateDashboardVariablesStore({
dashboardId: 'dash-1',
variables: updatedVariables,
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']);
});
it('should replace dashboardId when it does not match', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
'not-there': createVariable({ name: 'not-there', order: 0 }),
},
});
updateDashboardVariablesStore({
dashboardId: 'dash-2',
variables: {
a: createVariable({ name: 'a', order: 0 }),
},
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-2');
expect(storeSnapshot.variableTypes).toEqual({
a: 'QUERY',
});
expect(storeSnapshot.variableTypes).not.toEqual({
'not-there': 'QUERY',
});
});
});
describe('getVariableDependencyContext', () => {
it('should return context with all fields', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
},
});
const {
variableTypes,
dynamicVariableOrder,
dependencyData,
} = getVariableDependencyContext();
expect(variableTypes).toEqual({ env: 'QUERY' });
expect(dynamicVariableOrder).toEqual([]);
expect(dependencyData).not.toBeNull();
});
it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: 'us-east',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: undefined,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: null as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: false,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
});
});

View File

@@ -0,0 +1,369 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
import {
buildDynamicVariableOrder,
buildSortedVariablesArray,
buildVariableTypesMap,
computeDerivedValues,
} from '../dashboardVariablesStoreUtils';
const createVariable = (
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
});
describe('dashboardVariablesStoreUtils', () => {
describe('buildSortedVariablesArray', () => {
it('should sort variables by order property', () => {
const variables: IDashboardVariables = {
c: createVariable({ name: 'c', order: 3 }),
a: createVariable({ name: 'a', order: 1 }),
b: createVariable({ name: 'b', order: 2 }),
};
const result = buildSortedVariablesArray(variables);
expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']);
});
it('should return empty array for empty variables', () => {
const result = buildSortedVariablesArray({});
expect(result).toEqual([]);
});
it('should create copies of variables (not references)', () => {
const original = createVariable({ name: 'a', order: 0 });
const variables: IDashboardVariables = { a: original };
const result = buildSortedVariablesArray(variables);
expect(result[0]).not.toBe(original);
expect(result[0]).toEqual(original);
});
});
describe('buildVariableTypesMap', () => {
it('should create a name-to-type mapping', () => {
const sorted = [
createVariable({ name: 'env', type: 'QUERY' }),
createVariable({ name: 'region', type: 'CUSTOM' }),
createVariable({ name: 'dynVar', type: 'DYNAMIC' }),
createVariable({ name: 'text', type: 'TEXTBOX' }),
];
const result = buildVariableTypesMap(sorted);
expect(result).toEqual({
env: 'QUERY',
region: 'CUSTOM',
dynVar: 'DYNAMIC',
text: 'TEXTBOX',
});
});
it('should return empty object for empty array', () => {
expect(buildVariableTypesMap([])).toEqual({});
});
});
describe('buildDynamicVariableOrder', () => {
it('should return only DYNAMIC variable names in order', () => {
const sorted = [
createVariable({ name: 'queryVar', type: 'QUERY', order: 0 }),
createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
createVariable({ name: 'customVar', type: 'CUSTOM', order: 2 }),
createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
];
const result = buildDynamicVariableOrder(sorted);
expect(result).toEqual(['dyn1', 'dyn2']);
});
it('should return empty array when no DYNAMIC variables exist', () => {
const sorted = [
createVariable({ name: 'a', type: 'QUERY' }),
createVariable({ name: 'b', type: 'CUSTOM' }),
];
expect(buildDynamicVariableOrder(sorted)).toEqual([]);
});
it('should return empty array for empty input', () => {
expect(buildDynamicVariableOrder([])).toEqual([]);
});
});
describe('computeDerivedValues', () => {
it('should compute all derived values from variables', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 1,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(2);
expect(result.sortedVariablesArray[0].name).toBe('env');
expect(result.sortedVariablesArray[1].name).toBe('dyn1');
expect(result.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
// dependencyData should exist since there are variables
expect(result.dependencyData).not.toBeNull();
});
it('should return null dependencyData for empty variables', () => {
const result = computeDerivedValues({});
expect(result.sortedVariablesArray).toEqual([]);
expect(result.dependencyData).toBeNull();
expect(result.variableTypes).toEqual({});
expect(result.dynamicVariableOrder).toEqual([]);
});
it('should handle all four variable types together', () => {
const variables: IDashboardVariables = {
queryVar: createVariable({
name: 'queryVar',
type: 'QUERY',
order: 0,
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(4);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'queryVar',
'customVar',
'dynVar',
'textVar',
]);
expect(result.variableTypes).toEqual({
queryVar: 'QUERY',
customVar: 'CUSTOM',
dynVar: 'DYNAMIC',
textVar: 'TEXTBOX',
});
expect(result.dynamicVariableOrder).toEqual(['dynVar']);
expect(result.dependencyData).not.toBeNull();
});
it('should sort variables by order regardless of insertion order', () => {
const variables: IDashboardVariables = {
z: createVariable({ name: 'z', type: 'QUERY', order: 4 }),
a: createVariable({ name: 'a', type: 'CUSTOM', order: 0 }),
m: createVariable({ name: 'm', type: 'DYNAMIC', order: 2 }),
b: createVariable({ name: 'b', type: 'TEXTBOX', order: 1 }),
x: createVariable({ name: 'x', type: 'QUERY', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'a',
'b',
'm',
'x',
'z',
]);
});
it('should include multiple dynamic variables in order', () => {
const variables: IDashboardVariables = {
dyn3: createVariable({ name: 'dyn3', type: 'DYNAMIC', order: 5 }),
query1: createVariable({ name: 'query1', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
custom1: createVariable({ name: 'custom1', type: 'CUSTOM', order: 2 }),
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']);
});
it('should build dependency data with query variable order for dependent queries', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
service: createVariable({
name: 'service',
type: 'QUERY',
order: 1,
queryValue: 'SELECT DISTINCT service FROM table WHERE env={{.env}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData } = result;
expect(dependencyData).not.toBeNull();
// env should appear in the dependency order (it's a root QUERY variable)
expect(dependencyData?.order).toContain('env');
// service depends on env, so it should also be in the order
expect(dependencyData?.order).toContain('service');
// env comes before service in topological order
const envIdx = dependencyData?.order.indexOf('env') ?? -1;
const svcIdx = dependencyData?.order.indexOf('service') ?? -1;
expect(envIdx).toBeLessThan(svcIdx);
});
it('should not include non-QUERY variables in dependency order', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.dependencyData).not.toBeNull();
// Only QUERY variables should be in the dependency order
result.dependencyData?.order.forEach((name) => {
expect(result.variableTypes[name]).toBe('QUERY');
});
});
it('should produce transitive descendants in dependency data', () => {
const variables: IDashboardVariables = {
region: createVariable({
name: 'region',
type: 'QUERY',
order: 0,
queryValue: 'SELECT region FROM table',
}),
cluster: createVariable({
name: 'cluster',
type: 'QUERY',
order: 1,
queryValue: 'SELECT cluster FROM table WHERE region={{.region}}',
}),
host: createVariable({
name: 'host',
type: 'QUERY',
order: 2,
queryValue: 'SELECT host FROM table WHERE cluster={{.cluster}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData: depData } = result;
expect(depData).not.toBeNull();
expect(depData?.transitiveDescendants).toBeDefined();
// region's transitive descendants should include cluster and host
expect(depData?.transitiveDescendants['region']).toEqual(
expect.arrayContaining(['cluster', 'host']),
);
});
it('should handle a single variable', () => {
const variables: IDashboardVariables = {
solo: createVariable({
name: 'solo',
type: 'QUERY',
order: 0,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(1);
expect(result.variableTypes).toEqual({ solo: 'QUERY' });
expect(result.dynamicVariableOrder).toEqual([]);
expect(result.dependencyData).not.toBeNull();
expect(result.dependencyData?.order).toEqual(['solo']);
});
it('should handle only non-QUERY variables', () => {
const variables: IDashboardVariables = {
custom1: createVariable({
name: 'custom1',
type: 'CUSTOM',
order: 0,
}),
text1: createVariable({
name: 'text1',
type: 'TEXTBOX',
order: 1,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 2,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(3);
// No QUERY variables, so dependency order should be empty
expect(result.dependencyData?.order).toEqual([]);
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
});
});
});

View File

@@ -1,4 +1,7 @@
import { isEmpty, isUndefined } from 'lodash-es';
import createStore from '../store';
import { VariableFetchContext } from '../variableFetchStore';
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
import {
computeDerivedValues,
@@ -10,6 +13,8 @@ const initialState: IDashboardVariablesStoreState = {
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
};
export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
@@ -55,3 +60,38 @@ export function updateDashboardVariablesStore({
updateDerivedValues(draft);
});
}
/**
* Read current store snapshot as VariableFetchContext.
* Used by components to pass context to variableFetchStore actions
* without creating a circular import.
*/
export function getVariableDependencyContext(): VariableFetchContext {
const state = dashboardVariablesStore.getSnapshot();
// If every variable already has a selectedValue (e.g. persisted from
// localStorage/URL), dynamic variables can start in parallel.
// Otherwise they wait for query vars to settle first.
const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
(variable) => {
if (
variable.type === 'DYNAMIC' &&
variable.selectedValue === null &&
variable.allSelected === true
) {
return true;
}
return (
!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
);
},
);
return {
doAllVariablesHaveValuesSelected,
variableTypes: state.variableTypes,
dynamicVariableOrder: state.dynamicVariableOrder,
dependencyData: state.dependencyData,
};
}
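For orientation, a minimal sketch of a consumer (hypothetical code, not part of this diff; the import path is illustrative): the context is read as a fresh snapshot on every call, which is what lets the fetch actions stay free of a circular import.
// Hypothetical consumer of the dependency context (illustrative only).
import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
function warnOnVariableCycle(): void {
	const ctx = getVariableDependencyContext();
	if (ctx.dependencyData?.hasCycle) {
		// A cycle among QUERY variables makes a topological fetch order impossible.
		console.warn('variable dependency cycle:', ctx.dependencyData.cycleNodes);
	}
}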

View File

@@ -1,11 +1,18 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
export type VariableGraph = Record<string, string[]>;
export interface IDependencyData {
order: string[];
// Direct children for each variable
graph: VariableGraph;
// Direct parents for each variable
parentDependencyGraph: VariableGraph;
// Pre-computed transitive descendants for each node (all reachable nodes, not just direct children)
transitiveDescendants: VariableGraph;
hasCycle: boolean;
cycleNodes?: string[];
}
@@ -24,6 +31,12 @@ export interface IDashboardVariablesStoreState {
// Derived: dependency data for QUERY variables
dependencyData: IDependencyData | null;
// Derived: variable name → type mapping
variableTypes: Record<string, TVariableQueryType>;
// Derived: display-ordered list of dynamic variable names
dynamicVariableOrder: string[];
}
export interface IUseDashboardVariablesReturn {

View File

@@ -2,9 +2,11 @@ import {
buildDependencies,
buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -44,6 +46,7 @@ export function buildDependencyData(
order,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
} = buildDependencyGraph(dependencies);
@@ -58,49 +61,62 @@ export function buildDependencyData(
order: queryVariableOrder,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
};
}
/**
* Initialize the variable fetch store with the computed dependency data
* Build a variable name → type mapping from sorted variables array
*/
function initializeFetchStore(
export function buildVariableTypesMap(
sortedVariablesArray: IDashboardVariable[],
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
): Record<string, TVariableQueryType> {
const types: Record<string, TVariableQueryType> = {};
sortedVariablesArray.forEach((v) => {
if (v.name) {
types[v.name] = v.type;
}
});
return types;
}
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
/**
* Build display-ordered list of dynamic variable names
*/
export function buildDynamicVariableOrder(
sortedVariablesArray: IDashboardVariable[],
): string[] {
return sortedVariablesArray
.filter((v) => v.type === 'DYNAMIC' && v.name)
.map((v) => v.name as string);
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
): Pick<
IDashboardVariablesStoreState,
'sortedVariablesArray' | 'dependencyData'
| 'sortedVariablesArray'
| 'dependencyData'
| 'variableTypes'
| 'dynamicVariableOrder'
> {
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
const variableTypes = buildVariableTypesMap(sortedVariablesArray);
const dynamicVariableOrder = buildDynamicVariableOrder(sortedVariablesArray);
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
return {
sortedVariablesArray,
dependencyData,
variableTypes,
dynamicVariableOrder,
};
}
/**
@@ -112,7 +128,8 @@ export function updateDerivedValues(
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
draft.variableTypes = buildVariableTypesMap(draft.sortedVariablesArray);
draft.dynamicVariableOrder = buildDynamicVariableOrder(
draft.sortedVariablesArray,
);
}

View File

@@ -1,6 +1,12 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from './dashboardVariables/dashboardVariablesStoreTypes';
import createStore from './store';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from './variableFetchStoreUtils';
// Fetch state for each variable
export type VariableFetchState =
@@ -14,19 +20,29 @@ export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
// Track last update timestamp per variable
lastUpdated: Record<string, number>;
// Per-variable cycle counter — bumped when a variable needs to refetch.
// Used in react-query keys to auto-cancel stale requests for that variable only.
cycleIds: Record<string, number>;
}
/**
* Context from dashboardVariablesStore needed by fetch actions.
* Passed as parameter to avoid circular imports.
*/
export type VariableFetchContext = Pick<
IDashboardVariablesStoreState,
'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
doAllVariablesHaveValuesSelected: boolean;
};
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
cycleIds: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
@@ -36,22 +52,183 @@ export const variableFetchStore = createStore<IVariableFetchStoreState>(
// ============== Actions ==============
/**
* Initialize the store with dependency graphs and set initial states
* Initialize the store with variable names.
* Called when dashboard variables change — sets up state entries.
*/
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
export function initializeVariableFetchStore(variableNames: string[]): void {
variableFetchStore.update((draft) => {
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving existing ready states
// Initialize all variables to idle, preserving existing states
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
// Clean up stale entries for variables that no longer exist
const nameSet = new Set(variableNames);
Object.keys(draft.states).forEach((name) => {
if (!nameSet.has(name)) {
delete draft.states[name];
delete draft.lastUpdated[name];
delete draft.cycleIds[name];
}
});
});
}
/**
* Start a full fetch cycle for all fetchable variables.
* Called on: initial load, time range change, or dependency graph change.
*
* Query variables with no query-type parents start immediately.
* Query variables with query-type parents get 'waiting'.
* Dynamic variables start immediately if all variables already have
* selectedValues (e.g. persisted from localStorage/URL). Otherwise they
* wait for all query variables to settle first.
*/
export function enqueueFetchOfAllVariables(): void {
const {
doAllVariablesHaveValuesSelected,
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { order: queryVariableOrder, parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
// Query variables: root ones start immediately, dependent ones wait
queryVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
const parents = parentDependencyGraph[name] || [];
const hasQueryParents = parents.some((p) => variableTypes[p] === 'QUERY');
if (hasQueryParents) {
draft.states[name] = 'waiting';
} else {
draft.states[name] = resolveFetchState(draft, name);
}
});
// Dynamic variables: start immediately if query variables have values,
// otherwise wait for query variables to settle first
dynamicVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
draft.states[name] = doAllVariablesHaveValuesSelected
? resolveFetchState(draft, name)
: 'waiting';
});
});
}
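To make these rules concrete, a short sketch (not part of this diff; env, service and dyn1 are invented names, and it assumes the stores were initialized with env as a root QUERY variable, service as a QUERY variable depending on env, and dyn1 as a DYNAMIC variable, with no persisted values):
import { enqueueFetchOfAllVariables, variableFetchStore } from './variableFetchStore';
enqueueFetchOfAllVariables();
const { states } = variableFetchStore.getSnapshot();
// env has no query-type parents and was never fetched -> 'loading'
console.assert(states.env === 'loading');
// service depends on env, a QUERY variable -> 'waiting'
console.assert(states.service === 'waiting');
// no persisted values, so dyn1 waits for the query variables to settle
console.assert(states.dyn1 === 'waiting');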
/**
* Mark a variable as completed. Unblocks waiting query-type children.
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchComplete(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'idle';
draft.lastUpdated[name] = Date.now();
if (!dependencyData) {
return;
}
const { graph } = dependencyData;
// Unblock waiting query-type children
const children = graph[name] || [];
children.forEach((child) => {
if (variableTypes[child] === 'QUERY' && draft.states[child] === 'waiting') {
draft.states[child] = resolveFetchState(draft, child);
}
});
// If all query variables are settled, unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
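Continuing the same hypothetical graph: completing a parent unblocks its waiting query-type children, and the last query variable to settle releases the dynamic ones.
import { onVariableFetchComplete, variableFetchStore } from './variableFetchStore';
onVariableFetchComplete('env');
// service was 'waiting' and has never been fetched -> 'loading'
console.assert(variableFetchStore.getSnapshot().states.service === 'loading');
onVariableFetchComplete('service');
// all QUERY variables are now settled, so dyn1 leaves 'waiting'
console.assert(variableFetchStore.getSnapshot().states.dyn1 === 'loading');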
/**
* Mark a variable as errored. Sets query-type descendants to idle
* (they can't proceed without this parent).
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchFailure(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'error';
if (!dependencyData) {
return;
}
// Set query-type descendants to idle (can't fetch without parent)
const descendants = dependencyData.transitiveDescendants[name] || [];
descendants.forEach((desc) => {
if (variableTypes[desc] === 'QUERY') {
draft.states[desc] = 'idle';
}
});
// If all query variables are settled (an error counts as settled), unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
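An alternative run of the same sketch in which env fails instead of completing; an error settles a variable just like success does.
import { onVariableFetchFailure, variableFetchStore } from './variableFetchStore';
onVariableFetchFailure('env');
const { states } = variableFetchStore.getSnapshot();
console.assert(states.env === 'error');
// service is a query-type descendant of env and cannot proceed -> 'idle'
console.assert(states.service === 'idle');
// every QUERY variable is settled (error or idle), so dyn1 unlocks
console.assert(states.dyn1 === 'loading');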
/**
* Cascade a value change to query-type descendants.
* Called when a user changes a variable's value (not from a fetch cycle).
*
* Direct children whose parents are all settled start immediately.
* Deeper descendants wait until their parents complete (BFS order
* ensures parents are set before children within a single update).
*/
export function enqueueDescendantsOfVariable(name: string): void {
const { dependencyData, variableTypes } = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
const descendants = dependencyData.transitiveDescendants[name] || [];
const queryDescendants = descendants.filter(
(desc) => variableTypes[desc] === 'QUERY',
);
queryDescendants.forEach((desc) => {
draft.cycleIds[desc] = (draft.cycleIds[desc] || 0) + 1;
const parents = parentDependencyGraph[desc] || [];
const allParentsSettled = parents.every((p) => isSettled(draft.states[p]));
draft.states[desc] = allParentsSettled
? resolveFetchState(draft, desc)
: 'waiting';
});
});
}
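A sketch of the cascade, reusing the region -> cluster -> host chain from the utils tests above (all three QUERY variables, all settled before the user edits region):
import { enqueueDescendantsOfVariable, variableFetchStore } from './variableFetchStore';
enqueueDescendantsOfVariable('region');
const { states } = variableFetchStore.getSnapshot();
// cluster's only parent (region) is settled -> fetches immediately
console.assert(states.cluster === 'loading' || states.cluster === 'revalidating');
// host's parent (cluster) just re-entered a fetching state -> 'waiting'
console.assert(states.host === 'waiting');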

View File

@@ -0,0 +1,46 @@
import { TVariableQueryType } from 'types/api/dashboard/getAll';
import {
IVariableFetchStoreState,
VariableFetchState,
} from './variableFetchStore';
export function isSettled(state: VariableFetchState | undefined): boolean {
return state === 'idle' || state === 'error';
}
/**
* Resolve the next fetch state based on whether the variable has been fetched before.
*/
export function resolveFetchState(
draft: IVariableFetchStoreState,
name: string,
): VariableFetchState {
return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}
/**
* Check if all query variables are settled (idle or error).
*/
export function areAllQueryVariablesSettled(
states: Record<string, VariableFetchState>,
variableTypes: Record<string, TVariableQueryType>,
): boolean {
return Object.entries(variableTypes)
.filter(([, type]) => type === 'QUERY')
.every(([name]) => isSettled(states[name]));
}
/**
 * Transition dynamic variables that are in the 'waiting' state to loading/revalidating.
*/
export function unlockWaitingDynamicVariables(
draft: IVariableFetchStoreState,
dynamicVariableOrder: string[],
): void {
dynamicVariableOrder.forEach((dynName) => {
if (draft.states[dynName] === 'waiting') {
draft.states[dynName] = resolveFetchState(draft, dynName);
}
});
}

View File

@@ -10,6 +10,27 @@ type Config struct {
type Templates struct {
Directory string `mapstructure:"directory"`
Format Format `mapstructure:"format"`
}
type Format struct {
Header Header `mapstructure:"header"`
Help Help `mapstructure:"help"`
Footer Footer `mapstructure:"footer"`
}
type Header struct {
Enabled bool `mapstructure:"enabled"`
LogoURL string `mapstructure:"logo_url"`
}
type Help struct {
Enabled bool `mapstructure:"enabled"`
Email string `mapstructure:"email"`
}
type Footer struct {
Enabled bool `mapstructure:"enabled"`
}
type SMTP struct {
@@ -45,6 +66,19 @@ func newConfig() factory.Config {
Enabled: false,
Templates: Templates{
Directory: "/root/templates",
Format: Format{
Header: Header{
Enabled: false,
LogoURL: "",
},
Help: Help{
Enabled: false,
Email: "",
},
Footer: Footer{
Enabled: false,
},
},
},
SMTP: SMTP{
Address: "localhost:25",

View File

@@ -15,6 +15,7 @@ type provider struct {
settings factory.ScopedProviderSettings
store emailtypes.TemplateStore
client *client.Client
config emailing.Config
}
func NewFactory() factory.ProviderFactory[emailing.Emailing, emailing.Config] {
@@ -55,7 +56,12 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
return nil, err
}
return &provider{settings: settings, store: store, client: client}, nil
return &provider{
settings: settings,
store: store,
client: client,
config: config,
}, nil
}
func (provider *provider) SendHTML(ctx context.Context, to string, subject string, templateName emailtypes.TemplateName, data map[string]any) error {
@@ -69,8 +75,19 @@ func (provider *provider) SendHTML(ctx context.Context, to string, subject strin
return err
}
// if no data is provided, create an empty map to prevent a panic when we add the format, to, and subject data
if data == nil {
data = make(map[string]any)
}
// the following keys are always set here, overriding any values supplied in the data map
data["format"] = provider.config.Templates.Format
data["to"] = to
data["subject"] = subject
content, err := emailtypes.NewContent(template, data)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to create email content", "error", err)
return err
}

View File

@@ -22,8 +22,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
type Module struct {
@@ -146,11 +144,9 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
continue
}
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You are invited to join a team in SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"CustomerName": invites[i].Name,
"InviterName": creator.DisplayName,
"InviterEmail": creator.Email,
"Link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"inviter_email": creator.Email,
"link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
@@ -261,18 +257,6 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
traits["updated_by"] = updatedBy
m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
// if the role is updated then send an email
if existingUser.Role != updatedUser.Role {
if err := m.emailing.SendHTML(ctx, existingUser.Email.String(), "Your Role Has Been Updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{
"CustomerName": existingUser.DisplayName,
"UpdatedByEmail": requestor.Email,
"OldRole": cases.Title(language.English).String(strings.ToLower(existingUser.Role.String())),
"NewRole": cases.Title(language.English).String(strings.ToLower(updatedUser.Role.String())),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
}
if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
return nil, err
}
@@ -394,10 +378,9 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
if err := module.emailing.SendHTML(
ctx,
user.Email.String(),
"Reset your SigNoz password",
"A Password Reset Was Requested for SigNoz",
emailtypes.TemplateNameResetPassword,
map[string]any{
"Name": user.DisplayName,
"Link": resetLink,
"Expiry": humanizedTokenLifetime,
},

View File

@@ -3261,20 +3261,14 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, org
metadata := metadataMap[name]
typ := string(metadata.MetricType)
temporality := string(metadata.Temporality)
isMonotonic := metadata.IsMonotonic
// Non-monotonic cumulative sums are treated as gauges
if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
typ = "Gauge"
}
// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
key := v3.AttributeKey{
Key: name,
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyType(typ),
IsColumn: true,
Key: name,
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyType(typ),
IsMonotonic: metadata.IsMonotonic,
IsColumn: true,
}
if _, ok := seen[name+typ]; ok {
@@ -5419,6 +5413,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
t.metric_name AS metric_name,
ANY_VALUE(t.description) AS description,
ANY_VALUE(t.type) AS metric_type,
ANY_VALUE(t.is_monotonic) AS metric_is_monotonic,
ANY_VALUE(t.unit) AS metric_unit,
uniq(t.fingerprint) AS timeseries,
uniq(metric_name) OVER() AS total
@@ -5450,7 +5445,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
for rows.Next() {
var metric metrics_explorer.MetricDetail
if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.IsMonotonic, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
zap.L().Error("Error scanning metric row", zap.Error(err))
return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
}

View File

@@ -36,6 +36,7 @@ type MetricDetail struct {
TimeSeries uint64 `json:"timeseries"`
Samples uint64 `json:"samples"`
LastReceived int64 `json:"lastReceived"`
IsMonotonic bool `json:"is_monotonic"`
}
type TreeMapResponseItem struct {

View File

@@ -381,11 +381,12 @@ func (t AttributeKeyType) String() string {
}
type AttributeKey struct {
Key string `json:"key"`
DataType AttributeKeyDataType `json:"dataType"`
Type AttributeKeyType `json:"type"`
IsColumn bool `json:"isColumn"`
IsJSON bool `json:"isJSON"`
Key string `json:"key"`
DataType AttributeKeyDataType `json:"dataType"`
Type AttributeKeyType `json:"type"`
IsColumn bool `json:"isColumn"`
IsMonotonic bool `json:"is_monotonic"`
IsJSON bool `json:"isJSON"`
}
func (a AttributeKey) CacheKey() string {

View File

@@ -625,7 +625,7 @@ func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) (map[string][]s
// FilterNewSeries filters out items that are too new based on metadata first_seen timestamps.
// Returns the filtered series (old ones) excluding new series that are still within the grace period.
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*qbtypes.TimeSeries) ([]*qbtypes.TimeSeries, error) {
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
// Extract metric names and groupBy keys
metricToGroupedFields, err := r.extractMetricAndGroupBys(ctx)
if err != nil {
@@ -642,7 +642,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
seriesIdxToLookupKeys := make(map[int][]telemetrytypes.MetricMetadataLookupKey) // series index -> lookup keys
for i := 0; i < len(series); i++ {
metricLabelMap := series[i].LabelsMap()
metricLabelMap := series[i].Labels
// Collect groupBy attribute-value pairs for this series
seriesKeys := make([]telemetrytypes.MetricMetadataLookupKey, 0)
@@ -689,7 +689,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
}
// Filter series based on first_seen + delay
filteredSeries := make([]*qbtypes.TimeSeries, 0, len(series))
filteredSeries := make([]*v3.Series, 0, len(series))
evalTimeMs := ts.UnixMilli()
newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()
@@ -727,7 +727,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
// Check if first_seen + delay has passed
if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
// Still within grace period, skip this series
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].LabelsMap())
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].Labels)
continue
}

View File

@@ -26,33 +26,33 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
)
// createTestSeries creates a *qbtypes.TimeSeries with the given labels and optional values
// createTestSeries creates a *v3.Series with the given labels and optional points
// the points are not strictly needed here: only the labels determine whether a series is new or old
// the labels form a lookup key for the series, whose first_seen timestamp is then checked in the metadata table
func createTestSeries(labels map[string]string, points []*qbtypes.TimeSeriesValue) *qbtypes.TimeSeries {
func createTestSeries(labels map[string]string, points []v3.Point) *v3.Series {
if points == nil {
points = []*qbtypes.TimeSeriesValue{}
points = []v3.Point{}
}
lbls := make([]*qbtypes.Label, 0, len(labels))
for k, v := range labels {
lbls = append(lbls, &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: k}, Value: v})
}
return &qbtypes.TimeSeries{
Labels: lbls,
Values: points,
return &v3.Series{
Labels: labels,
Points: points,
}
}
// seriesEqual compares two *qbtypes.TimeSeries by their labels
// seriesEqual compares two v3.Series by their labels
// Returns true if the series have the same labels (order doesn't matter)
func seriesEqual(s1, s2 *qbtypes.TimeSeries) bool {
m1 := s1.LabelsMap()
m2 := s2.LabelsMap()
if len(m1) != len(m2) {
func seriesEqual(s1, s2 *v3.Series) bool {
if s1 == nil && s2 == nil {
return true
}
if s1 == nil || s2 == nil {
return false
}
for k, v := range m1 {
if m2[k] != v {
if len(s1.Labels) != len(s2.Labels) {
return false
}
for k, v := range s1.Labels {
if s2.Labels[k] != v {
return false
}
}
@@ -149,11 +149,11 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
type filterNewSeriesTestCase struct {
name string
compositeQuery *v3.CompositeQuery
series []*qbtypes.TimeSeries
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*qbtypes.TimeSeries // series that should be in the final filtered result (old enough)
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
}
@@ -193,7 +193,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
@@ -205,7 +205,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
}, // svc-old and svc-missing should be included; svc-new is filtered out
@@ -227,7 +227,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
},
@@ -237,7 +237,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{}, // all should be filtered out (new series)
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
{
name: "all old series - ClickHouse query",
@@ -254,7 +254,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
},
@@ -264,7 +264,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
}, // all should be included (old series)
@@ -292,13 +292,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -322,13 +322,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -358,13 +358,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
}, // series included as we can't decide if it's new or old
},
@@ -385,7 +385,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
},
@@ -393,7 +393,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
}, // both should be included - svc-old is old, svc-no-metadata can't be decided
@@ -413,7 +413,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
},
// Only provide metadata for service_name, not env
@@ -423,7 +423,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
}, // has some metadata, uses max first_seen which is old
},
@@ -453,11 +453,11 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{},
expectedFiltered: []*v3.Series{},
},
{
name: "zero delay - Builder",
@@ -485,13 +485,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // with zero delay, all series pass
},
@@ -526,7 +526,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: mergeFirstSeenMaps(
@@ -535,7 +535,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
},
@@ -565,7 +565,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
// service_name is old, env is new - should use max (new)
@@ -575,7 +575,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{}, // max first_seen is new, so should be filtered out
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
{
name: "Logs query - should skip filtering and return empty skip indexes",
@@ -600,14 +600,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Logs queries should return early, no filtering - all included
@@ -635,14 +635,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*qbtypes.TimeSeries{
series: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*qbtypes.TimeSeries{
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Traces queries should return early, no filtering - all included
@@ -724,14 +724,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// Build a map to count occurrences of each unique label combination in expected series
expectedCounts := make(map[string]int)
for _, expected := range tt.expectedFiltered {
key := labelsKey(expected.LabelsMap())
key := labelsKey(expected.Labels)
expectedCounts[key]++
}
// Build a map to count occurrences of each unique label combination in filtered series
actualCounts := make(map[string]int)
for _, filtered := range filteredSeries {
key := labelsKey(filtered.LabelsMap())
key := labelsKey(filtered.Labels)
actualCounts[key]++
}

View File

@@ -12,18 +12,19 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/formatter"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
)
type PromRule struct {
*BaseRule
version string
prometheus prometheus.Prometheus
}
@@ -47,6 +48,7 @@ func NewPromRule(
p := PromRule{
BaseRule: baseRule,
version: postableRule.Version,
prometheus: prometheus,
}
p.logger = logger
@@ -81,30 +83,48 @@ func (r *PromRule) GetSelectedQuery() string {
}
func (r *PromRule) getPqlQuery() (string, error) {
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
if r.version == "v5" {
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
}
}
}
return "", fmt.Errorf("invalid promql rule setup")
}
return "", fmt.Errorf("invalid promql rule setup")
if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
if len(r.ruleCondition.CompositeQuery.PromQueries) > 0 {
selectedQuery := r.GetSelectedQuery()
if promQuery, ok := r.ruleCondition.CompositeQuery.PromQueries[selectedQuery]; ok {
query := promQuery.Query
if query == "" {
return query, fmt.Errorf("a promquery needs to be set for this rule to function")
}
return query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule query")
}
func matrixToTimeSeries(res promql.Matrix) []*qbtypes.TimeSeries {
result := make([]*qbtypes.TimeSeries, 0, len(res))
func (r *PromRule) matrixToV3Series(res promql.Matrix) []*v3.Series {
v3Series := make([]*v3.Series, 0, len(res))
for _, series := range res {
result = append(result, promSeriesToTimeSeries(series))
commonSeries := toCommonSeries(series)
v3Series = append(v3Series, &commonSeries)
}
return result
return v3Series
}
func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
@@ -123,31 +143,31 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
return nil, err
}
seriesToProcess := matrixToTimeSeries(res)
matrixToProcess := r.matrixToV3Series(res)
hasData := len(seriesToProcess) > 0
hasData := len(matrixToProcess) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
// Filter out new series if newGroupEvalDelay is configured
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
} else {
seriesToProcess = filteredSeries
matrixToProcess = filteredSeries
}
}
var resultVector ruletypes.Vector
for _, series := range seriesToProcess {
for _, series := range matrixToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(
ctx, "not enough data points to evaluate series, skipping",
"rule_id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
"rule_id", r.ID(), "num_points", len(series.Points), "required_points", r.Condition().RequiredNumPoints,
)
continue
}
@@ -434,25 +454,26 @@ func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time
}
}
func promSeriesToTimeSeries(series promql.Series) *qbtypes.TimeSeries {
ts := &qbtypes.TimeSeries{
Labels: make([]*qbtypes.Label, 0, len(series.Metric)),
Values: make([]*qbtypes.TimeSeriesValue, 0, len(series.Floats)),
func toCommonSeries(series promql.Series) v3.Series {
commonSeries := v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0),
}
for _, lbl := range series.Metric {
ts.Labels = append(ts.Labels, &qbtypes.Label{
Key: telemetrytypes.TelemetryFieldKey{Name: lbl.Name},
Value: lbl.Value,
commonSeries.Labels[lbl.Name] = lbl.Value
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
lbl.Name: lbl.Value,
})
}
for _, f := range series.Floats {
ts.Values = append(ts.Values, &qbtypes.TimeSeriesValue{
commonSeries.Points = append(commonSeries.Points, v3.Point{
Timestamp: f.T,
Value: f.F,
})
}
return ts
return commonSeries
}

View File

@@ -20,7 +20,6 @@ import (
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -48,13 +47,9 @@ func TestPromRuleEval(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "dummy_query", // This is not used in the test
},
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "dummy_query", // This is not used in the test
},
},
},
@@ -67,7 +62,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp string
matchType string
target float64
expectedAlertSample float64
expectedAlertSample v3.Point
expectedVectorValues []float64 // Expected values in result vector
}{
// Test cases for Equals Always
@@ -85,7 +80,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedVectorValues: []float64{0.0},
},
{
@@ -150,7 +145,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedVectorValues: []float64{0.0},
},
{
@@ -167,7 +162,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
},
{
values: pql.Series{
@@ -183,7 +178,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
},
{
values: pql.Series{
@@ -216,7 +211,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "2", // Always
target: 1.5,
expectedAlertSample: 2.0,
expectedAlertSample: v3.Point{Value: 2.0},
expectedVectorValues: []float64{2.0},
},
{
@@ -233,7 +228,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Above
matchType: "2", // Always
target: 2.0,
expectedAlertSample: 3.0,
expectedAlertSample: v3.Point{Value: 3.0},
},
{
values: pql.Series{
@@ -249,7 +244,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Below
matchType: "2", // Always
target: 13.0,
expectedAlertSample: 12.0,
expectedAlertSample: v3.Point{Value: 12.0},
},
{
values: pql.Series{
@@ -281,7 +276,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "1", // Once
target: 4.5,
expectedAlertSample: 10.0,
expectedAlertSample: v3.Point{Value: 10.0},
expectedVectorValues: []float64{10.0},
},
{
@@ -344,7 +339,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: 1.0,
expectedAlertSample: v3.Point{Value: 1.0},
},
{
values: pql.Series{
@@ -376,7 +371,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: 1.0,
expectedAlertSample: v3.Point{Value: 1.0},
},
{
values: pql.Series{
@@ -407,7 +402,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: 1.0,
expectedAlertSample: v3.Point{Value: 1.0},
},
{
values: pql.Series{
@@ -423,7 +418,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: 1.0,
expectedAlertSample: v3.Point{Value: 1.0},
},
// Test cases for Less Than Always
{
@@ -440,7 +435,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "2", // Always
target: 4,
expectedAlertSample: 1.5,
expectedAlertSample: v3.Point{Value: 1.5},
},
{
values: pql.Series{
@@ -472,7 +467,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "1", // Once
target: 4,
expectedAlertSample: 2.5,
expectedAlertSample: v3.Point{Value: 2.5},
},
{
values: pql.Series{
@@ -504,7 +499,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "3", // OnAverage
target: 6.0,
expectedAlertSample: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
},
{
values: pql.Series{
@@ -535,7 +530,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
},
{
values: pql.Series{
@@ -566,7 +561,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
},
{
values: pql.Series{
@@ -582,7 +577,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "3", // OnAverage
target: 12.0,
expectedAlertSample: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
},
// Test cases for InTotal
{
@@ -599,7 +594,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: 30.0,
expectedAlertSample: v3.Point{Value: 30.0},
},
{
values: pql.Series{
@@ -626,7 +621,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "4", // InTotal
target: 9.0,
expectedAlertSample: 10.0,
expectedAlertSample: v3.Point{Value: 10.0},
},
{
values: pql.Series{
@@ -650,7 +645,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "4", // InTotal
target: 10.0,
expectedAlertSample: 20.0,
expectedAlertSample: v3.Point{Value: 20.0},
},
{
values: pql.Series{
@@ -675,7 +670,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: 20.0,
expectedAlertSample: v3.Point{Value: 20.0},
},
{
values: pql.Series{
@@ -713,7 +708,7 @@ func TestPromRuleEval(t *testing.T) {
assert.NoError(t, err)
}
resultVectors, err := rule.Threshold.Eval(*promSeriesToTimeSeries(c.values), rule.Unit(), ruletypes.EvalData{})
resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
assert.NoError(t, err)
// Compare full result vector with expected vector
@@ -729,12 +724,12 @@ func TestPromRuleEval(t *testing.T) {
if len(resultVectors) > 0 {
found := false
for _, sample := range resultVectors {
if sample.V == c.expectedAlertSample {
if sample.V == c.expectedAlertSample.Value {
found = true
break
}
}
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample, idx, actualValues)
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues)
}
} else {
assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
@@ -759,13 +754,9 @@ func TestPromRuleUnitCombinations(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
},
},
},
@@ -1022,13 +1013,9 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
},
},
},
@@ -1137,13 +1124,9 @@ func TestMultipleThresholdPromRule(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
},
},
},
@@ -1378,11 +1361,8 @@ func TestPromRule_NoData(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1506,11 +1486,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1658,11 +1635,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
},
},
},


@@ -1,24 +1,38 @@
package rules
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log/slog"
"math"
"net/url"
"reflect"
"text/template"
"time"
"github.com/SigNoz/signoz/pkg/contextlinks"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/formatter"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -28,9 +42,23 @@ import (
type ThresholdRule struct {
*BaseRule
// Ever since we introduced the new metrics query builder, the version is "v4"
// for all the rules
// if the version is "v3", then we use the old querier
// if the version is "v4", then we use the new querierV2
version string
// querierV5 is the query builder v5 querier used for all alert rule evaluation
// querier is used for alerts created before the introduction of new metrics query builder
querier interfaces.Querier
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
querierV5 querierV5.Querier
// used for attribute metadata enrichment for logs and traces
logsKeys map[string]v3.AttributeKey
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
@@ -54,10 +82,25 @@ func NewThresholdRule(
}
t := ThresholdRule{
BaseRule: baseRule,
querierV5: querierV5,
BaseRule: baseRule,
version: p.Version,
}
querierOption := querier.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querier = querier.NewQuerier(querierOption)
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.querierV5 = querierV5
t.reader = reader
return &t, nil
}
@@ -77,9 +120,169 @@ func (r *ThresholdRule) Type() ruletypes.RuleType {
return ruletypes.RuleTypeThreshold
}
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
r.logger.InfoContext(
ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
params := &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: &v3.CompositeQuery{
QueryType: r.ruleCondition.CompositeQuery.QueryType,
PanelType: r.ruleCondition.CompositeQuery.PanelType,
BuilderQueries: make(map[string]*v3.BuilderQuery),
ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
PromQueries: make(map[string]*v3.PromQuery),
Unit: r.ruleCondition.CompositeQuery.Unit,
},
Variables: make(map[string]interface{}),
NoCache: true,
}
querytemplate.AssignReservedVarsV3(params)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
return nil, err
}
var query bytes.Buffer
err = tmpl.Execute(&query, params.Variables)
if err != nil {
return nil, err
}
params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
Query: query.String(),
Disabled: chQuery.Disabled,
Legend: chQuery.Legend,
}
}
return params, nil
}
if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
q.StepInterval = minStep
}
q.SetShiftByFromFunc()
if q.DataSource == v3.DataSourceMetrics {
// if the time range is greater than 1 day and less than 1 week, set the step interval to a multiple of 5 minutes
// if the time range is greater than 1 week, set the step interval to a multiple of 30 minutes
if end-start >= 24*time.Hour.Milliseconds() && end-start < 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/300)) * 300
} else if end-start >= 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/1800)) * 1800
}
}
}
}
if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: r.ruleCondition.CompositeQuery,
Variables: make(map[string]interface{}),
NoCache: true,
}, nil
}
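
A quick worked sketch of the step snapping applied to metrics queries above (numbers illustrative):

// Sketch: step snapping for metrics queries.
//   1d <= range < 7d  -> nearest multiple of 300s (5m)
//   range >= 7d       -> nearest multiple of 1800s (30m)
step := int64(437)                                    // e.g. a computed min step for a ~2-day window
snapped := int64(math.Round(float64(step)/300)) * 300 // 437/300 ≈ 1.46 -> 1 -> 300
// A step of 451 would snap up instead: 451/300 ≈ 1.50 -> 2 -> 600.
_ = snapped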
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToLogsV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceLogs {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)
return contextlinks.PrepareLinksToLogs(start, end, filterItems)
}
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToTracesV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceTraces {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)
return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}
func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
@@ -99,10 +302,10 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*q
return req, nil
}
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
qr, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return ""
}
@@ -139,10 +342,10 @@ func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lb
return contextlinks.PrepareLinksToLogsV5(start, end, whereClause)
}
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
qr, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return ""
}
@@ -188,6 +391,115 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
hasTracesQuery := false
for _, query := range params.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceLogs {
hasLogsQuery = true
}
if query.DataSource == v3.DataSourceTraces {
hasTracesQuery = true
}
}
if hasLogsQuery {
// check if any enrichment is required for logs; if yes, enrich them
if logsv3.EnrichmentRequired(params) {
logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if apiErr != nil {
return nil, apiErr.ToError()
}
logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
r.logsKeys = logsKeys
logsv3.Enrich(params, logsKeys)
}
}
if hasTracesQuery {
spanKeys, err := r.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if err != nil {
return nil, err
}
r.spansKeys = spanKeys
tracesV4.Enrich(params, spanKeys)
}
}
var results []*v3.Result
var queryErrors map[string]error
if r.version == "v4" {
results, queryErrors, err = r.querierV2.QueryRange(ctx, orgID, params)
} else {
results, queryErrors, err = r.querier.QueryRange(ctx, orgID, params)
}
if err != nil {
r.logger.ErrorContext(ctx, "failed to get alert query range result", "rule_name", r.Name(), "error", err, "query_errors", queryErrors)
return nil, fmt.Errorf("internal error while querying")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
results, err = postprocess.PostProcessResult(results, params)
if err != nil {
r.logger.ErrorContext(ctx, "failed to post process result", "rule_name", r.Name(), "error", err)
return nil, fmt.Errorf("internal error while post processing")
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
if queryResult == nil {
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
return resultVector, nil
}
for _, series := range queryResult.Series {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, resultSeries...)
}
return resultVector, nil
}
func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
var results []*v3.Result
v5Result, err := r.querierV5.QueryRange(ctx, orgID, params)
if err != nil {
@@ -195,24 +507,26 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
return nil, fmt.Errorf("internal error while querying")
}
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok {
results = append(results, transition.ConvertV5TimeSeriesDataToV4Result(tsData))
} else {
// NOTE: this should not happen, but we log it so we don't miss it if it does
r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got", "item_type", reflect.TypeOf(item))
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *qbtypes.TimeSeriesData
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok && tsData.QueryName == selectedQuery {
queryResult = tsData
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
break
}
}
var allSeries []*qbtypes.TimeSeries
if queryResult != nil {
for _, bucket := range queryResult.Aggregations {
allSeries = append(allSeries, bucket.Series...)
}
}
hasData := len(allSeries) > 0
hasData := queryResult != nil && len(queryResult.Series) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
@@ -225,7 +539,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
}
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := allSeries
seriesToProcess := queryResult.Series
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error, we log the error and continue with the original series
@@ -238,7 +552,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -259,7 +573,16 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
res, err := r.buildAndRunQuery(ctx, r.orgID, ts)
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
}
if err != nil {
return 0, err

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -167,7 +167,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
)
}


@@ -1,194 +0,0 @@
package sqlmigration
import (
"context"
"database/sql"
"encoding/json"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type migrateRulesV4ToV5 struct {
store sqlstore.SQLStore
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
func NewMigrateRulesV4ToV5Factory(
store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("migrate_rules_v4_to_v5"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &migrateRulesV4ToV5{
store: store,
telemetryStore: telemetryStore,
logger: ps.Logger,
}, nil
})
}
func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT name
FROM (
SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
INTERSECT
SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
)
ORDER BY name
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT tagKey
FROM signoz_traces.distributed_span_attributes_keys
WHERE tagType IN ('tag', 'resource')
GROUP BY tagKey
HAVING COUNT(DISTINCT tagType) > 1
ORDER BY tagKey
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
logsKeys, err := migration.getLogDuplicateKeys(ctx)
if err != nil {
return err
}
tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
if err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var rules []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err = tx.NewSelect().
Table("rule").
Column("id", "data").
Scan(ctx, &rules)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, rule := range rules {
version, _ := rule.Data["version"].(string)
if version == "v5" {
continue
}
migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)
// Check if the queries envelope already exists and is non-empty
hasQueriesEnvelope := false
if condition, ok := rule.Data["condition"].(map[string]any); ok {
if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
hasQueriesEnvelope = true
}
}
}
if hasQueriesEnvelope {
// Case 2: Already has queries envelope, just bump version
migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
rule.Data["version"] = "v5"
} else {
// Case 1: Old format, run full migration
migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
alertsMigrator.Migrate(ctx, rule.Data)
// Force version to v5 regardless of Migrate return value
rule.Data["version"] = "v5"
}
dataJSON, err := json.Marshal(rule.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("rule").
Set("data = ?", string(dataJSON)).
Where("id = ?", rule.ID).
Exec(ctx)
if err != nil {
return err
}
}
return tx.Commit()
}
func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
return nil
}

pkg/transition/v5_to_v4.go (new file, 102 lines)

@@ -0,0 +1,102 @@
package transition
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// ConvertV5TimeSeriesDataToV4Result converts v5 TimeSeriesData to v4 Result
func ConvertV5TimeSeriesDataToV4Result(v5Data *qbtypes.TimeSeriesData) *v3.Result {
if v5Data == nil {
return nil
}
result := &v3.Result{
QueryName: v5Data.QueryName,
Series: make([]*v3.Series, 0),
}
toV4Series := func(ts *qbtypes.TimeSeries) *v3.Series {
series := &v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0, len(ts.Values)),
}
for _, label := range ts.Labels {
valueStr := fmt.Sprintf("%v", label.Value)
series.Labels[label.Key.Name] = valueStr
}
if len(series.Labels) > 0 {
series.LabelsArray = append(series.LabelsArray, series.Labels)
}
for _, tsValue := range ts.Values {
if tsValue.Partial {
continue
}
point := v3.Point{
Timestamp: tsValue.Timestamp,
Value: tsValue.Value,
}
series.Points = append(series.Points, point)
}
return series
}
for _, aggBucket := range v5Data.Aggregations {
for _, ts := range aggBucket.Series {
result.Series = append(result.Series, toV4Series(ts))
}
if len(aggBucket.AnomalyScores) != 0 {
result.AnomalyScores = make([]*v3.Series, 0)
for _, ts := range aggBucket.AnomalyScores {
result.AnomalyScores = append(result.AnomalyScores, toV4Series(ts))
}
}
if len(aggBucket.PredictedSeries) != 0 {
result.PredictedSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.PredictedSeries {
result.PredictedSeries = append(result.PredictedSeries, toV4Series(ts))
}
}
if len(aggBucket.LowerBoundSeries) != 0 {
result.LowerBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.LowerBoundSeries {
result.LowerBoundSeries = append(result.LowerBoundSeries, toV4Series(ts))
}
}
if len(aggBucket.UpperBoundSeries) != 0 {
result.UpperBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.UpperBoundSeries {
result.UpperBoundSeries = append(result.UpperBoundSeries, toV4Series(ts))
}
}
}
return result
}
// ConvertV5TimeSeriesDataSliceToV4Results converts a slice of v5 TimeSeriesData to v4 QueryRangeResponse
func ConvertV5TimeSeriesDataSliceToV4Results(v5DataSlice []*qbtypes.TimeSeriesData) *v3.QueryRangeResponse {
response := &v3.QueryRangeResponse{
ResultType: "matrix", // Time series data is typically "matrix" type
Result: make([]*v3.Result, 0, len(v5DataSlice)),
}
for _, v5Data := range v5DataSlice {
if result := ConvertV5TimeSeriesDataToV4Result(v5Data); result != nil {
response.Result = append(response.Result, result)
}
}
return response
}
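
A short usage sketch of the new converter (the bucket element type is assumed to be qbtypes.AggregationBucket; the other names are taken from the converter above, and the sample series is illustrative):

// Sketch: flattening one v5 TimeSeriesData result back into the v4 shape.
v5 := &qbtypes.TimeSeriesData{
	QueryName: "A",
	Aggregations: []*qbtypes.AggregationBucket{{
		Series: []*qbtypes.TimeSeries{{
			Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "checkout"}},
			Values: []*qbtypes.TimeSeriesValue{
				{Timestamp: 1000, Value: 0.5},
				{Timestamp: 2000, Value: 0.7, Partial: true}, // dropped by the converter
			},
		}},
	}},
}
v4 := transition.ConvertV5TimeSeriesDataToV4Result(v5)
// v4.QueryName == "A"; v4.Series[0].Points holds only the non-partial point,
// and v4.Series[0].Labels == map[string]string{"service": "checkout"}.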


@@ -12,13 +12,12 @@ import (
var (
// Templates is a list of all the templates that are supported by the emailing service.
// This list should be updated whenever a new template is added.
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameUpdateRole, TemplateNameResetPassword}
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameResetPassword}
)
var (
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation_email")}
TemplateNameUpdateRole = TemplateName{valuer.NewString("update_role")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password_email")}
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password")}
)
type TemplateName struct{ valuer.String }
@@ -27,8 +26,6 @@ func NewTemplateName(name string) (TemplateName, error) {
switch name {
case TemplateNameInvitationEmail.StringValue():
return TemplateNameInvitationEmail, nil
case TemplateNameUpdateRole.StringValue():
return TemplateNameUpdateRole, nil
case TemplateNameResetPassword.StringValue():
return TemplateNameResetPassword, nil
default:
@@ -40,7 +37,7 @@ func NewContent(template *template.Template, data map[string]any) ([]byte, error
buf := bytes.NewBuffer(nil)
err := template.Execute(buf, data)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute template")
return nil, err
}
return buf.Bytes(), nil
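
For context, a hedged sketch of how these helpers compose in the emailing service (the emailtypes package qualifier, parsedTemplates lookup, and data keys are assumptions for illustration, not part of this diff):

// Sketch: resolve a renamed template and render it.
name, err := emailtypes.NewTemplateName("invitation") // was "invitation_email" before this change
if err != nil {
	return err
}
tmpl := parsedTemplates[name] // hypothetical map owned by the emailing service
body, err := emailtypes.NewContent(tmpl, map[string]any{"Link": inviteLink}) // inviteLink is illustrative
if err != nil {
	return err // now returned unwrapped, per the change above
}
_ = body // rendered HTML handed to the SMTP sender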


@@ -76,33 +76,6 @@ type TimeSeries struct {
Values []*TimeSeriesValue `json:"values"`
}
// LabelsMap converts the label slice to a map[string]string for use in
// alert evaluation helpers that operate on flat label maps.
func (ts *TimeSeries) LabelsMap() map[string]string {
if ts == nil {
return nil
}
m := make(map[string]string, len(ts.Labels))
for _, l := range ts.Labels {
m[l.Key.Name] = fmt.Sprintf("%v", l.Value)
}
return m
}
// NonPartialValues returns only the values where Partial is false.
func (ts *TimeSeries) NonPartialValues() []*TimeSeriesValue {
if ts == nil {
return nil
}
result := make([]*TimeSeriesValue, 0, len(ts.Values))
for _, v := range ts.Values {
if !v.Partial {
result = append(result, v)
}
}
return result
}
type Label struct {
Key telemetrytypes.TelemetryFieldKey `json:"key"`
Value any `json:"value"`


@@ -203,11 +203,11 @@ func (rc *RuleCondition) IsValid() bool {
}
// ShouldEval checks if the further series should be evaluated at all for alerts.
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
func (rc *RuleCondition) ShouldEval(series *v3.Series) bool {
if rc == nil {
return true
}
return !rc.RequireMinPoints || len(series.NonPartialValues()) >= rc.RequiredNumPoints
return !rc.RequireMinPoints || len(series.Points) >= rc.RequiredNumPoints
}
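
The practical effect of the ShouldEval change, as a minimal in-package sketch (values illustrative):

// Sketch: min-point gating now counts v3 points directly.
rc := &RuleCondition{RequireMinPoints: true, RequiredNumPoints: 3}
s := &v3.Series{Points: []v3.Point{{Timestamp: 1000, Value: 1}, {Timestamp: 2000, Value: 2}}}
_ = rc.ShouldEval(s) // false: 2 points < 3 required
// With RequireMinPoints unset (or a nil condition), ShouldEval returns true.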
// QueryType is a shorthand method to get query type


@@ -355,14 +355,6 @@ func (r *PostableRule) validate() error {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
}
if r.Version != "" && r.Version != "v5" {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
}
if r.Version == "v5" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) == 0 {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "queries envelope is required in compositeQuery"))
}
if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
}


@@ -8,8 +8,6 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestIsAllQueriesDisabled(t *testing.T) {
@@ -623,9 +621,9 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
}
// Test that threshold can evaluate properly
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"}},
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: map[string]string{"test": "label"},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -700,9 +698,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}
// Test with a value that should trigger both WARNING and CRITICAL thresholds
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: map[string]string{"service": "test"},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -710,9 +708,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
assert.Equal(t, 2, len(vector))
vector, err = threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
vector, err = threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: map[string]string{"service": "test"},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -725,7 +723,7 @@ func TestAnomalyNegationEval(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series qbtypes.TimeSeries
series v3.Series
shouldAlert bool
expectedValue float64
}{
@@ -753,9 +751,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
@@ -787,9 +785,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
@@ -820,9 +818,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
@@ -854,9 +852,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
@@ -887,9 +885,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below and all the times
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
@@ -922,9 +920,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
@@ -956,10 +954,10 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
@@ -990,9 +988,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
@@ -1024,9 +1022,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 60.0}, // below, should alert
{Timestamp: 2000, Value: 90.0},
},


@@ -8,8 +8,8 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -83,7 +83,7 @@ func (eval EvalData) HasActiveAlert(sampleLabelFp uint64) bool {
type RuleThreshold interface {
// Eval runs the given series through the threshold rules
// using the given EvalData and returns the matching series
Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error)
Eval(series v3.Series, unit string, evalData EvalData) (Vector, error)
GetRuleReceivers() []RuleReceivers
}
@@ -122,11 +122,10 @@ func (r BasicRuleThresholds) Validate() error {
return errors.Join(errs...)
}
func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalData) (Vector, error) {
var resultVector Vector
thresholds := []BasicRuleThreshold(r)
sortThresholds(thresholds)
seriesLabels := series.LabelsMap()
for _, threshold := range thresholds {
smpl, shouldAlert := threshold.shouldAlert(series, unit)
if shouldAlert {
@@ -138,15 +137,15 @@ func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalDa
resultVector = append(resultVector, smpl)
continue
} else if evalData.SendUnmatched {
// Sanitise the series values to remove any NaN, Inf, or partial values
values := filterValidValues(series.Values)
if len(values) == 0 {
// Sanitise the series points to remove any NaN or Inf values
series.Points = removeGroupinSetPoints(series)
if len(series.Points) == 0 {
continue
}
// prepare the sample with the first value of the series
// prepare the sample with the first point of the series
smpl := Sample{
Point: Point{T: values[0].Timestamp, V: values[0].Value},
Metric: PrepareSampleLabelsForRule(seriesLabels, threshold.Name),
Point: Point{T: series.Points[0].Timestamp, V: series.Points[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
}
@@ -161,7 +160,7 @@ func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalDa
if threshold.RecoveryTarget == nil {
continue
}
sampleLabels := PrepareSampleLabelsForRule(seriesLabels, threshold.Name)
sampleLabels := PrepareSampleLabelsForRule(series.Labels, threshold.Name)
alertHash := sampleLabels.Hash()
// check if alert is active and then check if recovery threshold matches
if evalData.HasActiveAlert(alertHash) {
@@ -256,23 +255,18 @@ func (b BasicRuleThreshold) Validate() error {
return errors.Join(errs...)
}
func (b BasicRuleThreshold) matchesRecoveryThreshold(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) matchesRecoveryThreshold(series v3.Series, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.recoveryTarget(ruleUnit))
}
func (b BasicRuleThreshold) shouldAlert(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlert(series v3.Series, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.target(ruleUnit))
}
// filterValidValues returns only the values that are valid for alert evaluation:
// non-partial, non-NaN, non-Inf, and with non-negative timestamps.
func filterValidValues(values []*qbtypes.TimeSeriesValue) []*qbtypes.TimeSeriesValue {
var result []*qbtypes.TimeSeriesValue
for _, v := range values {
if v.Partial {
continue
}
if v.Timestamp >= 0 && !math.IsNaN(v.Value) && !math.IsInf(v.Value, 0) {
result = append(result, v)
func removeGroupinSetPoints(series v3.Series) []v3.Point {
var result []v3.Point
for _, s := range series.Points {
if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
result = append(result, s)
}
}
return result
@@ -290,15 +284,15 @@ func PrepareSampleLabelsForRule(seriesLabels map[string]string, thresholdName st
return lb.Labels()
}
func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, target float64) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float64) (Sample, bool) {
var shouldAlert bool
var alertSmpl Sample
lbls := PrepareSampleLabelsForRule(series.LabelsMap(), b.Name)
lbls := PrepareSampleLabelsForRule(series.Labels, b.Name)
values := filterValidValues(series.Values)
series.Points = removeGroupinSetPoints(series)
// nothing to evaluate
if len(values) == 0 {
if len(series.Points) == 0 {
return alertSmpl, false
}
@@ -306,7 +300,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
case AtleastOnce:
// If any sample matches the condition, the rule is firing.
if b.CompareOp == ValueIsAbove {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value > target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -314,7 +308,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -322,7 +316,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value == target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -330,7 +324,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value != target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -338,7 +332,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range values {
for _, smpl := range series.Points {
if math.Abs(smpl.Value) >= target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -351,7 +345,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
shouldAlert = true
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value <= target {
shouldAlert = false
break
@@ -360,7 +354,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
// use min value from the series
if shouldAlert {
var minValue float64 = math.Inf(1)
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value < minValue {
minValue = smpl.Value
}
@@ -368,7 +362,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value >= target {
shouldAlert = false
break
@@ -376,7 +370,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
if shouldAlert {
var maxValue float64 = math.Inf(-1)
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value > maxValue {
maxValue = smpl.Value
}
@@ -384,14 +378,14 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value != target {
shouldAlert = false
break
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range values {
for _, smpl := range series.Points {
if smpl.Value == target {
shouldAlert = false
break
@@ -399,7 +393,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
// use any non-inf or nan value from the series
if shouldAlert {
for _, smpl := range values {
for _, smpl := range series.Points {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
break
@@ -407,7 +401,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range values {
for _, smpl := range series.Points {
if math.Abs(smpl.Value) < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
@@ -418,7 +412,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
case OnAverage:
// If the average of all samples matches the condition, the rule is firing.
var sum, count float64
for _, smpl := range values {
for _, smpl := range series.Points {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -452,7 +446,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
// If the sum of all samples matches the condition, the rule is firing.
var sum float64
for _, smpl := range values {
for _, smpl := range series.Points {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -483,22 +477,21 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, tar
case Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
lastValue := values[len(values)-1].Value
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
if lastValue > target {
if series.Points[len(series.Points)-1].Value > target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsBelow {
if lastValue < target {
if series.Points[len(series.Points)-1].Value < target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsEq {
if lastValue == target {
if series.Points[len(series.Points)-1].Value == target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsNotEq {
if lastValue != target {
if series.Points[len(series.Points)-1].Value != target {
shouldAlert = true
}
}
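
Tying the pieces together, a minimal in-package sketch of a threshold evaluation with unit conversion (field values mirror the tests that follow; treat it as illustrative, not canonical):

// Sketch: a 100ms target against a series reported in seconds.
target := 100.0
th := BasicRuleThreshold{
	Name:        "critical", // becomes part of the sample labels via PrepareSampleLabelsForRule
	TargetValue: &target,
	TargetUnit:  "ms",
	MatchType:   AtleastOnce,
	CompareOp:   ValueIsAbove,
}
series := v3.Series{
	Labels: map[string]string{"service": "checkout"},
	Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms expressed in seconds
}
// The 100ms target converts to 0.1 under ruleUnit "s"; 0.15 > 0.1, so it fires.
vector, _ := BasicRuleThresholds{th}.Eval(series, "s", EvalData{})
_ = vector // one sample with V == 0.15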


@@ -6,24 +6,16 @@ import (
"github.com/stretchr/testify/assert"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)
func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
target := 100.0
makeLabel := func(name, value string) *qbtypes.Label {
return &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: name}, Value: value}
}
makeValue := func(value float64, ts int64) *qbtypes.TimeSeriesValue {
return &qbtypes.TimeSeriesValue{Value: value, Timestamp: ts}
}
tests := []struct {
name string
threshold BasicRuleThreshold
series qbtypes.TimeSeries
series v3.Series
ruleUnit string
shouldAlert bool
}{
@@ -36,9 +28,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 150ms in seconds
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
},
},
ruleUnit: "s",
shouldAlert: true,
@@ -52,9 +46,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
},
ruleUnit: "s",
shouldAlert: false,
@@ -68,9 +64,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150000, 1000)}, // 150000ms = 150s
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
},
},
ruleUnit: "ms",
shouldAlert: true,
@@ -85,9 +83,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 0.15KiB ≈ 153.6 bytes
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
},
},
ruleUnit: "kbytes",
shouldAlert: true,
@@ -101,9 +101,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
},
ruleUnit: "mbytes",
shouldAlert: true,
@@ -118,9 +120,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
},
ruleUnit: "s",
shouldAlert: true,
@@ -134,12 +138,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: OnAverage,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.08, 1000), // 80ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.08, Timestamp: 1000}, // 80ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
},
},
ruleUnit: "s",
@@ -154,12 +158,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: InTotal,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.04, 1000), // 40MB
makeValue(0.05, 2000), // 50MB
makeValue(0.03, 3000), // 30MB
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.04, Timestamp: 1000}, // 40MB
{Value: 0.05, Timestamp: 2000}, // 50MB
{Value: 0.03, Timestamp: 3000}, // 30MB
},
},
ruleUnit: "decgbytes",
@@ -174,12 +178,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AllTheTimes,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.11, 1000), // 110ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.11, Timestamp: 1000}, // 110ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
},
},
ruleUnit: "s",
@@ -194,11 +198,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: Last,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.15, 1000), // 150kB
makeValue(0.05, 2000), // 50kB (last value)
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150kB
{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
},
},
ruleUnit: "decmbytes",
@@ -214,9 +218,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
},
ruleUnit: "KBs",
shouldAlert: true,
@@ -231,9 +237,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150ms
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150ms
},
},
ruleUnit: "ms",
shouldAlert: true,
@@ -248,9 +256,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150 (unitless)
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150 (unitless)
},
},
ruleUnit: "",
shouldAlert: true,
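
The cases above exercise all five match types against unit-converted values. For readers skimming the table, here is a minimal sketch of those semantics — the MatchType constants and the v3.Point shape are taken from the diff, but the function itself is illustrative; the repository's actual evaluator also converts the threshold target into the rule unit before comparing, which is omitted here:

// seriesMatches is an illustrative sketch, not the rules package's code.
// exceeds wraps the CompareOp; e.g. for ValueIsAbove with target t it would
// be func(v float64) bool { return v > t }.
func seriesMatches(points []v3.Point, matchType MatchType, exceeds func(float64) bool) bool {
    if len(points) == 0 {
        return false
    }
    switch matchType {
    case AtleastOnce: // any single point crossing the threshold alerts
        for _, p := range points {
            if exceeds(p.Value) {
                return true
            }
        }
        return false
    case AllTheTimes: // every point must cross the threshold
        for _, p := range points {
            if !exceeds(p.Value) {
                return false
            }
        }
        return true
    case OnAverage: // compare the mean of all points
        var sum float64
        for _, p := range points {
            sum += p.Value
        }
        return exceeds(sum / float64(len(points)))
    case InTotal: // compare the sum of all points
        var sum float64
        for _, p := range points {
            sum += p.Value
        }
        return exceeds(sum)
    case Last: // only the most recent point counts
        return exceeds(points[len(points)-1].Value)
    }
    return false
}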

View File

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>You're Invited to Join SigNoz</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
You've been invited by <strong>{{.inviter_email}}</strong> to join their SigNoz organization.
</p>
<p style="margin:0 0 12px;font-size:16px;color:#333;line-height:1.6">
A new account has been created for you with the following details:
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:20px;background:#f5f5f5;border-radius:6px;border-left:4px solid #4E74F8">
<p style="margin:0;font-size:15px;color:#333;line-height:1.6">
<strong>Email:</strong> {{.to}}
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Accept the invitation to get started.
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Accept Invitation
</a>
</td>
</tr>
</table>
<p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
Button not working? Copy and paste this link into your browser:
</p>
<p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
<a href="{{.link}}" style="color:#4E74F8;text-decoration:none">
{{.link}}
</a>
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
&#169; 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>
</html>
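
The placeholders in this template ({{.inviter_email}}, {{.to}}, {{.link}}, and the nested .format.Header/.Help/.Footer toggles) can be exercised with Go's standard html/template package. A minimal sketch — the file name and all values below are illustrative, not the product's wiring:

package main

import (
    "html/template"
    "os"
)

func main() {
    // "invitation.html" is a hypothetical path for this sketch.
    tmpl := template.Must(template.ParseFiles("invitation.html"))
    data := map[string]any{
        "inviter_email": "admin@example.com", // illustrative values
        "to":            "newuser@example.com",
        "link":          "https://signoz.example.com/invite/abc123",
        // Nested maps satisfy the .format.Header/.Help/.Footer lookups.
        "format": map[string]any{
            "Header": map[string]any{"Enabled": true, "LogoURL": "https://example.com/logo.png"},
            "Help":   map[string]any{"Enabled": true, "Email": "support@example.com"},
            "Footer": map[string]any{"Enabled": true},
        },
    }
    if err := tmpl.Execute(os.Stdout, data); err != nil {
        panic(err)
    }
}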

View File

@@ -1,14 +0,0 @@
<!DOCTYPE html>
<html>
<body>
<p>Hi {{.CustomerName}},</p>
<p>You have been invited to join SigNoz project by {{.InviterName}} ({{.InviterEmail}}).</p>
<p>Please click on the following button to accept the invitation:</p>
<a href="{{.Link}}" style="background-color: #000000; color: white; padding: 14px 20px; text-align: center; text-decoration: none; display: inline-block;">Accept Invitation</a>
<p>Button not working? Paste the following link into your browser:</p>
<p>{{.Link}}</p>
<p>Follow docs here 👉 to <a href="https://signoz.io/docs/cloud/">Get Started with SigNoz Cloud</a></p>
<p>Thanks,</p>
<p>SigNoz Team</p>
</body>
</html>

View File

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>{{.subject}}</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
A password reset was requested for your SigNoz account.
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Click the button below to reset your password:
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.Link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Reset Password
</a>
</td>
</tr>
</table>
<p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
Button not working? Copy and paste this link into your browser:
</p>
<p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
<a href="{{.Link}}" style="color:#4E74F8;text-decoration:none">
{{.Link}}
</a>
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:16px;background:#fff4e6;border-radius:6px;border-left:4px solid #ff9800">
<p style="margin:0;font-size:14px;color:#333;line-height:1.6">
<strong>⏱️ This link will expire in {{.Expiry}}.</strong>
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
If you didn't request this password reset, please ignore this email. Your password will remain unchanged.
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
&#169; 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>
</html>
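
One thing worth noting: Go templates resolve map keys case-sensitively, and this template mixes lowercase keys (.subject, .format) with capitalized ones (.Link, .Expiry), so the data map has to match exactly. Continuing the rendering sketch above with this template's keys — the 30-minute window and all values are made-up examples (requires the "time" import):

expiry := 30 * time.Minute // hypothetical validity window, not the product default
data := map[string]any{
    "subject": "Reset your SigNoz password",
    "Link":    "https://signoz.example.com/reset/xyz", // capitalized, unlike the invite template's {{.link}}
    "Expiry":  expiry.String(),                        // renders as "30m0s"; a friendlier formatter may be preferable
    "format": map[string]any{ // same Header/Help/Footer toggles as before
        "Header": map[string]any{"Enabled": false},
        "Help":   map[string]any{"Enabled": false},
        "Footer": map[string]any{"Enabled": false},
    },
}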

View File

@@ -1,13 +0,0 @@
<!DOCTYPE html>
<html>
<body>
<p>Hello {{.Name}},</p>
<p>You requested a password reset for your SigNoz account.</p>
<p>Click the link below to reset your password:</p>
<a href="{{.Link}}">Reset Password</a>
<p>This link will expire in {{.Expiry}}.</p>
<p>If you didn't request this, please ignore this email. Your password will remain unchanged.</p>
<br>
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>

View File

@@ -1,21 +0,0 @@
<!DOCTYPE html>
<html>
<body>
Hi {{.CustomerName}},<br>
Your role in <strong>SigNoz</strong> has been updated by {{.UpdatedByEmail}}.
<p>
<strong>Previous Role:</strong> {{.OldRole}}<br>
<strong>New Role:</strong> {{.NewRole}}
</p>
{{if eq .OldRole "Admin"}}
<p>
If you were not expecting this change or have any questions, please contact us at <a href="mailto:support@signoz.io">support@signoz.io</a>.
</p>
{{else}}
<p>
If you were not expecting this change or have any questions, please reach out to your administrator.
</p>
{{end}}
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>