Compare commits


2 Commits

Author            SHA1        Message                                             Date
Srikanth Chekuri  25e0c19ddb  Merge branch 'main' into remove-v4-support-rules    2026-02-14 23:51:21 +05:30
srikanthccv       8a6abb2f09  chore: remove support for older versions in rules   2026-02-14 23:48:09 +05:30
32 changed files with 1185 additions and 2636 deletions

View File

@@ -5,23 +5,16 @@ import (
"encoding/json"
"fmt"
"log/slog"
"math"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
@@ -32,6 +25,8 @@ import (
querierV5 "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -48,17 +43,12 @@ type AnomalyRule struct {
reader interfaces.Reader
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
// querierV5 is the query builder v5 querier used for all alert rule evaluation
querierV5 querierV5.Querier
provider anomaly.Provider
providerV2 anomalyV2.Provider
version string
logger *slog.Logger
logger *slog.Logger
seasonality anomaly.Seasonality
}
@@ -102,34 +92,6 @@ func NewAnomalyRule(
logger.Info("using seasonality", "seasonality", t.seasonality.String())
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
)
}
if t.seasonality == anomaly.SeasonalityHourly {
t.providerV2 = anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
@@ -148,7 +110,7 @@ func NewAnomalyRule(
}
t.querierV5 = querierV5
t.version = p.Version
t.reader = reader
t.logger = logger
return &t, nil
}
@@ -157,36 +119,9 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)
st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()
compositeQuery := r.Condition().CompositeQuery
if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}
func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
r.logger.InfoContext(ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -215,60 +150,6 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
if err != nil {
return nil, err
}
var queryResult *v3.Result
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
queryResult = result
break
}
}
hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
for _, series := range queryResult.AnomalyScores {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}
func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
Params: *params,
@@ -290,20 +171,25 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
}
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)
var anomalyScores []*qbtypes.TimeSeries
if qbResult != nil {
for _, bucket := range qbResult.Aggregations {
anomalyScores = append(anomalyScores, bucket.AnomalyScores...)
}
}
hasData := len(queryResult.AnomalyScores) > 0
hasData := len(anomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
scoresJSON, _ := json.Marshal(anomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.AnomalyScores
seriesToProcess := anomalyScores
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -316,7 +202,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -337,16 +223,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
}
res, err := r.buildAndRunQuery(ctx, r.OrgID(), ts)
if err != nil {
return 0, err
}

View File

@@ -8,28 +8,27 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
// mockAnomalyProviderV2 is a mock implementation of anomalyV2.Provider for testing.
type mockAnomalyProviderV2 struct {
responses []*anomalyV2.AnomaliesResponse
callCount int
}
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
func (m *mockAnomalyProviderV2) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomalyV2.AnomaliesRequest) (*anomalyV2.AnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
return &anomalyV2.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
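The mock exists because, as the comment above notes, the real provider fans out six queries over shifted time windows. A standalone sketch of that fan-out; the helper shape and the exact offsets are illustrative assumptions, not taken from this diff:

package main

import (
    "fmt"
    "time"
)

// window is a hypothetical holder for one lookback query range.
type window struct {
    name       string
    start, end time.Time
}

// seasonWindows sketches the six ranges named in the comment above;
// the offsets the provider actually uses may differ.
func seasonWindows(now time.Time, season, evalWindow time.Duration) []window {
    mk := func(name string, back time.Duration) window {
        return window{name, now.Add(-back - evalWindow), now.Add(-back)}
    }
    return []window{
        mk("current", 0),
        mk("past period", evalWindow),
        mk("current season", season),
        mk("past season", 2 * season),
        mk("past 2 seasons", 3 * season),
        mk("past 3 seasons", 4 * season),
    }
}

func main() {
    for _, w := range seasonWindows(time.Now().UTC(), 24*time.Hour, 15*time.Minute) {
        fmt.Printf("%-15s %s .. %s\n", w.name, w.start.Format(time.RFC3339), w.end.Format(time.RFC3339))
    }
}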
@@ -82,11 +81,11 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
},
}
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
},
},
}
@@ -129,8 +128,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
)
require.NoError(t, err)
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseNoData},
}
alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -190,11 +189,11 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
},
}
responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{},
},
},
}
@@ -228,16 +227,22 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)
responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseWithData := &anomalyV2.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{
Aggregations: []*qbtypes.AggregationBucket{
{
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
AnomalyScores: []*qbtypes.TimeSeries{
{
Labels: []*qbtypes.Label{
{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"},
},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
},
},
@@ -252,8 +257,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)
rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
rule.providerV2 = &mockAnomalyProviderV2{
responses: []*anomalyV2.AnomaliesResponse{responseWithData, responseNoData},
}
alertsFound1, err := rule.Eval(context.Background(), t1)

View File

@@ -6,7 +6,7 @@ import logEvent from 'api/common/logEvent';
import PromQLIcon from 'assets/Dashboard/PromQl';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
@@ -63,12 +63,8 @@ function QuerySection({
signalSource: signalSource === 'meter' ? 'meter' : '',
}}
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
showFunctions={
(alertType === AlertTypes.METRICS_BASED_ALERT &&
alertDef.version === ENTITY_VERSION_V4) ||
alertType === AlertTypes.LOGS_BASED_ALERT
}
version={alertDef.version || 'v3'}
showFunctions
version={ENTITY_VERSION_V5}
onSignalSourceChange={handleSignalSourceChange}
signalSourceChangeEnabled
/>

View File

@@ -43,11 +43,11 @@ var (
// FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationConverter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataConverter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateConverter
case "percent", "percentunit", "%":
return PercentConverter
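For reference, a minimal in-package test of the selector above, mirroring assertions that already appear in the converter tests later in this diff; it assumes the same testify import those tests use and the Value-based Convert signature visible throughout:

func TestFromUnitByteSelection(t *testing.T) {
    // FromUnit should route byte units to the data converter family;
    // 1024 bytes then rescale to 1 kbytes, as the existing tests assert.
    c := FromUnit("bytes")
    assert.Equal(t, Value{F: 1, U: "kbytes"}, c.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
}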

View File

@@ -58,80 +58,36 @@ func (*dataConverter) Name() string {
return "data"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataUnit(u Unit) float64 {
switch u {
case "bytes", "By": // base 2
return Byte
case "decbytes": // base 10
return Byte
case "bits", "bit": // base 2
case "bits": // base 2
return Bit
case "decbits": // base 10
return Bit
case "kbytes", "KiBy": // base 2
case "kbytes", "kBy": // base 2
return Kibibyte
case "decKbytes", "deckbytes", "kBy": // base 10
case "decKbytes", "deckbytes": // base 10
return Kilobyte
case "mbytes", "MiBy": // base 2
case "mbytes", "MBy": // base 2
return Mebibyte
case "decMbytes", "decmbytes", "MBy": // base 10
case "decMbytes", "decmbytes": // base 10
return Megabyte
case "gbytes", "GiBy": // base 2
case "gbytes", "GBy": // base 2
return Gibibyte
case "decGbytes", "decgbytes", "GBy": // base 10
case "decGbytes", "decgbytes": // base 10
return Gigabyte
case "tbytes", "TiBy": // base 2
case "tbytes", "TBy": // base 2
return Tebibyte
case "decTbytes", "dectbytes", "TBy": // base 10
case "decTbytes", "dectbytes": // base 10
return Terabyte
case "pbytes", "PiBy": // base 2
case "pbytes", "PBy": // base 2
return Pebibyte
case "decPbytes", "decpbytes", "PBy": // base 10
case "decPbytes", "decpbytes": // base 10
return Petabyte
case "EBy": // base 10
return Exabyte
case "ZBy": // base 10
return Zettabyte
case "YBy": // base 10
return Yottabyte
case "Kibit": // base 2
return Kibibit
case "Mibit": // base 2
return Mebibit
case "Gibit": // base 2
return Gibibit
case "Tibit": // base 2
return Tebibit
case "Pibit": // base 2
return Pebibit
case "EiBy": // base 2
return Exbibyte
case "ZiBy": // base 2
return Zebibyte
case "YiBy": // base 2
return Yobibyte
case "kbit": // base 10
return Kilobit
case "Mbit": // base 10
return Megabit
case "Gbit": // base 10
return Gigabit
case "Tbit": // base 10
return Terabit
case "Pbit": // base 10
return Petabit
case "Ebit": // base 10
return Exabit
case "Zbit": // base 10
return Zettabit
case "Ybit": // base 10
return Yottabit
default:
return 1
}
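A quick sanity check of the binary/decimal split this mapping enforces; a hypothetical test, assuming the usual 1024-based Kibibyte and 1000-based Kilobyte constants:

func TestDataUnitBinaryVsDecimal(t *testing.T) {
    // kbytes maps to Kibibyte (binary) and decKbytes to Kilobyte (decimal),
    // so the two factors should differ by exactly 1024/1000.
    assert.InDelta(t, 1.024, FromDataUnit("kbytes")/FromDataUnit("decKbytes"), 1e-9)
}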

View File

@@ -54,12 +54,6 @@ func (*dataRateConverter) Name() string {
return "data_rate"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataRateUnit(u Unit) float64 {
// See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
switch u {
@@ -71,70 +65,46 @@ func FromDataRateUnit(u Unit) float64 {
return BitPerSecond
case "bps", "bit/s": // bits/sec(SI)
return BitPerSecond
case "KiBs", "KiBy/s": // kibibytes/sec
case "KiBs": // kibibytes/sec
return KibibytePerSecond
case "Kibits", "Kibit/s": // kibibits/sec
case "Kibits": // kibibits/sec
return KibibitPerSecond
case "KBs", "kBy/s": // kilobytes/sec
return KilobytePerSecond
case "Kbits", "kbit/s": // kilobits/sec
return KilobitPerSecond
case "MiBs", "MiBy/s": // mebibytes/sec
case "MiBs": // mebibytes/sec
return MebibytePerSecond
case "Mibits", "Mibit/s": // mebibits/sec
case "Mibits": // mebibits/sec
return MebibitPerSecond
case "MBs", "MBy/s": // megabytes/sec
return MegabytePerSecond
case "Mbits", "Mbit/s": // megabits/sec
return MegabitPerSecond
case "GiBs", "GiBy/s": // gibibytes/sec
case "GiBs": // gibibytes/sec
return GibibytePerSecond
case "Gibits", "Gibit/s": // gibibits/sec
case "Gibits": // gibibits/sec
return GibibitPerSecond
case "GBs", "GBy/s": // gigabytes/sec
return GigabytePerSecond
case "Gbits", "Gbit/s": // gigabits/sec
return GigabitPerSecond
case "TiBs", "TiBy/s": // tebibytes/sec
case "TiBs": // tebibytes/sec
return TebibytePerSecond
case "Tibits", "Tibit/s": // tebibits/sec
case "Tibits": // tebibits/sec
return TebibitPerSecond
case "TBs", "TBy/s": // terabytes/sec
return TerabytePerSecond
case "Tbits", "Tbit/s": // terabits/sec
return TerabitPerSecond
case "PiBs", "PiBy/s": // pebibytes/sec
case "PiBs": // pebibytes/sec
return PebibytePerSecond
case "Pibits", "Pibit/s": // pebibits/sec
case "Pibits": // pebibits/sec
return PebibitPerSecond
case "PBs", "PBy/s": // petabytes/sec
return PetabytePerSecond
case "Pbits", "Pbit/s": // petabits/sec
return PetabitPerSecond
case "EBy/s": // exabytes/sec
return ExabytePerSecond
case "Ebit/s": // exabits/sec
return ExabitPerSecond
case "EiBy/s": // exbibytes/sec
return ExbibytePerSecond
case "Eibit/s": // exbibits/sec
return ExbibitPerSecond
case "ZBy/s": // zettabytes/sec
return ZettabytePerSecond
case "Zbit/s": // zettabits/sec
return ZettabitPerSecond
case "ZiBy/s": // zebibytes/sec
return ZebibytePerSecond
case "Zibit/s": // zebibits/sec
return ZebibitPerSecond
case "YBy/s": // yottabytes/sec
return YottabytePerSecond
case "Ybit/s": // yottabits/sec
return YottabitPerSecond
case "YiBy/s": // yobibytes/sec
return YobibytePerSecond
case "Yibit/s": // yobibits/sec
return YobibitPerSecond
default:
return 1
}
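The same kind of spot check works for rates; a hypothetical test, assuming KilobytePerSecond and KilobitPerSecond share a base unit so that a byte rate is eight times the corresponding bit rate:

func TestDataRateByteBitRatio(t *testing.T) {
    // KBs maps to KilobytePerSecond and Kbits to KilobitPerSecond;
    // one kilobyte per second is eight kilobits per second.
    assert.InDelta(t, 8.0, FromDataRateUnit("KBs")/FromDataRateUnit("Kbits"), 1e-9)
}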

View File

@@ -75,83 +75,3 @@ func TestDataRate(t *testing.T) {
// 1024 * 1024 * 1024 bytes = 1 gbytes
assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
}
func TestDataRateConversionUCUMUnit(t *testing.T) {
dataRateConverter := NewDataRateConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Binary byte scaling
{name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
{name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
{name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
{name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
{name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
{name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
{name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
{name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
{name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
{name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
{name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
{name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
{name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
{name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
{name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
// Unit alias
{name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
{name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
{name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
{name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
{name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
{name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
{name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
{name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
{name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
{name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
{name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
{name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
{name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
{name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
{name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
{name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
// Binary bit scaling (Exbi, Zebi, Yobi)
{name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
{name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
{name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
{name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
{name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
{name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
// Bytes to bits (Exbi, Zebi, Yobi)
{name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
{name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
{name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
// 1024 bytes = 1 kbytes
assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
// 1 mbytes = 1024 kbytes
@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
// 1024 kbytes = 1 mbytes
assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
// 1 mbytes = 1024 * 1024 bytes
assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
// 1024 mbytes = 1 gbytes
@@ -45,90 +45,10 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
// 1024 tbytes = 1 PiBy
assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
// 1 pbytes = 1024 tbytes
assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
// 1024 TiBy = 1 pbytes
assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
}
func TestDataConversionUCUMUnit(t *testing.T) {
dataConverter := NewDataConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Bits to bytes
{name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
{name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
// Binary byte scaling
{name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
{name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
{name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
{name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
{name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
{name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
{name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
{name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
{name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
{name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
// SI bit scaling
{name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
{name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
{name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
{name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
{name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
{name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
{name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
{name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
{name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
{name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
{name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
{name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
{name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
{name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
{name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
{name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
{name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
{name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
{name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
{name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
{name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
{name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
{name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
{name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
{name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
{name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
{name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
{name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
{name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
{name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
{name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
{name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
{name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
{name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
{name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
{name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
// 1024 pbytes = 1 tbytes
assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
}

View File

@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
return Hour
case "d":
return Day
case "w", "wk":
case "w":
return Week
default:
return Second
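One behavioral consequence of dropping the alias: "w" still maps to Week, but "wk" now falls through to the default branch and is treated as seconds. A hypothetical check of that fallback:

func TestWeekAliasFallback(t *testing.T) {
    // "w" remains a recognized week unit; the removed "wk" alias now
    // hits the default case, which returns Second.
    assert.Equal(t, Week, FromTimeUnit("w"))
    assert.Equal(t, Second, FromTimeUnit("wk"))
}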

View File

@@ -54,13 +54,4 @@ func TestDurationConvert(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
// 1000000000 ns = 1 s
assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))
// 7 d = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
// 1 wk = 7 d
assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
// 1 wk = 168 h
assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
// 604800 s = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
}

View File

@@ -24,50 +24,30 @@ func (f *dataFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value))
case "decbytes":
return humanize.Bytes(uint64(value))
case "kbytes", "KiBy":
case "bits":
return humanize.IBytes(uint64(value * converter.Bit))
case "decbits":
return humanize.Bytes(uint64(value * converter.Bit))
case "kbytes", "kBy":
return humanize.IBytes(uint64(value * converter.Kibibit))
case "Kibit":
return humanize.IBytes(uint64(value * converter.Kibibit / 8))
case "decKbytes", "deckbytes", "kBy":
return humanize.Bytes(uint64(value * converter.Kilobit))
case "kbit":
return humanize.Bytes(uint64(value * converter.Kilobit / 8))
case "mbytes", "MiBy":
case "decKbytes", "deckbytes":
return humanize.IBytes(uint64(value * converter.Kilobit))
case "mbytes", "MBy":
return humanize.IBytes(uint64(value * converter.Mebibit))
case "Mibit":
return humanize.IBytes(uint64(value * converter.Mebibit / 8))
case "decMbytes", "decmbytes", "MBy":
case "decMbytes", "decmbytes":
return humanize.Bytes(uint64(value * converter.Megabit))
case "Mbit":
return humanize.Bytes(uint64(value * converter.Megabit / 8))
case "gbytes", "GiBy":
case "gbytes", "GBy":
return humanize.IBytes(uint64(value * converter.Gibibit))
case "Gibit":
return humanize.IBytes(uint64(value * converter.Gibibit / 8))
case "decGbytes", "decgbytes", "GBy":
case "decGbytes", "decgbytes":
return humanize.Bytes(uint64(value * converter.Gigabit))
case "Gbit":
return humanize.Bytes(uint64(value * converter.Gigabit / 8))
case "tbytes", "TiBy":
case "tbytes", "TBy":
return humanize.IBytes(uint64(value * converter.Tebibit))
case "Tibit":
return humanize.IBytes(uint64(value * converter.Tebibit / 8))
case "decTbytes", "dectbytes", "TBy":
case "decTbytes", "dectbytes":
return humanize.Bytes(uint64(value * converter.Terabit))
case "Tbit":
return humanize.Bytes(uint64(value * converter.Terabit / 8))
case "pbytes", "PiBy":
case "pbytes", "PBy":
return humanize.IBytes(uint64(value * converter.Pebibit))
case "Pbit":
return humanize.Bytes(uint64(value * converter.Petabit / 8))
case "decPbytes", "decpbytes", "PBy":
case "decPbytes", "decpbytes":
return humanize.Bytes(uint64(value * converter.Petabit))
case "EiBy":
return humanize.IBytes(uint64(value * converter.Exbibit))
case "Ebit":
return humanize.Bytes(uint64(value * converter.Exabit / 8))
case "EBy":
return humanize.Bytes(uint64(value * converter.Exabit))
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)
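A compact illustration of the formatter's binary/decimal behavior after this change, mirroring assertions in the tests below; hypothetical test name, same package and testify import as the existing tests:

func TestFormatBinaryVsDecimalBytes(t *testing.T) {
    f := NewDataFormatter()
    // IEC units humanize in 1024 steps, SI units in 1000 steps.
    assert.Equal(t, "1.0 KiB", f.Format(1024, "bytes"))
    assert.Equal(t, "1.0 kB", f.Format(1000, "decbytes"))
}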

View File

@@ -24,55 +24,50 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value)) + "/s"
case "Bps", "By/s":
return humanize.Bytes(uint64(value)) + "/s"
case "KiBs", "KiBy/s":
case "binbps":
return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
case "bps", "bit/s":
return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
case "KiBs":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
case "Kibits", "Kibit/s":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond/8)) + "/s"
case "Kibits":
return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
case "KBs", "kBy/s":
return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
case "Kbits", "kbit/s":
return humanize.Bytes(uint64(value*converter.KilobitPerSecond/8)) + "/s"
case "MiBs", "MiBy/s":
return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
case "MiBs":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
case "Mibits", "Mibit/s":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond/8)) + "/s"
case "Mibits":
return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
case "MBs", "MBy/s":
return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
case "Mbits", "Mbit/s":
return humanize.Bytes(uint64(value*converter.MegabitPerSecond/8)) + "/s"
case "GiBs", "GiBy/s":
return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
case "GiBs":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
case "Gibits", "Gibit/s":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond/8)) + "/s"
case "Gibits":
return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
case "GBs", "GBy/s":
return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
case "Gbits", "Gbit/s":
return humanize.Bytes(uint64(value*converter.GigabitPerSecond/8)) + "/s"
case "TiBs", "TiBy/s":
return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
case "TiBs":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
case "Tibits", "Tibit/s":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond/8)) + "/s"
case "Tibits":
return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
case "TBs", "TBy/s":
return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
case "Tbits", "Tbit/s":
return humanize.Bytes(uint64(value*converter.TerabitPerSecond/8)) + "/s"
case "PiBs", "PiBy/s":
return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
case "PiBs":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
case "Pibits", "Pibit/s":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond/8)) + "/s"
case "Pibits":
return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
case "PBs", "PBy/s":
return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
case "Pbits", "Pbit/s":
return humanize.Bytes(uint64(value*converter.PetabitPerSecond/8)) + "/s"
// Exa units
case "EBy/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
case "Ebit/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond/8)) + "/s"
case "EiBy/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
case "Eibit/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond/8)) + "/s"
return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)
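The rate formatter keeps the same split, just with a "/s" suffix; a hypothetical test grounded in the expectations of the comprehensive test file deleted below:

func TestDataRateFormatBase(t *testing.T) {
    f := NewDataRateFormatter()
    // binBps humanizes in 1024 steps and appends "/s".
    assert.Equal(t, "1.0 KiB/s", f.Format(1024, "binBps"))
}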

View File

@@ -1,140 +0,0 @@
package formatter
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestDataRateFormatterComprehensive(t *testing.T) {
dataRateFormatter := NewDataRateFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Base bytes/sec - binBps
{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},
// SI Base bytes/sec - Bps, By/s
{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},
// Kibibytes/sec - KiBs, KiBy/s
{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},
// Kibibits/sec - Kibits, Kibit/s
{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "128 B/s"},
{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "5.3 MiB/s"},
{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"},
// Kilobytes/sec (SI) - KBs, kBy/s
{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},
// Kilobits/sec (SI) - Kbits, kbit/s
{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "125 B/s"},
{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "125 B/s"},
// Mebibytes/sec - MiBs, MiBy/s
{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},
// Mebibits/sec - Mibits, Mibit/s
{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "5.0 MiB/s"},
{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "1.3 MiB/s"},
// Megabytes/sec (SI) - MBs, MBy/s
{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},
// Megabits/sec (SI) - Mbits, Mbit/s
{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "125 kB/s"},
{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "125 kB/s"},
// Gibibytes/sec - GiBs, GiBy/s
{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},
// Gibibits/sec - Gibits, Gibit/s
{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "5.3 TiB/s"},
{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "5.3 TiB/s"},
// Gigabytes/sec (SI) - GBs, GBy/s
{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},
// Gigabits/sec (SI) - Gbits, Gbit/s
{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "5.3 TB/s"},
{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "5.3 TB/s"},
// Tebibytes/sec - TiBs, TiBy/s
{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},
// Tebibits/sec - Tibits, Tibit/s
{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "5.3 PiB/s"},
{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "5.3 PiB/s"},
// Terabytes/sec (SI) - TBs, TBy/s
{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},
// Terabits/sec (SI) - Tbits, Tbit/s
{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "5.3 PB/s"},
{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "5.3 PB/s"},
// Pebibytes/sec - PiBs, PiBy/s
{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},
// Pebibits/sec - Pibits, Pibit/s
{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "1.3 EiB/s"},
{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "1.3 EiB/s"},
// Petabytes/sec (SI) - PBs, PBy/s
{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},
// Petabits/sec (SI) - Pbits, Pbit/s
{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "5.3 PB/s"},
{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "5.3 PB/s"},
// Exabytes/sec (SI) - EBy/s
{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},
// Exabits/sec (SI) - Ebit/s
{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "1.3 EB/s"},
// Exbibytes/sec (IEC) - EiBy/s
{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},
// Exbibits/sec (IEC) - Eibit/s
{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "1.3 EiB/s"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -14,164 +14,21 @@ func TestData(t *testing.T) {
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}
func TestDataFormatterComprehensive(t *testing.T) {
dataFormatter := NewDataFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Base bytes - bytes, By
{name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
{name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
{name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
{name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
{name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
{name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
{name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
{name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
{name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},
// SI Base bytes - decbytes
{name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
{name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
{name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
{name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},
// Kibibytes - kbytes, KiBy (IEC)
{name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
{name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
{name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
{name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
{name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
{name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
{name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
{name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
{name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},
// SI Kilobytes - decKbytes, deckbytes, kBy
{name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
{name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
{name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
{name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
{name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},
// Mebibytes - mbytes, MiBy (IEC)
{name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
{name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
{name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
{name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
{name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
{name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
{name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
{name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},
// SI Megabytes - decMbytes, decmbytes, MBy
{name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
{name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
{name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
{name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},
// Gibibytes - gbytes, GiBy (IEC)
{name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
{name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
{name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},
// SI Gigabytes - decGbytes, decgbytes, GBy
{name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
{name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},
// Tebibytes - tbytes, TiBy (IEC)
{name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
{name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
{name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},
// SI Terabytes - decTbytes, dectbytes, TBy
{name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
{name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
{name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},
// Pebibytes - pbytes, PiBy (IEC)
{name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
{name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},
// SI Petabytes - decPbytes, decpbytes, PBy
{name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
{name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
{name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},
// Exbibytes - EiBy (IEC)
{name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},
// Exabytes - EBy (SI)
{name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},
// Kibibits - Kibit (IEC): 1 Kibit = 1024 bits = 128 bytes
{name: "Kibit: 1 = 128 B", value: 1, unit: "Kibit", expected: "128 B"},
{name: "Kibit: 1024 = 128 KiB", value: 1024, unit: "Kibit", expected: "128 KiB"},
// Mebibits - Mibit (IEC): 1 Mibit = 1024 Kibit = 128 KiB
{name: "Mibit: 1 = 128 KiB", value: 1, unit: "Mibit", expected: "128 KiB"},
{name: "Mibit: 1024 = 128 MiB", value: 1024, unit: "Mibit", expected: "128 MiB"},
// Gibibits - Gibit (IEC): 1 Gibit = 1024 Mibit = 128 MiB
{name: "Gibit: 1 = 128 MiB", value: 1, unit: "Gibit", expected: "128 MiB"},
{name: "Gibit: 42*1024 = 5.3 TiB", value: 42 * 1024, unit: "Gibit", expected: "5.3 TiB"},
// Tebibits - Tibit (IEC): 1 Tibit = 1024 Gibit = 128 GiB
{name: "Tibit: 1 = 128 GiB", value: 1, unit: "Tibit", expected: "128 GiB"},
{name: "Tibit: 42*1024 = 5.3 PiB", value: 42 * 1024, unit: "Tibit", expected: "5.3 PiB"},
// Kilobits - kbit (SI): 1 kbit = 1000 bits = 125 bytes
{name: "kbit: 1 = 125 B", value: 1, unit: "kbit", expected: "125 B"},
{name: "kbit: 1000 = 125 kB", value: 1000, unit: "kbit", expected: "125 kB"},
// Megabits - Mbit (SI): 1 Mbit = 1000 kbit = 125 kB
{name: "Mbit: 1 = 125 kB", value: 1, unit: "Mbit", expected: "125 kB"},
{name: "Mbit: 1000 = 125 MB", value: 1000, unit: "Mbit", expected: "125 MB"},
// Gigabits - Gbit (SI): 1 Gbit = 1000 Mbit = 125 MB
{name: "Gbit: 1 = 125 MB", value: 1, unit: "Gbit", expected: "125 MB"},
{name: "Gbit: 42*1000 = 5.3 TB", value: 42 * 1000, unit: "Gbit", expected: "5.3 TB"},
// Terabits - Tbit (SI): 1 Tbit = 1000 Gbit = 125 GB
{name: "Tbit: 1 = 125 GB", value: 1, unit: "Tbit", expected: "125 GB"},
{name: "Tbit: 42*1000 = 5.3 PB", value: 42 * 1000, unit: "Tbit", expected: "5.3 PB"},
// Petabits - Pbit (SI): 1 Pbit = 1000 Tbit = 125 TB
{name: "Pbit: 1 = 125 TB", value: 1, unit: "Pbit", expected: "125 TB"},
{name: "Pbit: 42 = 5.3 PB", value: 42, unit: "Pbit", expected: "5.3 PB"},
// Exabits - Ebit (SI): 1 Ebit = 1000 Pbit = 125 PB
{name: "Ebit: 1 = 125 PB", value: 1, unit: "Ebit", expected: "125 PB"},
{name: "Ebit: 10 = 1.3 EB", value: 10, unit: "Ebit", expected: "1.3 EB"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
}
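For orientation, the scaling rule these cases exercise can be sketched as a small standalone helper. This is a minimal sketch, not the formatter's actual implementation: the helper name, the unit tables, and the fixed one-decimal formatting are assumptions (the real formatter, per the expectations above, also prints values like "69 TiB" without a decimal).

package main

import "fmt"

// humanizeBytes sketches the IEC (base-1024) vs SI (base-1000) scaling
// verified by the table above; illustrative only.
func humanizeBytes(v float64, binary bool) string {
	base, units := 1000.0, []string{"B", "kB", "MB", "GB", "TB", "PB", "EB"}
	if binary {
		base, units = 1024.0, []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"}
	}
	i := 0
	for v >= base && i < len(units)-1 {
		v /= base
		i++
	}
	if i == 0 {
		return fmt.Sprintf("%g %s", v, units[0]) // base unit, no decimal: "512 B"
	}
	return fmt.Sprintf("%.1f %s", v, units[i])
}

func main() {
	fmt.Println(humanizeBytes(1536, true))       // 1.5 KiB
	fmt.Println(humanizeBytes(1000*1000, false)) // 1.0 MB
}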

View File

@@ -18,11 +18,11 @@ var (
func FromUnit(u string) Formatter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationFormatter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataFormatter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateFormatter
case "percent", "percentunit", "%":
return PercentFormatter

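A brief usage sketch of the dispatch above; the output noted in the comment follows the table-driven tests earlier in this diff:

f := FromUnit("bytes")      // dispatches to DataFormatter
_ = f.Format(1536, "bytes") // "1.5 KiB"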
View File

@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
return toHours(value)
case "d":
return toDays(value)
case "w", "wk":
case "w":
return toWeeks(value)
}
// When unit is not matched, return the value as it is.

View File

@@ -625,7 +625,7 @@ func (r *BaseRule) extractMetricAndGroupBys(ctx context.Context) (map[string][]s
// FilterNewSeries filters out items that are too new based on metadata first_seen timestamps.
// Returns only the series that are old enough, excluding new series still within the grace period.
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*v3.Series) ([]*v3.Series, error) {
func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*qbtypes.TimeSeries) ([]*qbtypes.TimeSeries, error) {
// Extract metric names and groupBy keys
metricToGroupedFields, err := r.extractMetricAndGroupBys(ctx)
if err != nil {
@@ -642,7 +642,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
seriesIdxToLookupKeys := make(map[int][]telemetrytypes.MetricMetadataLookupKey) // series index -> lookup keys
for i := 0; i < len(series); i++ {
metricLabelMap := series[i].Labels
metricLabelMap := series[i].LabelsMap()
// Collect groupBy attribute-value pairs for this series
seriesKeys := make([]telemetrytypes.MetricMetadataLookupKey, 0)
@@ -689,7 +689,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
}
// Filter series based on first_seen + delay
filteredSeries := make([]*v3.Series, 0, len(series))
filteredSeries := make([]*qbtypes.TimeSeries, 0, len(series))
evalTimeMs := ts.UnixMilli()
newGroupEvalDelayMs := r.newGroupEvalDelay.Milliseconds()
@@ -727,7 +727,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
// Check if first_seen + delay has passed
if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
// Still within grace period, skip this series
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].Labels)
r.logger.InfoContext(ctx, "Skipping new series", "rule_name", r.Name(), "series_idx", i, "max_first_seen", maxFirstSeen, "eval_time_ms", evalTimeMs, "delay_ms", newGroupEvalDelayMs, "labels", series[i].LabelsMap())
continue
}
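The skip decision above reduces to a single comparison; withinGracePeriod below is a hypothetical helper mirroring it, not part of the diff:

// A series is still "new" while first_seen + delay has not yet passed
// the evaluation time; such series are skipped, all others are kept.
func withinGracePeriod(maxFirstSeenMs, delayMs, evalTimeMs int64) bool {
	return maxFirstSeenMs+delayMs > evalTimeMs
}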

View File

@@ -26,33 +26,33 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
)
// createTestSeries creates a *v3.Series with the given labels and optional points
// createTestSeries creates a *qbtypes.TimeSeries with the given labels and optional values
// the points are not strictly needed: only the labels determine whether a series is new or old
// the labels form the lookup key used to check the series' first_seen timestamp in the metadata table
func createTestSeries(labels map[string]string, points []v3.Point) *v3.Series {
func createTestSeries(labels map[string]string, points []*qbtypes.TimeSeriesValue) *qbtypes.TimeSeries {
if points == nil {
points = []v3.Point{}
points = []*qbtypes.TimeSeriesValue{}
}
return &v3.Series{
Labels: labels,
Points: points,
lbls := make([]*qbtypes.Label, 0, len(labels))
for k, v := range labels {
lbls = append(lbls, &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: k}, Value: v})
}
return &qbtypes.TimeSeries{
Labels: lbls,
Values: points,
}
}
// seriesEqual compares two v3.Series by their labels
// seriesEqual compares two *qbtypes.TimeSeries by their labels
// Returns true if the series have the same labels (order doesn't matter)
func seriesEqual(s1, s2 *v3.Series) bool {
if s1 == nil && s2 == nil {
return true
}
if s1 == nil || s2 == nil {
func seriesEqual(s1, s2 *qbtypes.TimeSeries) bool {
m1 := s1.LabelsMap()
m2 := s2.LabelsMap()
if len(m1) != len(m2) {
return false
}
if len(s1.Labels) != len(s2.Labels) {
return false
}
for k, v := range s1.Labels {
if s2.Labels[k] != v {
for k, v := range m1 {
if m2[k] != v {
return false
}
}
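Together the two helpers compare series purely by labels; a hypothetical check:

a := createTestSeries(map[string]string{"env": "prod"}, nil)
b := createTestSeries(map[string]string{"env": "prod"}, nil)
_ = seriesEqual(a, b) // true: identical label sets, order-independent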
@@ -149,11 +149,11 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
type filterNewSeriesTestCase struct {
name string
compositeQuery *v3.CompositeQuery
series []*v3.Series
series []*qbtypes.TimeSeries
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectedFiltered []*qbtypes.TimeSeries // series that should be in the final filtered result (old enough)
expectError bool
}
@@ -193,7 +193,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
@@ -205,7 +205,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-missing", "env": "stage"}, nil),
}, // svc-old and svc-missing should be included; svc-new is filtered out
@@ -227,7 +227,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-new1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-new2", "env": "stage"}, nil),
},
@@ -237,7 +237,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
expectedFiltered: []*qbtypes.TimeSeries{}, // all should be filtered out (new series)
},
{
name: "all old series - ClickHouse query",
@@ -254,7 +254,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
},
@@ -264,7 +264,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-old2", "env": "stage"}, nil),
}, // all should be included (old series)
@@ -292,13 +292,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -322,13 +322,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // early return, no filtering - all series included
},
@@ -358,13 +358,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"status": "200"}, nil),
}, // series included as we can't decide if it's new or old
},
@@ -385,7 +385,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
},
@@ -393,7 +393,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
createTestSeries(map[string]string{"service_name": "svc-no-metadata", "env": "prod"}, nil),
}, // both should be included - svc-old is old, svc-no-metadata can't be decided
@@ -413,7 +413,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
},
// Only provide metadata for service_name, not env
@@ -423,7 +423,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
}, // has some metadata, uses max first_seen which is old
},
@@ -453,11 +453,11 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{},
series: []*qbtypes.TimeSeries{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
expectedFiltered: []*qbtypes.TimeSeries{},
},
{
name: "zero delay - Builder",
@@ -485,13 +485,13 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
}, // with zero delay, all series pass
},
@@ -526,7 +526,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: mergeFirstSeenMaps(
@@ -535,7 +535,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
},
@@ -565,7 +565,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
// service_name is old, env is new - should use max (new)
@@ -575,7 +575,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
expectedFiltered: []*qbtypes.TimeSeries{}, // max first_seen is new, so should be filtered out
},
{
name: "Logs query - should skip filtering and return empty skip indexes",
@@ -600,14 +600,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Logs queries should return early, no filtering - all included
@@ -635,14 +635,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
},
},
series: []*v3.Series{
series: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
expectedFiltered: []*qbtypes.TimeSeries{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
}, // Traces queries should return early, no filtering - all included
@@ -724,14 +724,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
// Build a map to count occurrences of each unique label combination in expected series
expectedCounts := make(map[string]int)
for _, expected := range tt.expectedFiltered {
key := labelsKey(expected.Labels)
key := labelsKey(expected.LabelsMap())
expectedCounts[key]++
}
// Build a map to count occurrences of each unique label combination in filtered series
actualCounts := make(map[string]int)
for _, filtered := range filteredSeries {
key := labelsKey(filtered.Labels)
key := labelsKey(filtered.LabelsMap())
actualCounts[key]++
}

View File

@@ -12,19 +12,18 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/formatter"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/promql"
)
type PromRule struct {
*BaseRule
version string
prometheus prometheus.Prometheus
}
@@ -48,7 +47,6 @@ func NewPromRule(
p := PromRule{
BaseRule: baseRule,
version: postableRule.Version,
prometheus: prometheus,
}
p.logger = logger
@@ -83,48 +81,30 @@ func (r *PromRule) GetSelectedQuery() string {
}
func (r *PromRule) getPqlQuery() (string, error) {
if r.version == "v5" {
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
if len(r.ruleCondition.CompositeQuery.Queries) > 0 {
selectedQuery := r.GetSelectedQuery()
for _, item := range r.ruleCondition.CompositeQuery.Queries {
switch item.Type {
case qbtypes.QueryTypePromQL:
promQuery, ok := item.Spec.(qbtypes.PromQuery)
if !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", item.Spec)
}
if promQuery.Name == selectedQuery {
return promQuery.Query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule setup")
}
if r.ruleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
if len(r.ruleCondition.CompositeQuery.PromQueries) > 0 {
selectedQuery := r.GetSelectedQuery()
if promQuery, ok := r.ruleCondition.CompositeQuery.PromQueries[selectedQuery]; ok {
query := promQuery.Query
if query == "" {
return query, fmt.Errorf("a promquery needs to be set for this rule to function")
}
return query, nil
}
}
}
return "", fmt.Errorf("invalid promql rule query")
return "", fmt.Errorf("invalid promql rule setup")
}
func (r *PromRule) matrixToV3Series(res promql.Matrix) []*v3.Series {
v3Series := make([]*v3.Series, 0, len(res))
func matrixToTimeSeries(res promql.Matrix) []*qbtypes.TimeSeries {
result := make([]*qbtypes.TimeSeries, 0, len(res))
for _, series := range res {
commonSeries := toCommonSeries(series)
v3Series = append(v3Series, &commonSeries)
result = append(result, promSeriesToTimeSeries(series))
}
return v3Series
return result
}
func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
@@ -143,31 +123,31 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
return nil, err
}
matrixToProcess := r.matrixToV3Series(res)
seriesToProcess := matrixToTimeSeries(res)
hasData := len(matrixToProcess) > 0
hasData := len(seriesToProcess) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
// Filter out new series if newGroupEvalDelay is configured
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series", "error", filterErr, "rule_name", r.Name())
} else {
matrixToProcess = filteredSeries
seriesToProcess = filteredSeries
}
}
var resultVector ruletypes.Vector
for _, series := range matrixToProcess {
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(
ctx, "not enough data points to evaluate series, skipping",
"rule_id", r.ID(), "num_points", len(series.Points), "required_points", r.Condition().RequiredNumPoints,
"rule_id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
)
continue
}
@@ -454,26 +434,25 @@ func (r *PromRule) RunAlertQuery(ctx context.Context, qs string, start, end time
}
}
func toCommonSeries(series promql.Series) v3.Series {
commonSeries := v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0),
func promSeriesToTimeSeries(series promql.Series) *qbtypes.TimeSeries {
ts := &qbtypes.TimeSeries{
Labels: make([]*qbtypes.Label, 0, len(series.Metric)),
Values: make([]*qbtypes.TimeSeriesValue, 0, len(series.Floats)),
}
for _, lbl := range series.Metric {
commonSeries.Labels[lbl.Name] = lbl.Value
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
lbl.Name: lbl.Value,
ts.Labels = append(ts.Labels, &qbtypes.Label{
Key: telemetrytypes.TelemetryFieldKey{Name: lbl.Name},
Value: lbl.Value,
})
}
for _, f := range series.Floats {
commonSeries.Points = append(commonSeries.Points, v3.Point{
ts.Values = append(ts.Values, &qbtypes.TimeSeriesValue{
Timestamp: f.T,
Value: f.F,
})
}
return commonSeries
return ts
}
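A short usage sketch of the conversion above; the sample series is hypothetical and assumes the upstream promql and model/labels packages:

s := promql.Series{
	Metric: labels.FromStrings("service_name", "svc1"),
	Floats: []promql.FPoint{{T: 1700000000000, F: 0.42}},
}
ts := promSeriesToTimeSeries(s)
// ts.Labels[0].Key.Name == "service_name"; ts.Values[0].Value == 0.42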

View File

@@ -20,6 +20,7 @@ import (
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -47,9 +48,13 @@ func TestPromRuleEval(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "dummy_query", // This is not used in the test
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "dummy_query", // This is not used in the test
},
},
},
},
@@ -62,7 +67,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp string
matchType string
target float64
expectedAlertSample v3.Point
expectedAlertSample float64
expectedVectorValues []float64 // Expected values in result vector
}{
// Test cases for Equals Always
@@ -80,7 +85,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
expectedVectorValues: []float64{0.0},
},
{
@@ -145,7 +150,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
expectedVectorValues: []float64{0.0},
},
{
@@ -162,7 +167,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
},
{
values: pql.Series{
@@ -178,7 +183,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 0.0},
expectedAlertSample: 0.0,
},
{
values: pql.Series{
@@ -211,7 +216,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "2", // Always
target: 1.5,
expectedAlertSample: v3.Point{Value: 2.0},
expectedAlertSample: 2.0,
expectedVectorValues: []float64{2.0},
},
{
@@ -228,7 +233,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Above
matchType: "2", // Always
target: 2.0,
expectedAlertSample: v3.Point{Value: 3.0},
expectedAlertSample: 3.0,
},
{
values: pql.Series{
@@ -244,7 +249,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Below
matchType: "2", // Always
target: 13.0,
expectedAlertSample: v3.Point{Value: 12.0},
expectedAlertSample: 12.0,
},
{
values: pql.Series{
@@ -276,7 +281,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "1", // Once
target: 4.5,
expectedAlertSample: v3.Point{Value: 10.0},
expectedAlertSample: 10.0,
expectedVectorValues: []float64{10.0},
},
{
@@ -339,7 +344,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "2", // Always
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -371,7 +376,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -402,7 +407,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
{
values: pql.Series{
@@ -418,7 +423,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "1", // Once
target: 0.0,
expectedAlertSample: v3.Point{Value: 1.0},
expectedAlertSample: 1.0,
},
// Test cases for Less Than Always
{
@@ -435,7 +440,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "2", // Always
target: 4,
expectedAlertSample: v3.Point{Value: 1.5},
expectedAlertSample: 1.5,
},
{
values: pql.Series{
@@ -467,7 +472,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "1", // Once
target: 4,
expectedAlertSample: v3.Point{Value: 2.5},
expectedAlertSample: 2.5,
},
{
values: pql.Series{
@@ -499,7 +504,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "3", // OnAverage
target: 6.0,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -530,7 +535,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -561,7 +566,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "3", // OnAverage
target: 4.5,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
{
values: pql.Series{
@@ -577,7 +582,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "3", // OnAverage
target: 12.0,
expectedAlertSample: v3.Point{Value: 6.0},
expectedAlertSample: 6.0,
},
// Test cases for InTotal
{
@@ -594,7 +599,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "3", // Equals
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: v3.Point{Value: 30.0},
expectedAlertSample: 30.0,
},
{
values: pql.Series{
@@ -621,7 +626,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "4", // Not Equals
matchType: "4", // InTotal
target: 9.0,
expectedAlertSample: v3.Point{Value: 10.0},
expectedAlertSample: 10.0,
},
{
values: pql.Series{
@@ -645,7 +650,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "1", // Greater Than
matchType: "4", // InTotal
target: 10.0,
expectedAlertSample: v3.Point{Value: 20.0},
expectedAlertSample: 20.0,
},
{
values: pql.Series{
@@ -670,7 +675,7 @@ func TestPromRuleEval(t *testing.T) {
compareOp: "2", // Less Than
matchType: "4", // InTotal
target: 30.0,
expectedAlertSample: v3.Point{Value: 20.0},
expectedAlertSample: 20.0,
},
{
values: pql.Series{
@@ -708,7 +713,7 @@ func TestPromRuleEval(t *testing.T) {
assert.NoError(t, err)
}
resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
resultVectors, err := rule.Threshold.Eval(*promSeriesToTimeSeries(c.values), rule.Unit(), ruletypes.EvalData{})
assert.NoError(t, err)
// Compare full result vector with expected vector
@@ -724,12 +729,12 @@ func TestPromRuleEval(t *testing.T) {
if len(resultVectors) > 0 {
found := false
for _, sample := range resultVectors {
if sample.V == c.expectedAlertSample.Value {
if sample.V == c.expectedAlertSample {
found = true
break
}
}
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues)
assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample, idx, actualValues)
}
} else {
assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx)
@@ -754,9 +759,13 @@ func TestPromRuleUnitCombinations(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1013,9 +1022,13 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1124,9 +1137,13 @@ func TestMultipleThresholdPromRule(t *testing.T) {
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {
Query: "test_metric",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "A",
Query: "test_metric",
},
},
},
},
@@ -1361,8 +1378,11 @@ func TestPromRule_NoData(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1486,8 +1506,11 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
Thresholds: &ruletypes.RuleThresholdData{
@@ -1635,8 +1658,11 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
MatchType: ruletypes.AtleastOnce,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypePromQL,
PromQueries: map[string]*v3.PromQuery{
"A": {Query: "test_metric"},
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{Name: "A", Query: "test_metric"},
},
},
},
},

View File

@@ -1,38 +1,24 @@
package rules
import (
"bytes"
"context"
"encoding/json"
"fmt"
"log/slog"
"math"
"net/url"
"reflect"
"text/template"
"time"
"github.com/SigNoz/signoz/pkg/contextlinks"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
tracesV4 "github.com/SigNoz/signoz/pkg/query-service/app/traces/v4"
"github.com/SigNoz/signoz/pkg/query-service/formatter"
querierV5 "github.com/SigNoz/signoz/pkg/querier"
@@ -42,23 +28,9 @@ import (
type ThresholdRule struct {
*BaseRule
// Ever since we introduced the new metrics query builder, the version is "v4"
// for all the rules
// if the version is "v3", then we use the old querier
// if the version is "v4", then we use the new querierV2
version string
// querier is used for alerts created before the introduction of new metrics query builder
querier interfaces.Querier
// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier
// querierV5 is used for alerts migrated after the introduction of new query builder
// querierV5 is the query builder v5 querier used for all alert rule evaluation
querierV5 querierV5.Querier
// used for attribute metadata enrichment for logs and traces
logsKeys map[string]v3.AttributeKey
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
@@ -82,25 +54,10 @@ func NewThresholdRule(
}
t := ThresholdRule{
BaseRule: baseRule,
version: p.Version,
BaseRule: baseRule,
querierV5: querierV5,
}
querierOption := querier.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: nil,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}
t.querier = querier.NewQuerier(querierOption)
t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.querierV5 = querierV5
t.reader = reader
return &t, nil
}
@@ -120,169 +77,9 @@ func (r *ThresholdRule) Type() ruletypes.RuleType {
return ruletypes.RuleTypeThreshold
}
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
if r.ruleCondition.QueryType() == v3.QueryTypeClickHouseSQL {
params := &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: &v3.CompositeQuery{
QueryType: r.ruleCondition.CompositeQuery.QueryType,
PanelType: r.ruleCondition.CompositeQuery.PanelType,
BuilderQueries: make(map[string]*v3.BuilderQuery),
ClickHouseQueries: make(map[string]*v3.ClickHouseQuery),
PromQueries: make(map[string]*v3.PromQuery),
Unit: r.ruleCondition.CompositeQuery.Unit,
},
Variables: make(map[string]interface{}),
NoCache: true,
}
querytemplate.AssignReservedVarsV3(params)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
return nil, err
}
var query bytes.Buffer
err = tmpl.Execute(&query, params.Variables)
if err != nil {
return nil, err
}
params.CompositeQuery.ClickHouseQueries[name] = &v3.ClickHouseQuery{
Query: query.String(),
Disabled: chQuery.Disabled,
Legend: chQuery.Legend,
}
}
return params, nil
}
if r.ruleCondition.CompositeQuery != nil && r.ruleCondition.CompositeQuery.BuilderQueries != nil {
for _, q := range r.ruleCondition.CompositeQuery.BuilderQueries {
// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
q.StepInterval = minStep
}
q.SetShiftByFromFunc()
if q.DataSource == v3.DataSourceMetrics {
// if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes
// if the time range is greater than 1 week, set the step interval to be multiple of 30 mins
if end-start >= 24*time.Hour.Milliseconds() && end-start < 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/300)) * 300
} else if end-start >= 7*24*time.Hour.Milliseconds() {
q.StepInterval = int64(math.Round(float64(q.StepInterval)/1800)) * 1800
}
}
}
}
if r.ruleCondition.CompositeQuery.PanelType != v3.PanelTypeGraph {
r.ruleCondition.CompositeQuery.PanelType = v3.PanelTypeGraph
}
// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: r.ruleCondition.CompositeQuery,
Variables: make(map[string]interface{}),
NoCache: true,
}, nil
}
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToLogsV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceLogs {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.logsKeys)
return contextlinks.PrepareLinksToLogs(start, end, filterItems)
}
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
if r.version == "v5" {
return r.prepareLinksToTracesV5(ctx, ts, lbls)
}
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
start := time.UnixMilli(qr.Start)
end := time.UnixMilli(qr.End)
// TODO(srikanthccv): handle formula queries
if selectedQuery < "A" || selectedQuery > "Z" {
return ""
}
q := r.ruleCondition.CompositeQuery.BuilderQueries[selectedQuery]
if q == nil {
return ""
}
if q.DataSource != v3.DataSourceTraces {
return ""
}
queryFilter := []v3.FilterItem{}
if q.Filters != nil {
queryFilter = q.Filters.Items
}
filterItems := contextlinks.PrepareFilters(lbls.Map(), queryFilter, q.GroupBy, r.spansKeys)
return contextlinks.PrepareLinksToTraces(start, end, filterItems)
}
func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {
r.logger.InfoContext(
ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
ctx, "prepare query range request", "ts", ts.UnixMilli(), "eval_window", r.evalWindow.Milliseconds(), "eval_delay", r.evalDelay.Milliseconds(),
)
startTs, endTs := r.Timestamps(ts)
@@ -302,10 +99,10 @@ func (r *ThresholdRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (
return req, nil
}
func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToLogs(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
@@ -342,10 +139,10 @@ func (r *ThresholdRule) prepareLinksToLogsV5(ctx context.Context, ts time.Time,
return contextlinks.PrepareLinksToLogsV5(start, end, whereClause)
}
func (r *ThresholdRule) prepareLinksToTracesV5(ctx context.Context, ts time.Time, lbls labels.Labels) string {
func (r *ThresholdRule) prepareLinksToTraces(ctx context.Context, ts time.Time, lbls labels.Labels) string {
selectedQuery := r.GetSelectedQuery()
qr, err := r.prepareQueryRangeV5(ctx, ts)
qr, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return ""
}
@@ -391,115 +188,6 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
hasTracesQuery := false
for _, query := range params.CompositeQuery.BuilderQueries {
if query.DataSource == v3.DataSourceLogs {
hasLogsQuery = true
}
if query.DataSource == v3.DataSourceTraces {
hasTracesQuery = true
}
}
if hasLogsQuery {
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(params) {
logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if apiErr != nil {
return nil, apiErr.ToError()
}
logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
r.logsKeys = logsKeys
logsv3.Enrich(params, logsKeys)
}
}
if hasTracesQuery {
spanKeys, err := r.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if err != nil {
return nil, err
}
r.spansKeys = spanKeys
tracesV4.Enrich(params, spanKeys)
}
}
var results []*v3.Result
var queryErrors map[string]error
if r.version == "v4" {
results, queryErrors, err = r.querierV2.QueryRange(ctx, orgID, params)
} else {
results, queryErrors, err = r.querier.QueryRange(ctx, orgID, params)
}
if err != nil {
r.logger.ErrorContext(ctx, "failed to get alert query range result", "rule_name", r.Name(), "error", err, "query_errors", queryErrors)
return nil, fmt.Errorf("internal error while querying")
}
if params.CompositeQuery.QueryType == v3.QueryTypeBuilder {
results, err = postprocess.PostProcessResult(results, params)
if err != nil {
r.logger.ErrorContext(ctx, "failed to post process result", "rule_name", r.Name(), "error", err)
return nil, fmt.Errorf("internal error while post processing")
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
var resultVector ruletypes.Vector
if queryResult == nil {
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
return resultVector, nil
}
for _, series := range queryResult.Series {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, resultSeries...)
}
return resultVector, nil
}
func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {
params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}
var results []*v3.Result
v5Result, err := r.querierV5.QueryRange(ctx, orgID, params)
if err != nil {
@@ -507,26 +195,24 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
return nil, fmt.Errorf("internal error while querying")
}
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok {
results = append(results, transition.ConvertV5TimeSeriesDataToV4Result(tsData))
} else {
// NOTE: should not happen, but log it so we don't miss it if it does
r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got", "item_type", reflect.TypeOf(item))
}
}
selectedQuery := r.GetSelectedQuery()
var queryResult *v3.Result
for _, res := range results {
if res.QueryName == selectedQuery {
queryResult = res
var queryResult *qbtypes.TimeSeriesData
for _, item := range v5Result.Data.Results {
if tsData, ok := item.(*qbtypes.TimeSeriesData); ok && tsData.QueryName == selectedQuery {
queryResult = tsData
break
}
}
hasData := queryResult != nil && len(queryResult.Series) > 0
var allSeries []*qbtypes.TimeSeries
if queryResult != nil {
for _, bucket := range queryResult.Aggregations {
allSeries = append(allSeries, bucket.Series...)
}
}
hasData := len(allSeries) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}
@@ -539,7 +225,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
}
// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.Series
seriesToProcess := allSeries
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
@@ -552,7 +238,7 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Values), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
@@ -573,16 +259,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
valueFormatter := formatter.FromUnit(r.Unit())
var res ruletypes.Vector
var err error
if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.orgID, ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.orgID, ts)
}
res, err := r.buildAndRunQuery(ctx, r.orgID, ts)
if err != nil {
return 0, err

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -167,6 +167,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
)
}

View File

@@ -0,0 +1,194 @@
package sqlmigration
import (
"context"
"database/sql"
"encoding/json"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type migrateRulesV4ToV5 struct {
store sqlstore.SQLStore
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
func NewMigrateRulesV4ToV5Factory(
store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("migrate_rules_v4_to_v5"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &migrateRulesV4ToV5{
store: store,
telemetryStore: telemetryStore,
logger: ps.Logger,
}, nil
})
}
func (migration *migrateRulesV4ToV5) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *migrateRulesV4ToV5) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT name
FROM (
SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
INTERSECT
SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
)
ORDER BY name
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query log duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan log duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT tagKey
FROM signoz_traces.distributed_span_attributes_keys
WHERE tagType IN ('tag', 'resource')
GROUP BY tagKey
HAVING COUNT(DISTINCT tagType) > 1
ORDER BY tagKey
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
migration.logger.WarnContext(ctx, "failed to query trace duplicate keys", "error", err)
return nil, nil
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
migration.logger.WarnContext(ctx, "failed to scan trace duplicate key", "error", err)
continue
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *migrateRulesV4ToV5) Up(ctx context.Context, db *bun.DB) error {
logsKeys, err := migration.getLogDuplicateKeys(ctx)
if err != nil {
return err
}
tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
if err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var rules []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err = tx.NewSelect().
Table("rule").
Column("id", "data").
Scan(ctx, &rules)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, rule := range rules {
version, _ := rule.Data["version"].(string)
if version == "v5" {
continue
}
migration.logger.InfoContext(ctx, "migrating rule v4 to v5", "rule_id", rule.ID, "current_version", version)
// Check if the queries envelope already exists and is non-empty
hasQueriesEnvelope := false
if condition, ok := rule.Data["condition"].(map[string]any); ok {
if compositeQuery, ok := condition["compositeQuery"].(map[string]any); ok {
if queries, ok := compositeQuery["queries"].([]any); ok && len(queries) > 0 {
hasQueriesEnvelope = true
}
}
}
if hasQueriesEnvelope {
// Case 2: Already has queries envelope, just bump version
migration.logger.InfoContext(ctx, "rule already has queries envelope, bumping version", "rule_id", rule.ID)
rule.Data["version"] = "v5"
} else {
// Case 1: Old format, run full migration
migration.logger.InfoContext(ctx, "rule has old format, running full migration", "rule_id", rule.ID)
alertsMigrator.Migrate(ctx, rule.Data)
// Force version to v5 regardless of Migrate return value
rule.Data["version"] = "v5"
}
dataJSON, err := json.Marshal(rule.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("rule").
Set("data = ?", string(dataJSON)).
Where("id = ?", rule.ID).
Exec(ctx)
if err != nil {
return err
}
}
return tx.Commit()
}
func (migration *migrateRulesV4ToV5) Down(ctx context.Context, db *bun.DB) error {
return nil
}
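To make the two cases concrete, here is a hypothetical rule payload before and after Up runs; the JSON field names inside the queries envelope are assumptions for illustration, the authoritative shape being qbtypes.QueryEnvelope as used elsewhere in this diff:

// Case 1: old format, no queries envelope -- full migration runs, version forced to "v5".
before := `{"version":"v4","condition":{"compositeQuery":{"queryType":"promql","promQueries":{"A":{"query":"up == 0"}}}}}`
// Case 2: envelope already present -- only the version is bumped.
after := `{"version":"v5","condition":{"compositeQuery":{"queries":[{"type":"promql","spec":{"name":"A","query":"up == 0"}}]}}}`
_, _ = before, after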

View File

@@ -1,102 +0,0 @@
package transition
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// ConvertV5TimeSeriesDataToV4Result converts v5 TimeSeriesData to v4 Result
func ConvertV5TimeSeriesDataToV4Result(v5Data *qbtypes.TimeSeriesData) *v3.Result {
if v5Data == nil {
return nil
}
result := &v3.Result{
QueryName: v5Data.QueryName,
Series: make([]*v3.Series, 0),
}
toV4Series := func(ts *qbtypes.TimeSeries) *v3.Series {
series := &v3.Series{
Labels: make(map[string]string),
LabelsArray: make([]map[string]string, 0),
Points: make([]v3.Point, 0, len(ts.Values)),
}
for _, label := range ts.Labels {
valueStr := fmt.Sprintf("%v", label.Value)
series.Labels[label.Key.Name] = valueStr
}
if len(series.Labels) > 0 {
series.LabelsArray = append(series.LabelsArray, series.Labels)
}
for _, tsValue := range ts.Values {
if tsValue.Partial {
continue
}
point := v3.Point{
Timestamp: tsValue.Timestamp,
Value: tsValue.Value,
}
series.Points = append(series.Points, point)
}
return series
}
for _, aggBucket := range v5Data.Aggregations {
for _, ts := range aggBucket.Series {
result.Series = append(result.Series, toV4Series(ts))
}
if len(aggBucket.AnomalyScores) != 0 {
result.AnomalyScores = make([]*v3.Series, 0)
for _, ts := range aggBucket.AnomalyScores {
result.AnomalyScores = append(result.AnomalyScores, toV4Series(ts))
}
}
if len(aggBucket.PredictedSeries) != 0 {
result.PredictedSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.PredictedSeries {
result.PredictedSeries = append(result.PredictedSeries, toV4Series(ts))
}
}
if len(aggBucket.LowerBoundSeries) != 0 {
result.LowerBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.LowerBoundSeries {
result.LowerBoundSeries = append(result.LowerBoundSeries, toV4Series(ts))
}
}
if len(aggBucket.UpperBoundSeries) != 0 {
result.UpperBoundSeries = make([]*v3.Series, 0)
for _, ts := range aggBucket.UpperBoundSeries {
result.UpperBoundSeries = append(result.UpperBoundSeries, toV4Series(ts))
}
}
}
return result
}
// ConvertV5TimeSeriesDataSliceToV4Results converts a slice of v5 TimeSeriesData to v4 QueryRangeResponse
func ConvertV5TimeSeriesDataSliceToV4Results(v5DataSlice []*qbtypes.TimeSeriesData) *v3.QueryRangeResponse {
response := &v3.QueryRangeResponse{
ResultType: "matrix", // Time series data is typically "matrix" type
Result: make([]*v3.Result, 0, len(v5DataSlice)),
}
for _, v5Data := range v5DataSlice {
if result := ConvertV5TimeSeriesDataToV4Result(v5Data); result != nil {
response.Result = append(response.Result, result)
}
}
return response
}
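For context on what the removed converter did, a hedged before/after sketch (values illustrative):
in := &qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 1.5},
{Timestamp: 2000, Value: 2.5, Partial: true}, // partial values were skipped
},
}
// The converter flattened the labels to map[string]string{"host": "server1"}
// and kept only the non-partial point {1000, 1.5}; with this change, rules
// consume v5 series directly instead.
_ = in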

View File

@@ -76,6 +76,33 @@ type TimeSeries struct {
Values []*TimeSeriesValue `json:"values"`
}
// LabelsMap converts the label slice to a map[string]string for use in
// alert evaluation helpers that operate on flat label maps.
func (ts *TimeSeries) LabelsMap() map[string]string {
if ts == nil {
return nil
}
m := make(map[string]string, len(ts.Labels))
for _, l := range ts.Labels {
m[l.Key.Name] = fmt.Sprintf("%v", l.Value)
}
return m
}
// NonPartialValues returns only the values where Partial is false.
func (ts *TimeSeries) NonPartialValues() []*TimeSeriesValue {
if ts == nil {
return nil
}
result := make([]*TimeSeriesValue, 0, len(ts.Values))
for _, v := range ts.Values {
if !v.Partial {
result = append(result, v)
}
}
return result
}
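A short usage sketch of the two new helpers (values illustrative):
ts := &TimeSeries{
Labels: []*Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "api"}},
Values: []*TimeSeriesValue{
{Timestamp: 1000, Value: 0.2},
{Timestamp: 2000, Value: 0.3, Partial: true},
},
}
_ = ts.LabelsMap()             // map[string]string{"service": "api"}
_ = len(ts.NonPartialValues()) // 1: the partial value is excluded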
type Label struct {
Key telemetrytypes.TelemetryFieldKey `json:"key"`
Value any `json:"value"`

View File

@@ -203,11 +203,11 @@ func (rc *RuleCondition) IsValid() bool {
}
// ShouldEval reports whether the series should be evaluated further for alerts.
func (rc *RuleCondition) ShouldEval(series *v3.Series) bool {
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
if rc == nil {
return true
}
return !rc.RequireMinPoints || len(series.Points) >= rc.RequiredNumPoints
return !rc.RequireMinPoints || len(series.NonPartialValues()) >= rc.RequiredNumPoints
}
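Hedged usage sketch: with RequireMinPoints set, only non-partial values now count toward the minimum.
rc := &RuleCondition{RequireMinPoints: true, RequiredNumPoints: 2}
series := &qbtypes.TimeSeries{Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 1.0},
{Timestamp: 2000, Value: 2.0, Partial: true}, // no longer counted
}}
_ = rc.ShouldEval(series) // false: one non-partial value, two required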
// QueryType is a shorthand method to get query type

View File

@@ -355,6 +355,14 @@ func (r *PostableRule) validate() error {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite query is required"))
}
if r.Version != "" && r.Version != "v5" {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
}
if r.Version == "v5" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) == 0 {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "queries envelope is required in compositeQuery"))
}
if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) {
errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition"))
}
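Illustrative fragment of the tightened validation, assuming it sits inside the ruletypes package; the literal below is a sketch, not a complete rule:
r := PostableRule{Version: "v4"}
// r.validate() now fails with: only version v5 is supported, got "v4".
// With Version "v5" and an empty compositeQuery.queries it fails with:
// queries envelope is required in compositeQuery.
_ = r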

View File

@@ -8,6 +8,8 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestIsAllQueriesDisabled(t *testing.T) {
@@ -621,9 +623,9 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
}
// Test that threshold can evaluate properly
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: map[string]string{"test": "label"},
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 0.15, Timestamp: 1000}}, // 150ms in seconds
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "label"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -698,9 +700,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}
// Test with a value that should trigger both WARNING and CRITICAL thresholds
vector, err := threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: map[string]string{"service": "test"},
vector, err := threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -708,9 +710,9 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
assert.Equal(t, 2, len(vector))
vector, err = threshold.Eval(v3.Series{
Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: map[string]string{"service": "test"},
vector, err = threshold.Eval(qbtypes.TimeSeries{
Values: []*qbtypes.TimeSeriesValue{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"}},
}, "", EvalData{})
if err != nil {
t.Fatalf("Unexpected error in shouldAlert: %v", err)
@@ -723,7 +725,7 @@ func TestAnomalyNegationEval(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series v3.Series
series qbtypes.TimeSeries
shouldAlert bool
expectedValue float64
}{
@@ -751,9 +753,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
@@ -785,9 +787,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
@@ -818,9 +820,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
@@ -852,9 +854,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
@@ -885,9 +887,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`), // below & all the times
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
@@ -920,9 +922,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
@@ -954,10 +956,10 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(8) >= 7, alert
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: -8.0}, // abs(-8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
@@ -988,9 +990,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
@@ -1022,9 +1024,9 @@ func TestAnomalyNegationEval(t *testing.T) {
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{{Key: telemetrytypes.TelemetryFieldKey{Name: "host"}, Value: "server1"}},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 60.0}, // below, should alert
{Timestamp: 2000, Value: 90.0},
},

View File

@@ -8,8 +8,8 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -83,7 +83,7 @@ func (eval EvalData) HasActiveAlert(sampleLabelFp uint64) bool {
type RuleThreshold interface {
// Eval runs the given series through the threshold rules
// using the given EvalData and returns the matching series
Eval(series v3.Series, unit string, evalData EvalData) (Vector, error)
Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error)
GetRuleReceivers() []RuleReceivers
}
@@ -122,10 +122,11 @@ func (r BasicRuleThresholds) Validate() error {
return errors.Join(errs...)
}
func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalData) (Vector, error) {
func (r BasicRuleThresholds) Eval(series qbtypes.TimeSeries, unit string, evalData EvalData) (Vector, error) {
var resultVector Vector
thresholds := []BasicRuleThreshold(r)
sortThresholds(thresholds)
seriesLabels := series.LabelsMap()
for _, threshold := range thresholds {
smpl, shouldAlert := threshold.shouldAlert(series, unit)
if shouldAlert {
@@ -137,15 +138,15 @@ func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalDa
resultVector = append(resultVector, smpl)
continue
} else if evalData.SendUnmatched {
// Sanitise the series points to remove any NaN or Inf values
series.Points = removeGroupinSetPoints(series)
if len(series.Points) == 0 {
// Sanitise the series values to remove any NaN, Inf, or partial values
values := filterValidValues(series.Values)
if len(values) == 0 {
continue
}
// prepare the sample with the first point of the series
// prepare the sample with the first value of the series
smpl := Sample{
Point: Point{T: series.Points[0].Timestamp, V: series.Points[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Point: Point{T: values[0].Timestamp, V: values[0].Value},
Metric: PrepareSampleLabelsForRule(seriesLabels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
}
@@ -160,7 +161,7 @@ func (r BasicRuleThresholds) Eval(series v3.Series, unit string, evalData EvalDa
if threshold.RecoveryTarget == nil {
continue
}
sampleLabels := PrepareSampleLabelsForRule(series.Labels, threshold.Name)
sampleLabels := PrepareSampleLabelsForRule(seriesLabels, threshold.Name)
alertHash := sampleLabels.Hash()
// check if alert is active and then check if recovery threshold matches
if evalData.HasActiveAlert(alertHash) {
@@ -255,18 +256,23 @@ func (b BasicRuleThreshold) Validate() error {
return errors.Join(errs...)
}
func (b BasicRuleThreshold) matchesRecoveryThreshold(series v3.Series, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) matchesRecoveryThreshold(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.recoveryTarget(ruleUnit))
}
func (b BasicRuleThreshold) shouldAlert(series v3.Series, ruleUnit string) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlert(series qbtypes.TimeSeries, ruleUnit string) (Sample, bool) {
return b.shouldAlertWithTarget(series, b.target(ruleUnit))
}
func removeGroupinSetPoints(series v3.Series) []v3.Point {
var result []v3.Point
for _, s := range series.Points {
if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) {
result = append(result, s)
// filterValidValues returns only the values that are valid for alert evaluation:
// non-partial, non-NaN, non-Inf, and with non-negative timestamps.
func filterValidValues(values []*qbtypes.TimeSeriesValue) []*qbtypes.TimeSeriesValue {
var result []*qbtypes.TimeSeriesValue
for _, v := range values {
if v.Partial {
continue
}
if v.Timestamp >= 0 && !math.IsNaN(v.Value) && !math.IsInf(v.Value, 0) {
result = append(result, v)
}
}
return result
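A quick sketch of what the filter keeps (values illustrative; math is already imported in this file):
vals := []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: math.NaN()},         // dropped: NaN
{Timestamp: 2000, Value: 1.0, Partial: true}, // dropped: partial
{Timestamp: 3000, Value: 2.0},                // kept
}
_ = filterValidValues(vals) // single surviving value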
@@ -284,15 +290,15 @@ func PrepareSampleLabelsForRule(seriesLabels map[string]string, thresholdName st
return lb.Labels()
}
func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float64) (Sample, bool) {
func (b BasicRuleThreshold) shouldAlertWithTarget(series qbtypes.TimeSeries, target float64) (Sample, bool) {
var shouldAlert bool
var alertSmpl Sample
lbls := PrepareSampleLabelsForRule(series.Labels, b.Name)
lbls := PrepareSampleLabelsForRule(series.LabelsMap(), b.Name)
series.Points = removeGroupinSetPoints(series)
values := filterValidValues(series.Values)
// nothing to evaluate
if len(series.Points) == 0 {
if len(values) == 0 {
return alertSmpl, false
}
@@ -300,7 +306,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case AtleastOnce:
// If any sample matches the condition, the rule is firing.
if b.CompareOp == ValueIsAbove {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value > target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -308,7 +314,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -316,7 +322,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value == target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -324,7 +330,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value != target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -332,7 +338,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range series.Points {
for _, smpl := range values {
if math.Abs(smpl.Value) >= target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
@@ -345,7 +351,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
shouldAlert = true
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value <= target {
shouldAlert = false
break
@@ -354,7 +360,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
// use min value from the series
if shouldAlert {
var minValue float64 = math.Inf(1)
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value < minValue {
minValue = smpl.Value
}
@@ -362,7 +368,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsBelow {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value >= target {
shouldAlert = false
break
@@ -370,7 +376,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
if shouldAlert {
var maxValue float64 = math.Inf(-1)
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value > maxValue {
maxValue = smpl.Value
}
@@ -378,14 +384,14 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
}
} else if b.CompareOp == ValueIsEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value != target {
shouldAlert = false
break
}
}
} else if b.CompareOp == ValueIsNotEq {
for _, smpl := range series.Points {
for _, smpl := range values {
if smpl.Value == target {
shouldAlert = false
break
@@ -393,7 +399,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
// use any non-Inf, non-NaN value from the series
if shouldAlert {
for _, smpl := range series.Points {
for _, smpl := range values {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
break
@@ -401,7 +407,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
}
}
} else if b.CompareOp == ValueOutsideBounds {
for _, smpl := range series.Points {
for _, smpl := range values {
if math.Abs(smpl.Value) < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
@@ -412,7 +418,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case OnAverage:
// If the average of all samples matches the condition, the rule is firing.
var sum, count float64
for _, smpl := range series.Points {
for _, smpl := range values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -446,7 +452,7 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
// If the sum of all samples matches the condition, the rule is firing.
var sum float64
for _, smpl := range series.Points {
for _, smpl := range values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
}
@@ -477,21 +483,22 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series v3.Series, target float
case Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
lastValue := values[len(values)-1].Value
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
if b.CompareOp == ValueIsAbove {
if series.Points[len(series.Points)-1].Value > target {
if lastValue > target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsBelow {
if series.Points[len(series.Points)-1].Value < target {
if lastValue < target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsEq {
if series.Points[len(series.Points)-1].Value == target {
if lastValue == target {
shouldAlert = true
}
} else if b.CompareOp == ValueIsNotEq {
if series.Points[len(series.Points)-1].Value != target {
if lastValue != target {
shouldAlert = true
}
}
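Hedged sketch of the refactored Last branch: only the final valid value decides the outcome.
values := []*qbtypes.TimeSeriesValue{
{Timestamp: 1000, Value: 150}, // ignored by Last
{Timestamp: 2000, Value: 50},  // decides the outcome
}
lastValue := values[len(values)-1].Value
_ = lastValue > 100 // false: with ValueIsAbove and target 100, no alert fires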

View File

@@ -6,16 +6,24 @@ import (
"github.com/stretchr/testify/assert"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
target := 100.0
makeLabel := func(name, value string) *qbtypes.Label {
return &qbtypes.Label{Key: telemetrytypes.TelemetryFieldKey{Name: name}, Value: value}
}
makeValue := func(value float64, ts int64) *qbtypes.TimeSeriesValue {
return &qbtypes.TimeSeriesValue{Value: value, Timestamp: ts}
}
tests := []struct {
name string
threshold BasicRuleThreshold
series v3.Series
series qbtypes.TimeSeries
ruleUnit string
shouldAlert bool
}{
@@ -28,11 +36,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 150ms in seconds
},
ruleUnit: "s",
shouldAlert: true,
@@ -46,11 +52,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
},
ruleUnit: "s",
shouldAlert: false,
@@ -64,11 +68,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150000, Timestamp: 1000}, // 150000ms = 150s
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150000, 1000)}, // 150000ms = 150s
},
ruleUnit: "ms",
shouldAlert: true,
@@ -83,11 +85,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 0.15KiB ≈ 153.6 bytes
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)}, // 0.15KiB ≈ 153.6 bytes
},
ruleUnit: "kbytes",
shouldAlert: true,
@@ -101,11 +101,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
},
ruleUnit: "mbytes",
shouldAlert: true,
@@ -120,11 +118,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.05, Timestamp: 1000}, // 50ms in seconds
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.05, 1000)}, // 50ms in seconds
},
ruleUnit: "s",
shouldAlert: true,
@@ -138,12 +134,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: OnAverage,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.08, Timestamp: 1000}, // 80ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.08, 1000), // 80ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
},
},
ruleUnit: "s",
@@ -158,12 +154,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: InTotal,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.04, Timestamp: 1000}, // 40MB
{Value: 0.05, Timestamp: 2000}, // 50MB
{Value: 0.03, Timestamp: 3000}, // 30MB
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.04, 1000), // 40MB
makeValue(0.05, 2000), // 50MB
makeValue(0.03, 3000), // 30MB
},
},
ruleUnit: "decgbytes",
@@ -178,12 +174,12 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AllTheTimes,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.11, Timestamp: 1000}, // 110ms
{Value: 0.12, Timestamp: 2000}, // 120ms
{Value: 0.15, Timestamp: 3000}, // 150ms
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.11, 1000), // 110ms
makeValue(0.12, 2000), // 120ms
makeValue(0.15, 3000), // 150ms
},
},
ruleUnit: "s",
@@ -198,11 +194,11 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: Last,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000}, // 150kB
{Value: 0.05, Timestamp: 2000}, // 50kB (last value)
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{
makeValue(0.15, 1000), // 150kB
makeValue(0.05, 2000), // 50kB (last value)
},
},
ruleUnit: "decmbytes",
@@ -218,11 +214,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 0.15, Timestamp: 1000},
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(0.15, 1000)},
},
ruleUnit: "KBs",
shouldAlert: true,
@@ -237,11 +231,9 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150ms
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150ms
},
ruleUnit: "ms",
shouldAlert: true,
@@ -256,55 +248,13 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
MatchType: AtleastOnce,
CompareOp: ValueIsAbove,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 150, Timestamp: 1000}, // 150 (unitless)
},
series: qbtypes.TimeSeries{
Labels: []*qbtypes.Label{makeLabel("service", "test")},
Values: []*qbtypes.TimeSeriesValue{makeValue(150, 1000)}, // 150 (unitless)
},
ruleUnit: "",
shouldAlert: true,
},
// bytes and Gibibytes,
// rule will only fire if target is converted to bytes so that the sample value becomes lower than the target 100GiBy
{
name: "bytes to Gibibytes - should alert",
threshold: BasicRuleThreshold{
Name: CriticalThresholdName,
TargetValue: &target, // 100 Gibibytes
TargetUnit: "GiBy",
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 70 * 1024 * 1024 * 1024, Timestamp: 1000}, // 70 Gibibytes
},
},
ruleUnit: "bytes",
shouldAlert: true,
},
// data Rate conversion - bytes per second to MiB per second
// rule will only fire if target is converted to bytes so that the sample value becomes lower than the target 100 MiB/s
{
name: "bytes per second to MiB per second - should alert",
threshold: BasicRuleThreshold{
Name: CriticalThresholdName,
TargetValue: &target, // 100 MiB/s
TargetUnit: "MiBy/s",
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 30 * 1024 * 1024, Timestamp: 1000}, // 30 MiB/s
},
},
ruleUnit: "By/s",
shouldAlert: true,
},
}
for _, tt := range tests {