Compare commits

...

11 Commits

Author SHA1 Message Date
Srikanth Chekuri
416e8d2a5e fix: panic from label set conversion (#9316) 2025-10-12 18:14:16 +05:30
Niladri Adhikary
43a6c7dcd6 feat: add abs value function in formula (#9315)
Signed-off-by: "niladrix719" <niladrix719@gmail.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-10-12 11:15:59 +00:00
Amlan Kumar Nandy
5005cae2ad fix: edit alerts crash (#9310) 2025-10-12 15:55:42 +05:30
Tushar Vats
3123447005 Added integration tests for TTL methods (#9289)
This pull request refactors how TTL (Time-To-Live) settings are applied for logs, metrics, and traces in the ClickHouse reader service. The main change is the removal of the dedicated setTTLLogs method and the consolidation of TTL logic to support only metrics and traces. TTL requests are now routed by telemetry type, and logs TTL is no longer handled.
2025-10-10 22:20:25 +05:30
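
The routing described in the entry above can be pictured with a minimal Go sketch. Everything here is a hypothetical stand-in (the ClickHouseReader stub, TTLParams, setTTLMetrics, and setTTLTraces are not the repository's actual symbols); only the shape of the type-based routing follows the commit description.

package reader

import (
	"context"
	"fmt"
)

// Stand-ins for the real service types; only the routing below mirrors
// the behaviour described in the commit message.
type ClickHouseReader struct{}

type TTLParams struct {
	Type        string // "metrics" or "traces"; logs TTL is no longer handled
	DurationHrs int
}

func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, p *TTLParams) error { return nil }
func (r *ClickHouseReader) setTTLTraces(ctx context.Context, p *TTLParams) error  { return nil }

// SetTTL routes a TTL request by telemetry type and rejects logs,
// matching the consolidation described above.
func (r *ClickHouseReader) SetTTL(ctx context.Context, p *TTLParams) error {
	switch p.Type {
	case "metrics":
		return r.setTTLMetrics(ctx, p)
	case "traces":
		return r.setTTLTraces(ctx, p)
	default:
		return fmt.Errorf("TTL not supported for type %q", p.Type)
	}
}
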
primus-bot[bot]
6c59b5405e chore(release): bump to v0.97.0 (#9305)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-10-10 12:19:35 +05:30
Chitransh
d26b57b0d8 feat: added new datasources (#9167)
* feat: added new datasources

* fix: added new datasource
2025-10-09 08:46:14 +00:00
Aditya Singh
da17375f10 Preferences framework re-factor (#9206)
* fix: logs popover content logic extracted out

* fix: logs popover content in live view

* fix: destroy popover on close

* feat: add logs format tests

* feat: minor refactor

* feat: test case refactor

* feat: remove menu refs in logs live view

* feat: globalise Preference context and remove async logic

* feat: change preference context state structure to support both logs and traces pref

* feat: test refactor
2025-10-09 04:40:52 +00:00
Vikrant Gupta
a96489d06e feat(authz): address tenant isolation for authz (#9293)
* feat(authz): address tenant isolation for authz

* feat(authz): handle role module self registry

* feat(authz): keep role / user / resource sync in naming

* feat(authz): rename orgId to orgID

* feat(authz): add the missing / for user

* feat(authz): remove embedding for pkgopenfgaauthz service
2025-10-08 17:04:00 +00:00
Nityananda Gohain
8c29debb52 fix: use numerical comparison instead of lexicographical for string-encoded numbers (#9154)
* fix: let clickhouse handle string to number conversion

* fix: ignore casting if it's a comparison operator for number key

* fix: add integration tests

* fix: update comments

* fix: convert only if it's actually not an integer with comparison operator

* fix: force convert to float when number

* fix: integration tests

* fix: correct the comment

* fix: update comment

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-10-08 14:21:40 +05:30
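
The bug class this commit fixes is easy to demonstrate: lexicographic comparison orders string-encoded numbers character by character, so "9" sorts after "10". A standalone Go illustration (not the repository's code):

package main

import (
	"fmt"
	"strconv"
)

func main() {
	a, b := "9", "10"

	// Lexicographic: compares byte by byte, '9' (0x39) > '1' (0x31).
	fmt.Println(a > b) // true

	// Numerical: parse first, then compare, as the fix has ClickHouse do.
	af, _ := strconv.ParseFloat(a, 64)
	bf, _ := strconv.ParseFloat(b, 64)
	fmt.Println(af > bf) // false
}
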
Srikanth Chekuri
9cebd49a2c fix: anomaly with below operator negates the target (#9288) 2025-10-08 12:11:31 +05:30
Shaheer Kochai
a22ef64bb0 fix: fix the flaky test (#9255)
* fix: fix the flaky test

* chore: remove unnecessary changes
2025-10-06 22:02:12 +05:30
91 changed files with 4063 additions and 1194 deletions

View File

@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.6
image: signoz/signoz-schema-migrator:v0.129.7
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.6
image: signoz/signoz-schema-migrator:v0.129.7
container_name: schema-migrator-async
command:
- async

View File

@@ -17,6 +17,7 @@ jobs:
- bootstrap
- auth
- querier
- ttl
sqlstore-provider:
- postgres
- sqlite

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.96.1
image: signoz/signoz:v0.97.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.6
image: signoz/signoz-otel-collector:v0.129.7
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.6
image: signoz/signoz-schema-migrator:v0.129.7
deploy:
restart_policy:
condition: on-failure

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.96.1
image: signoz/signoz:v0.97.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.6
image: signoz/signoz-otel-collector:v0.129.7
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.6
image: signoz/signoz-schema-migrator:v0.129.7
deploy:
restart_policy:
condition: on-failure

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.96.1}
image: signoz/signoz:${VERSION:-v0.97.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.6}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-async
command:
- async

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.96.1}
image: signoz/signoz:${VERSION:-v0.97.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.6}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
container_name: schema-migrator-async
command:
- async

View File

@@ -232,7 +232,7 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
series, predictedSeries *qbtypes.TimeSeries,
series, predictedSeries, weekSeries *qbtypes.TimeSeries,
zScoreThreshold float64,
) (*qbtypes.TimeSeries, *qbtypes.TimeSeries) {
upperBoundSeries := &qbtypes.TimeSeries{
@@ -246,8 +246,8 @@ func (p *BaseSeasonalProvider) getBounds(
}
for idx, curr := range series.Values {
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(weekSeries)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(weekSeries)
upperBoundSeries.Values = append(upperBoundSeries.Values, &qbtypes.TimeSeriesValue{
Timestamp: curr.Timestamp,
Value: upperBound,
@@ -398,8 +398,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
aggOfInterest := result.Aggregations[0]
for _, series := range aggOfInterest.Series {
stdDev := p.getStdDev(series)
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
pastPeriodSeries := p.getMatchingSeries(ctx, pastPeriodResult, series)
currentSeasonSeries := p.getMatchingSeries(ctx, currentSeasonResult, series)
@@ -407,6 +405,9 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past2SeasonSeries := p.getMatchingSeries(ctx, past2SeasonResult, series)
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
stdDev := p.getStdDev(currentSeasonSeries)
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
prevSeriesAvg := p.getAvg(pastPeriodSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
@@ -435,6 +436,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
upperBoundSeries, lowerBoundSeries := p.getBounds(
series,
predictedSeries,
currentSeasonSeries,
zScoreThreshold,
)
aggOfInterest.UpperBoundSeries = append(aggOfInterest.UpperBoundSeries, upperBoundSeries)
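
Restating the hunk above: the z-score bounds still center on the moving average of the predicted series, but the spread is now taken from the passed-in week series (the current-season series at the call site) rather than the evaluated series. In LaTeX form:

\mathrm{upper}_t = \mathrm{MA}_w(\mathrm{predicted}, t) + z \cdot \sigma(\mathrm{weekSeries})
\mathrm{lower}_t = \mathrm{MA}_w(\mathrm{predicted}, t) - z \cdot \sigma(\mathrm{weekSeries})

where w is movingAvgWindowSize and z is zScoreThreshold.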

View File

@@ -0,0 +1,79 @@
package openfgaauthz
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)
type provider struct {
pkgAuthzService authz.AuthZ
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.pkgAuthzService.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.pkgAuthzService.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.pkgAuthzService.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -1,132 +0,0 @@
package middleware
import (
"log/slog"
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
const (
authzDeniedMessage string = "::AUTHZ-DENIED::"
)
type AuthZ struct {
logger *slog.Logger
authzService authz.AuthZ
}
func NewAuthZ(logger *slog.Logger) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
if err := claims.IsViewer(); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
if err := claims.IsEditor(); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
if err := claims.IsAdmin(); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) SelfAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
id := mux.Vars(req)["id"]
if err := claims.IsSelfAccess(id); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) OpenAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
next(rw, req)
})
}
// Check middleware accepts the relation, typeable, parentTypeable (for direct access + group relations) and a callback function to derive the selector and parentSelectors on a per-request basis.
func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relation, translation authtypes.Relation, typeable authtypes.Typeable, cb authtypes.SelectorCallbackFn) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
selector, err := cb(req.Context(), claims)
if err != nil {
render.Error(rw, err)
return
}
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, relation, typeable, selector)
if err != nil {
render.Error(rw, err)
return
}
next(rw, req)
})
}

View File

@@ -78,11 +78,6 @@ func NewAnomalyRule(
opts = append(opts, baserules.WithLogger(logger))
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
target := -1 * *p.RuleCondition.Target
p.RuleCondition.Target = &target
}
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
if err != nil {
return nil, err
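
Read together with commit 9cebd49a2c above ("anomaly with below operator negates the target"), the removed block is the bug: it sign-flipped the configured target whenever the compare operator was ValueIsBelow. A standalone Go illustration of the effect, assuming (purely for illustration) that a "below" condition is a plain less-than check against the target:

package main

import "fmt"

func main() {
	value, target := 5.0, 10.0

	// Old behaviour: the target for "value is below 10" was negated...
	negated := -1 * target
	fmt.Println(value < negated) // false: 5 is never below -10

	// ...so the condition could not fire; without the negation it does.
	fmt.Println(value < target) // true
}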

View File

@@ -0,0 +1,16 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 92.2 65" style="enable-background:new 0 0 92.2 65;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="65" width="92.2" x="-43.7" y="-98">
</sliceSourceBounds>
</sfw>
</metadata>
<path class="st0" d="M66.5,0H52.4l25.7,65h14.1L66.5,0z M25.7,0L0,65h14.4l5.3-13.6h26.9L51.8,65h14.4L40.5,0C40.5,0,25.7,0,25.7,0z
M24.3,39.3l8.8-22.8l8.8,22.8H24.3z">
</path>
</svg>

After: image, 714 B

View File

@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Claude</title><path d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z" fill="#D97757" fill-rule="nonzero"></path></svg>

After: image, 1.7 KiB

View File

@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>DeepSeek</title><path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z" fill="#4D6BFE"></path></svg>

After: image, 2.1 KiB

View File

@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Gemini</title><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="#3186FF"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-0)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-1)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-2)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-0" x1="7" x2="11" y1="15.5" y2="12"><stop stop-color="#08B962"></stop><stop offset="1" stop-color="#08B962" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-1" x1="8" x2="11.5" y1="5.5" y2="11"><stop stop-color="#F94543"></stop><stop offset="1" stop-color="#F94543" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-2" x1="3.5" x2="17.5" y1="13.5" y2="12"><stop stop-color="#FABC12"></stop><stop offset=".46" stop-color="#FABC12" stop-opacity="0"></stop></linearGradient></defs></svg>

After: image, 2.8 KiB

View File

@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>LangChain</title><path d="M8.373 14.502c.013-.06.024-.118.038-.17l.061.145c.115.28.229.557.506.714-.012.254-.334.357-.552.326-.048-.114-.115-.228-.255-.164-.143.056-.3-.01-.266-.185.333-.012.407-.371.468-.666zM18.385 9.245c-.318 0-.616.122-.839.342l-.902.887c-.243.24-.368.572-.343.913l.006.056c.032.262.149.498.337.682.13.128.273.21.447.266a.866.866 0 01-.247.777l-.056.055a2.022 2.022 0 01-1.355-1.555l-.01-.057-.046.037c-.03.024-.06.05-.088.078l-.902.887a1.156 1.156 0 000 1.65c.231.228.535.342.84.342.304 0 .607-.114.838-.341l.902-.888a1.156 1.156 0 00-.436-1.921.953.953 0 01.276-.842 2.062 2.062 0 011.371 1.57l.01.057.047-.037c.03-.024.06-.05.088-.078l.902-.888a1.155 1.155 0 000-1.65 1.188 1.188 0 00-.84-.342z" fill="#1C3C3C"></path><path clip-rule="evenodd" d="M17.901 6H6.1C2.736 6 0 8.692 0 12s2.736 6 6.099 6H17.9C21.264 18 24 15.308 24 12s-2.736-6-6.099-6zm-5.821 9.407c-.195.04-.414.047-.562-.106-.045.1-.136.077-.221.056a.797.797 0 00-.061-.014c-.01.025-.017.048-.026.073-.329.021-.575-.309-.732-.558a4.991 4.991 0 00-.473-.21c-.172-.07-.345-.14-.509-.23a2.218 2.218 0 00-.004.173c-.002.244-.004.503-.227.651-.007.295.236.292.476.29.207-.003.41-.005.447.184a.485.485 0 01-.05.003c-.046 0-.092 0-.127.034-.117.111-.242.063-.372.013-.12-.046-.243-.094-.367-.02a2.318 2.318 0 00-.262.154.97.97 0 01-.548.194c-.024-.036-.014-.059.006-.08a.562.562 0 00.043-.056c.019-.028.035-.057.051-.084.054-.095.103-.18.242-.22-.185-.029-.344.055-.5.137l-.004.002a4.21 4.21 0 01-.065.034c-.097.04-.154.009-.212-.023-.082-.045-.168-.092-.376.04-.04-.032-.02-.061.002-.086.091-.109.21-.125.345-.119-.351-.193-.604-.056-.81.055-.182.098-.327.176-.471-.012-.065.017-.102.063-.138.108-.015.02-.03.038-.047.055-.035-.039-.027-.083-.018-.128l.005-.026a.242.242 0 00.003-.03l-.027-.01c-.053-.022-.105-.044-.09-.124-.117-.04-.2.03-.286.094-.054-.041-.01-.095.032-.145a.279.279 0 00.045-.065c.038-.065.103-.067.166-.069.054-.001.108-.003.145-.042.133-.075.297-.036.462.003.121.028.242.057.354.042.203.025.454-.18.352-.385-.186-.233-.184-.528-.183-.813v-.143c-.016-.108-.172-.233-.328-.358-.12-.095-.24-.191-.298-.28-.16-.177-.285-.382-.409-.585l-.015-.024c-.212-.404-.297-.86-.382-1.315-.103-.546-.205-1.09-.526-1.54-.266.144-.612.075-.841-.118-.12.107-.13.247-.138.396l-.001.014c-.297-.292-.26-.844-.023-1.17.097-.128.213-.233.342-.326.03-.021.04-.042.039-.074.235-1.04 1.836-.839 2.342-.103.167.206.281.442.395.678.137.283.273.566.5.795.22.237.452.463.684.689.359.35.718.699 1.032 1.089.49.587.839 1.276 1.144 1.97.05.092.08.193.11.293.044.15.089.299.2.417.026.035.084.088.149.148.156.143.357.328.289.409.009.019.027.04.05.06.032.028.074.058.116.088.122.087.25.178.16.25zm7.778-3.545l-.902.887c-.24.237-.537.413-.859.51l-.017.005-.006.015A2.021 2.021 0 0117.6 14l-.902.888c-.393.387-.916.6-1.474.6-.557 0-1.08-.213-1.474-.6a2.03 2.03 0 010-2.9l.902-.888c.242-.238.531-.409.859-.508l.016-.004.006-.016c.105-.272.265-.516.475-.724l.902-.887c.393-.387.917-.6 1.474-.6.558 0 1.08.213 1.474.6.394.387.61.902.61 1.45 0 .549-.216 1.064-.61 1.45v.001z" fill="#1C3C3C" fill-rule="evenodd"></path></svg>

After: image, 3.1 KiB

View File

@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>LlamaIndex</title><path d="M15.855 17.122c-2.092.924-4.358.545-5.23.24 0 .21-.01.857-.048 1.78-.038.924-.332 1.507-.475 1.684.016.577.029 1.837-.047 2.26a1.93 1.93 0 01-.476.914H8.295c.114-.577.555-.946.761-1.058.114-1.193-.11-2.229-.238-2.597-.126.449-.437 1.49-.665 2.068a6.418 6.418 0 01-.713 1.299h-.951c-.048-.578.27-.77.475-.77.095-.177.323-.731.476-1.54.152-.807-.064-2.324-.19-2.981v-2.068c-1.522-.818-2.092-1.636-2.473-2.55-.304-.73-.222-1.843-.142-2.308-.096-.176-.373-.625-.476-1.25-.142-.866-.063-1.491 0-1.828-.095-.096-.285-.587-.285-1.78 0-1.192.349-1.811.523-1.972v-.529c-.666-.048-1.331-.336-1.712-.721-.38-.385-.095-.962.143-1.154.238-.193.475-.049.808-.145.333-.096.618-.192.76-.48C4.512 1.403 4.287.448 4.16 0c.57.077.935.577 1.046.818V0c.713.337 1.997 1.154 2.425 2.934.342 1.424.586 4.409.665 5.723 1.823.016 4.137-.26 6.229.193 1.901.412 2.757 1.25 3.755 1.25.999 0 1.57-.577 2.282-.096.714.481 1.094 1.828.999 2.838-.076.808-.697 1.074-.998 1.106-.38 1.27 0 2.485.237 2.934v1.827c.111.16.333.655.333 1.347 0 .693-.222 1.154-.333 1.299.19 1.077-.08 2.18-.238 2.597h-1.283c.152-.385.412-.481.523-.481.228-1.193.063-2.293-.048-2.693-.722-.424-1.188-1.17-1.331-1.491.016.272-.029 1.029-.333 1.875-.304.847-.76 1.347-.95 1.491v1.01h-1.284c0-.615.348-.737.523-.721.222-.4.76-1.01.76-2.212 0-1.015-.713-1.492-1.236-2.405-.248-.434-.127-.978-.047-1.203z" fill="url(#lobe-icons-llama-index-fill)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-llama-index-fill" x1="4.021" x2="24.613" y1="2.02" y2="19.277"><stop offset=".062" stop-color="#F6DCD9"></stop><stop offset=".326" stop-color="#FFA5EA"></stop><stop offset=".589" stop-color="#45DFF8"></stop><stop offset="1" stop-color="#BC8DEB"></stop></linearGradient></defs></svg>

After: image, 1.8 KiB

File diff suppressed because one or more lines are too long. After: image, 5.5 KiB

View File

@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="none"><path fill="#06D092" d="M8 0L1 4v8l7 4 7-4V4L8 0zm3.119 8.797L9.254 9.863 7.001 8.65v2.549l-2.118 1.33v-5.33l1.68-1.018 2.332 1.216V4.794l2.23-1.322-.006 5.325z"/></svg>

After: image, 389 B

View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128">
<path fill="#f5a800" d="M67.648 69.797c-5.246 5.25-5.246 13.758 0 19.008 5.25 5.246 13.758 5.246 19.004 0 5.25-5.25 5.25-13.758 0-19.008-5.246-5.246-13.754-5.246-19.004 0Zm14.207 14.219a6.649 6.649 0 0 1-9.41 0 6.65 6.65 0 0 1 0-9.407 6.649 6.649 0 0 1 9.41 0c2.598 2.586 2.598 6.809 0 9.407ZM86.43 3.672l-8.235 8.234a4.17 4.17 0 0 0 0 5.875l32.149 32.149a4.17 4.17 0 0 0 5.875 0l8.234-8.235c1.61-1.61 1.61-4.261 0-5.87L92.29 3.671a4.159 4.159 0 0 0-5.86 0ZM28.738 108.895a3.763 3.763 0 0 0 0-5.31l-4.183-4.187a3.768 3.768 0 0 0-5.313 0l-8.644 8.649-.016.012-2.371-2.375c-1.313-1.313-3.45-1.313-4.75 0-1.313 1.312-1.313 3.449 0 4.75l14.246 14.242a3.353 3.353 0 0 0 4.746 0c1.3-1.313 1.313-3.45 0-4.746l-2.375-2.375.016-.012Zm0 0"/>
<path fill="#425cc7" d="M72.297 27.313 54.004 45.605c-1.625 1.625-1.625 4.301 0 5.926L65.3 62.824c7.984-5.746 19.18-5.035 26.363 2.153l9.148-9.149c1.622-1.625 1.622-4.297 0-5.922L78.22 27.313a4.185 4.185 0 0 0-5.922 0ZM60.55 67.585l-6.672-6.672c-1.563-1.562-4.125-1.562-5.684 0l-23.53 23.54a4.036 4.036 0 0 0 0 5.687l13.331 13.332a4.036 4.036 0 0 0 5.688 0l15.132-15.157c-3.199-6.609-2.625-14.593 1.735-20.73Zm0 0"/>
</svg>

After: image, 1.2 KiB

View File

@@ -0,0 +1,99 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="64"
height="64"
viewBox="0 0 64 64"
version="1.1"
id="svg20"
sodipodi:docname="supabase-icon.svg"
style="fill:none"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)">
<metadata
id="metadata24">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1687"
inkscape:window-height="849"
id="namedview22"
showgrid="false"
inkscape:zoom="2.0884956"
inkscape:cx="54.5"
inkscape:cy="56.5"
inkscape:window-x="70"
inkscape:window-y="0"
inkscape:window-maximized="0"
inkscape:current-layer="svg20" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path2"
style="fill:url(#paint0_linear);stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path4"
style="fill:url(#paint1_linear);fill-opacity:0.2;stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 26.89694,1.0634102 c 1.634986,-2.05918508 4.950125,-0.93090008 4.989521,1.698149 L 32.138899,41.214003 H 6.607076 c -4.6832501,0 -7.29518376,-5.409032 -4.3830007,-9.07673 z"
id="path6"
inkscape:connector-curvature="0"
style="fill:#3ecf8e;stroke-width:0.57177335" />
<defs
id="defs18">
<linearGradient
id="paint0_linear"
x1="53.973801"
y1="54.973999"
x2="94.163498"
y2="71.829498"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
stop-color="#249361"
id="stop8" />
<stop
offset="1"
stop-color="#3ECF8E"
id="stop10" />
</linearGradient>
<linearGradient
id="paint1_linear"
x1="36.1558"
y1="30.577999"
x2="54.484402"
y2="65.080597"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
id="stop13" />
<stop
offset="1"
stop-opacity="0"
id="stop15" />
</linearGradient>
</defs>
</svg>

After: image, 3.2 KiB

View File

@@ -27,6 +27,7 @@ import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { KBarCommandPaletteProvider } from 'providers/KBarCommandPaletteProvider';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
@@ -382,20 +383,22 @@ function App(): JSX.Element {
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>

View File

@@ -25,8 +25,8 @@ function QuerySection(): JSX.Element {
const alertDef = buildAlertDefForChartPreview({ alertType, thresholdState });
const onQueryCategoryChange = (val: EQueryType): void => {
const query: Query = { ...currentQuery, queryType: val };
const onQueryCategoryChange = (queryType: EQueryType): void => {
const query: Query = { ...currentQuery, queryType };
redirectWithQueryBuilderData(query);
};

View File

@@ -2,16 +2,28 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { QueryParams } from 'constants/query';
import {
initialClickHouseData,
initialQueryPromQLData,
} from 'constants/queryBuilder';
import { AlertDetectionTypes } from 'container/FormAlertRules';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import store from 'store';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { EQueryType } from 'types/common/dashboard';
import { DataSource } from 'types/common/queryBuilder';
import { CreateAlertProvider } from '../../context';
import QuerySection from '../QuerySection';
jest.mock('uuid', () => ({
v4: (): string => 'test-uuid-12345',
}));
const MOCK_UUID = 'test-uuid-12345';
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: jest.fn(),
}));
@@ -48,12 +60,27 @@ jest.mock(
queryCategory,
alertType,
panelType,
setQueryCategory,
}: any): JSX.Element {
return (
<div data-testid="query-section-component">
<div data-testid="query-category">{queryCategory}</div>
<div data-testid="alert-type">{alertType}</div>
<div data-testid="panel-type">{panelType}</div>
<button
type="button"
data-testid="change-to-promql"
onClick={(): void => setQueryCategory(EQueryType.PROM)}
>
Change to PromQL
</button>
<button
type="button"
data-testid="change-to-query-builder"
onClick={(): void => setQueryCategory(EQueryType.QUERY_BUILDER)}
>
Change to Query Builder
</button>
</div>
);
},
@@ -240,17 +267,6 @@ describe('QuerySection', () => {
expect(screen.getByTestId('panel-type')).toHaveTextContent('graph');
});
it('has correct CSS classes for tab styling', () => {
renderQuerySection();
const tabs = screen.getAllByRole('button');
tabs.forEach((tab) => {
expect(tab).toHaveClass('list-view-tab');
expect(tab).toHaveClass('explorer-view-option');
});
});
it('renders with correct container structure', () => {
renderQuerySection();
@@ -307,4 +323,172 @@ describe('QuerySection', () => {
expect(metricsButton).toHaveClass(ACTIVE_TAB_CLASS);
expect(logsButton).not.toHaveClass(ACTIVE_TAB_CLASS);
});
it('updates the query data when the alert type changes', async () => {
const user = userEvent.setup();
renderQuerySection();
const metricsTab = screen.getByText(METRICS_TEXT);
await user.click(metricsTab);
const result = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0];
expect(result[0]).toEqual({
id: MOCK_UUID,
queryType: EQueryType.QUERY_BUILDER,
unit: undefined,
builder: {
queryData: [
expect.objectContaining({
dataSource: DataSource.METRICS,
queryName: 'A',
}),
],
queryFormulas: [],
queryTraceOperator: [],
},
promql: [initialQueryPromQLData],
clickhouse_sql: [initialClickHouseData],
});
expect(result[1]).toEqual({
[QueryParams.alertType]: AlertTypes.METRICS_BASED_ALERT,
[QueryParams.ruleType]: AlertDetectionTypes.THRESHOLD_ALERT,
});
});
it('updates the query data when the query type changes from query_builder to promql', async () => {
const user = userEvent.setup();
renderQuerySection();
const changeToPromQLButton = screen.getByTestId('change-to-promql');
await user.click(changeToPromQLButton);
expect(
mockUseQueryBuilder.redirectWithQueryBuilderData,
).toHaveBeenCalledTimes(1);
const [
queryArg,
] = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0];
expect(queryArg).toEqual({
...mockUseQueryBuilder.currentQuery,
queryType: EQueryType.PROM,
});
expect(mockUseQueryBuilder.redirectWithQueryBuilderData).toHaveBeenCalledWith(
queryArg,
);
});
it('updates the query data when switching from promql to query_builder for logs', async () => {
const user = userEvent.setup();
const mockCurrentQueryWithPromQL = {
...mockUseQueryBuilder.currentQuery,
queryType: EQueryType.PROM,
builder: {
queryData: [
{
dataSource: DataSource.LOGS,
},
],
},
};
useQueryBuilder.mockReturnValue({
...mockUseQueryBuilder,
currentQuery: mockCurrentQueryWithPromQL,
});
render(
<Provider store={store}>
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<CreateAlertProvider initialAlertType={AlertTypes.LOGS_BASED_ALERT}>
<QuerySection />
</CreateAlertProvider>
</MemoryRouter>
</QueryClientProvider>
</Provider>,
);
const changeToQueryBuilderButton = screen.getByTestId(
'change-to-query-builder',
);
await user.click(changeToQueryBuilderButton);
expect(
mockUseQueryBuilder.redirectWithQueryBuilderData,
).toHaveBeenCalledTimes(1);
const [
queryArg,
] = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0];
expect(queryArg).toEqual({
...mockCurrentQueryWithPromQL,
queryType: EQueryType.QUERY_BUILDER,
});
expect(mockUseQueryBuilder.redirectWithQueryBuilderData).toHaveBeenCalledWith(
queryArg,
);
});
it('updates the query data when switching from clickhouse_sql to query_builder for traces', async () => {
const user = userEvent.setup();
const mockCurrentQueryWithClickhouseSQL = {
...mockUseQueryBuilder.currentQuery,
queryType: EQueryType.CLICKHOUSE,
builder: {
queryData: [
{
dataSource: DataSource.TRACES,
},
],
},
};
useQueryBuilder.mockReturnValue({
...mockUseQueryBuilder,
currentQuery: mockCurrentQueryWithClickhouseSQL,
});
render(
<Provider store={store}>
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<CreateAlertProvider initialAlertType={AlertTypes.TRACES_BASED_ALERT}>
<QuerySection />
</CreateAlertProvider>
</MemoryRouter>
</QueryClientProvider>
</Provider>,
);
const changeToQueryBuilderButton = screen.getByTestId(
'change-to-query-builder',
);
await user.click(changeToQueryBuilderButton);
expect(
mockUseQueryBuilder.redirectWithQueryBuilderData,
).toHaveBeenCalledTimes(1);
const [
queryArg,
] = mockUseQueryBuilder.redirectWithQueryBuilderData.mock.calls[0];
expect(queryArg).toEqual({
...mockCurrentQueryWithClickhouseSQL,
queryType: EQueryType.QUERY_BUILDER,
});
expect(mockUseQueryBuilder.redirectWithQueryBuilderData).toHaveBeenCalledWith(
queryArg,
);
});
});

View File

@@ -0,0 +1,678 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
alertDefaults,
anamolyAlertDefaults,
exceptionAlertDefaults,
logAlertDefaults,
traceAlertDefaults,
} from 'container/CreateAlertRule/defaults';
import dayjs from 'dayjs';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import {
INITIAL_ADVANCED_OPTIONS_STATE,
INITIAL_ALERT_STATE,
INITIAL_ALERT_THRESHOLD_STATE,
INITIAL_EVALUATION_WINDOW_STATE,
INITIAL_NOTIFICATION_SETTINGS_STATE,
} from '../constants';
import {
AdvancedOptionsState,
AlertState,
AlertThresholdMatchType,
AlertThresholdOperator,
AlertThresholdState,
Algorithm,
EvaluationWindowState,
NotificationSettingsState,
Seasonality,
TimeDuration,
} from '../types';
import {
advancedOptionsReducer,
alertCreationReducer,
alertThresholdReducer,
buildInitialAlertDef,
evaluationWindowReducer,
getInitialAlertType,
getInitialAlertTypeFromURL,
notificationSettingsReducer,
} from '../utils';
const UNKNOWN_ACTION_TYPE = 'UNKNOWN_ACTION_TYPE';
const TEST_RESET_TO_INITIAL_STATE = 'should reset to initial state';
const TEST_SET_INITIAL_STATE_FROM_PAYLOAD =
'should set initial state from payload';
const TEST_RETURN_STATE_FOR_UNKNOWN_ACTION =
'should return current state for unknown action';
describe('CreateAlertV2 Context Utils', () => {
describe('alertCreationReducer', () => {
it('should set alert name', () => {
const result = alertCreationReducer(INITIAL_ALERT_STATE, {
type: 'SET_ALERT_NAME',
payload: 'Test Alert',
});
expect(result).toEqual({
...INITIAL_ALERT_STATE,
name: 'Test Alert',
});
});
it('should set alert labels', () => {
const labels = { severity: 'critical', team: 'backend' };
const result = alertCreationReducer(INITIAL_ALERT_STATE, {
type: 'SET_ALERT_LABELS',
payload: labels,
});
expect(result).toEqual({
...INITIAL_ALERT_STATE,
labels,
});
});
it('should set y-axis unit', () => {
const result = alertCreationReducer(INITIAL_ALERT_STATE, {
type: 'SET_Y_AXIS_UNIT',
payload: 'ms',
});
expect(result).toEqual({
...INITIAL_ALERT_STATE,
yAxisUnit: 'ms',
});
});
it(TEST_RESET_TO_INITIAL_STATE, () => {
const modifiedState: AlertState = {
name: 'Modified',
labels: { test: 'value' },
yAxisUnit: 'ms',
};
const result = alertCreationReducer(modifiedState, { type: 'RESET' });
expect(result).toEqual(INITIAL_ALERT_STATE);
});
it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => {
const newState: AlertState = {
name: 'Custom Alert',
labels: { env: 'production' },
yAxisUnit: 'bytes',
};
const result = alertCreationReducer(INITIAL_ALERT_STATE, {
type: 'SET_INITIAL_STATE',
payload: newState,
});
expect(result).toEqual(newState);
});
it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => {
const result = alertCreationReducer(
INITIAL_ALERT_STATE,
{ type: UNKNOWN_ACTION_TYPE } as any,
);
expect(result).toEqual(INITIAL_ALERT_STATE);
});
});
describe('getInitialAlertType', () => {
it('should return METRICS_BASED_ALERT for metrics data source', () => {
const result = getInitialAlertType(initialQueriesMap.metrics);
expect(result).toBe(AlertTypes.METRICS_BASED_ALERT);
});
it('should return LOGS_BASED_ALERT for logs data source', () => {
const result = getInitialAlertType(initialQueriesMap.logs);
expect(result).toBe(AlertTypes.LOGS_BASED_ALERT);
});
it('should return TRACES_BASED_ALERT for traces data source', () => {
const result = getInitialAlertType(initialQueriesMap.traces);
expect(result).toBe(AlertTypes.TRACES_BASED_ALERT);
});
it('should return METRICS_BASED_ALERT for unknown data source', () => {
const queryWithUnknownDataSource = {
...initialQueriesMap.metrics,
builder: {
...initialQueriesMap.metrics.builder,
queryData: [],
},
};
const result = getInitialAlertType(queryWithUnknownDataSource);
expect(result).toBe(AlertTypes.METRICS_BASED_ALERT);
});
});
describe('buildInitialAlertDef', () => {
it('should return logAlertDefaults for LOGS_BASED_ALERT', () => {
const result = buildInitialAlertDef(AlertTypes.LOGS_BASED_ALERT);
expect(result).toBe(logAlertDefaults);
});
it('should return traceAlertDefaults for TRACES_BASED_ALERT', () => {
const result = buildInitialAlertDef(AlertTypes.TRACES_BASED_ALERT);
expect(result).toBe(traceAlertDefaults);
});
it('should return exceptionAlertDefaults for EXCEPTIONS_BASED_ALERT', () => {
const result = buildInitialAlertDef(AlertTypes.EXCEPTIONS_BASED_ALERT);
expect(result).toBe(exceptionAlertDefaults);
});
it('should return anamolyAlertDefaults for ANOMALY_BASED_ALERT', () => {
const result = buildInitialAlertDef(AlertTypes.ANOMALY_BASED_ALERT);
expect(result).toBe(anamolyAlertDefaults);
});
it('should return alertDefaults for METRICS_BASED_ALERT', () => {
const result = buildInitialAlertDef(AlertTypes.METRICS_BASED_ALERT);
expect(result).toBe(alertDefaults);
});
it('should return alertDefaults for unknown alert type', () => {
const result = buildInitialAlertDef('UNKNOWN' as AlertTypes);
expect(result).toBe(alertDefaults);
});
});
describe('getInitialAlertTypeFromURL', () => {
it('should return ANOMALY_BASED_ALERT when ruleType is anomaly_rule', () => {
const params = new URLSearchParams('?ruleType=anomaly_rule');
const result = getInitialAlertTypeFromURL(params, initialQueriesMap.metrics);
expect(result).toBe(AlertTypes.ANOMALY_BASED_ALERT);
});
it('should return alert type from alertType param', () => {
const params = new URLSearchParams('?alertType=LOGS_BASED_ALERT');
const result = getInitialAlertTypeFromURL(params, initialQueriesMap.metrics);
expect(result).toBe(AlertTypes.LOGS_BASED_ALERT);
});
it('should prioritize ruleType over alertType', () => {
const params = new URLSearchParams(
'?ruleType=anomaly_rule&alertType=LOGS_BASED_ALERT',
);
const result = getInitialAlertTypeFromURL(params, initialQueriesMap.metrics);
expect(result).toBe(AlertTypes.ANOMALY_BASED_ALERT);
});
it('should fall back to query data source when no URL params', () => {
const params = new URLSearchParams('');
const result = getInitialAlertTypeFromURL(params, initialQueriesMap.traces);
expect(result).toBe(AlertTypes.TRACES_BASED_ALERT);
});
});
describe('alertThresholdReducer', () => {
it('should set selected query', () => {
const result = alertThresholdReducer(INITIAL_ALERT_THRESHOLD_STATE, {
type: 'SET_SELECTED_QUERY',
payload: 'B',
});
expect(result).toEqual({
...INITIAL_ALERT_THRESHOLD_STATE,
selectedQuery: 'B',
});
});
it('should set operator', () => {
const result = alertThresholdReducer(INITIAL_ALERT_THRESHOLD_STATE, {
type: 'SET_OPERATOR',
payload: AlertThresholdOperator.IS_BELOW,
});
expect(result).toEqual({
...INITIAL_ALERT_THRESHOLD_STATE,
operator: AlertThresholdOperator.IS_BELOW,
});
});
it('should set match type', () => {
const result = alertThresholdReducer(INITIAL_ALERT_THRESHOLD_STATE, {
type: 'SET_MATCH_TYPE',
payload: AlertThresholdMatchType.ALL_THE_TIME,
});
expect(result).toEqual({
...INITIAL_ALERT_THRESHOLD_STATE,
matchType: AlertThresholdMatchType.ALL_THE_TIME,
});
});
it('should set thresholds', () => {
const newThresholds = [
{
id: '1',
label: 'critical',
thresholdValue: 100,
recoveryThresholdValue: 90,
unit: 'ms',
channels: ['channel1'],
color: '#FF0000',
},
];
const result = alertThresholdReducer(INITIAL_ALERT_THRESHOLD_STATE, {
type: 'SET_THRESHOLDS',
payload: newThresholds,
});
expect(result).toEqual({
...INITIAL_ALERT_THRESHOLD_STATE,
thresholds: newThresholds,
});
});
it(TEST_RESET_TO_INITIAL_STATE, () => {
const modifiedState: AlertThresholdState = {
selectedQuery: 'B',
operator: AlertThresholdOperator.IS_BELOW,
matchType: AlertThresholdMatchType.ALL_THE_TIME,
evaluationWindow: TimeDuration.TEN_MINUTES,
algorithm: Algorithm.STANDARD,
seasonality: Seasonality.DAILY,
thresholds: [],
};
const result = alertThresholdReducer(modifiedState, { type: 'RESET' });
expect(result).toEqual(INITIAL_ALERT_THRESHOLD_STATE);
});
it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => {
const newState: AlertThresholdState = {
selectedQuery: 'C',
operator: AlertThresholdOperator.IS_EQUAL_TO,
matchType: AlertThresholdMatchType.ON_AVERAGE,
evaluationWindow: TimeDuration.ONE_HOUR,
algorithm: Algorithm.STANDARD,
seasonality: Seasonality.WEEKLY,
thresholds: [],
};
const result = alertThresholdReducer(INITIAL_ALERT_THRESHOLD_STATE, {
type: 'SET_INITIAL_STATE',
payload: newState,
});
expect(result).toEqual(newState);
});
it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => {
const result = alertThresholdReducer(
INITIAL_ALERT_THRESHOLD_STATE,
{ type: UNKNOWN_ACTION_TYPE } as any,
);
expect(result).toEqual(INITIAL_ALERT_THRESHOLD_STATE);
});
});
describe('advancedOptionsReducer', () => {
it('should set send notification if data is missing', () => {
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'SET_SEND_NOTIFICATION_IF_DATA_IS_MISSING',
payload: { toleranceLimit: 21, timeUnit: UniversalYAxisUnit.HOURS },
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
sendNotificationIfDataIsMissing: {
...INITIAL_ADVANCED_OPTIONS_STATE.sendNotificationIfDataIsMissing,
toleranceLimit: 21,
timeUnit: UniversalYAxisUnit.HOURS,
},
});
});
it('should toggle send notification if data is missing', () => {
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'TOGGLE_SEND_NOTIFICATION_IF_DATA_IS_MISSING',
payload: true,
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
sendNotificationIfDataIsMissing: {
...INITIAL_ADVANCED_OPTIONS_STATE.sendNotificationIfDataIsMissing,
enabled: true,
},
});
});
it('should set enforce minimum datapoints', () => {
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'SET_ENFORCE_MINIMUM_DATAPOINTS',
payload: { minimumDatapoints: 10 },
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
enforceMinimumDatapoints: {
...INITIAL_ADVANCED_OPTIONS_STATE.enforceMinimumDatapoints,
minimumDatapoints: 10,
},
});
});
it('should toggle enforce minimum datapoints', () => {
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'TOGGLE_ENFORCE_MINIMUM_DATAPOINTS',
payload: true,
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
enforceMinimumDatapoints: {
...INITIAL_ADVANCED_OPTIONS_STATE.enforceMinimumDatapoints,
enabled: true,
},
});
});
it('should set delay evaluation', () => {
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'SET_DELAY_EVALUATION',
payload: { delay: 10, timeUnit: UniversalYAxisUnit.HOURS },
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
delayEvaluation: { delay: 10, timeUnit: UniversalYAxisUnit.HOURS },
});
});
it('should set evaluation cadence', () => {
const newCadence = {
default: { value: 5, timeUnit: UniversalYAxisUnit.HOURS },
custom: {
repeatEvery: 'week',
startAt: '12:00:00',
timezone: 'America/New_York',
occurence: ['Monday', 'Friday'],
},
rrule: { date: dayjs(), startAt: '10:00:00', rrule: 'test-rrule' },
};
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'SET_EVALUATION_CADENCE',
payload: newCadence,
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
evaluationCadence: {
...INITIAL_ADVANCED_OPTIONS_STATE.evaluationCadence,
...newCadence,
},
});
});
it('should set evaluation cadence mode', () => {
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'SET_EVALUATION_CADENCE_MODE',
payload: 'custom',
});
expect(result).toEqual({
...INITIAL_ADVANCED_OPTIONS_STATE,
evaluationCadence: {
...INITIAL_ADVANCED_OPTIONS_STATE.evaluationCadence,
mode: 'custom',
},
});
});
it(TEST_RESET_TO_INITIAL_STATE, () => {
const modifiedState: AdvancedOptionsState = {
...INITIAL_ADVANCED_OPTIONS_STATE,
delayEvaluation: { delay: 10, timeUnit: UniversalYAxisUnit.HOURS },
};
const result = advancedOptionsReducer(modifiedState, { type: 'RESET' });
expect(result).toEqual(INITIAL_ADVANCED_OPTIONS_STATE);
});
it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => {
const newState: AdvancedOptionsState = {
...INITIAL_ADVANCED_OPTIONS_STATE,
sendNotificationIfDataIsMissing: {
toleranceLimit: 45,
timeUnit: UniversalYAxisUnit.SECONDS,
enabled: true,
},
};
const result = advancedOptionsReducer(INITIAL_ADVANCED_OPTIONS_STATE, {
type: 'SET_INITIAL_STATE',
payload: newState,
});
expect(result).toEqual(newState);
});
it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => {
const result = advancedOptionsReducer(
INITIAL_ADVANCED_OPTIONS_STATE,
{ type: UNKNOWN_ACTION_TYPE } as any,
);
expect(result).toEqual(INITIAL_ADVANCED_OPTIONS_STATE);
});
});
describe('evaluationWindowReducer', () => {
it('should set window type to rolling and reset timeframe', () => {
const modifiedState: EvaluationWindowState = {
...INITIAL_EVALUATION_WINDOW_STATE,
windowType: 'cumulative',
timeframe: 'currentHour',
};
const result = evaluationWindowReducer(modifiedState, {
type: 'SET_WINDOW_TYPE',
payload: 'rolling',
});
expect(result).toEqual({
windowType: 'rolling',
timeframe: INITIAL_EVALUATION_WINDOW_STATE.timeframe,
startingAt: INITIAL_EVALUATION_WINDOW_STATE.startingAt,
});
});
it('should set window type to cumulative and set timeframe to currentHour', () => {
const result = evaluationWindowReducer(INITIAL_EVALUATION_WINDOW_STATE, {
type: 'SET_WINDOW_TYPE',
payload: 'cumulative',
});
expect(result).toEqual({
windowType: 'cumulative',
timeframe: 'currentHour',
startingAt: INITIAL_EVALUATION_WINDOW_STATE.startingAt,
});
});
it('should set timeframe', () => {
const result = evaluationWindowReducer(INITIAL_EVALUATION_WINDOW_STATE, {
type: 'SET_TIMEFRAME',
payload: '10m0s',
});
expect(result).toEqual({
...INITIAL_EVALUATION_WINDOW_STATE,
timeframe: '10m0s',
});
});
it('should set starting at', () => {
const newStartingAt = {
time: '14:30:00',
number: '5',
timezone: 'Europe/London',
unit: UniversalYAxisUnit.HOURS,
};
const result = evaluationWindowReducer(INITIAL_EVALUATION_WINDOW_STATE, {
type: 'SET_STARTING_AT',
payload: newStartingAt,
});
expect(result).toEqual({
...INITIAL_EVALUATION_WINDOW_STATE,
startingAt: newStartingAt,
});
});
it(TEST_RESET_TO_INITIAL_STATE, () => {
const modifiedState: EvaluationWindowState = {
windowType: 'cumulative',
timeframe: 'currentHour',
startingAt: {
time: '12:00:00',
number: '2',
timezone: 'America/New_York',
unit: UniversalYAxisUnit.HOURS,
},
};
const result = evaluationWindowReducer(modifiedState, { type: 'RESET' });
expect(result).toEqual(INITIAL_EVALUATION_WINDOW_STATE);
});
it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => {
const newState: EvaluationWindowState = {
windowType: 'cumulative',
timeframe: 'currentDay',
startingAt: {
time: '09:00:00',
number: '3',
timezone: 'Asia/Tokyo',
unit: UniversalYAxisUnit.HOURS,
},
};
const result = evaluationWindowReducer(INITIAL_EVALUATION_WINDOW_STATE, {
type: 'SET_INITIAL_STATE',
payload: newState,
});
expect(result).toEqual(newState);
});
it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => {
const result = evaluationWindowReducer(
INITIAL_EVALUATION_WINDOW_STATE,
{ type: UNKNOWN_ACTION_TYPE } as any,
);
expect(result).toEqual(INITIAL_EVALUATION_WINDOW_STATE);
});
});
describe('notificationSettingsReducer', () => {
it('should set multiple notifications', () => {
const notifications = ['channel1', 'channel2', 'channel3'];
const result = notificationSettingsReducer(
INITIAL_NOTIFICATION_SETTINGS_STATE,
{
type: 'SET_MULTIPLE_NOTIFICATIONS',
payload: notifications,
},
);
expect(result).toEqual({
...INITIAL_NOTIFICATION_SETTINGS_STATE,
multipleNotifications: notifications,
});
});
it('should set multiple notifications to null', () => {
const modifiedState = {
...INITIAL_NOTIFICATION_SETTINGS_STATE,
multipleNotifications: ['channel1', 'channel2'],
};
const result = notificationSettingsReducer(modifiedState, {
type: 'SET_MULTIPLE_NOTIFICATIONS',
payload: null,
});
expect(result).toEqual({
...modifiedState,
multipleNotifications: null,
});
});
it('should set re-notification', () => {
const reNotification = {
enabled: true,
value: 60,
unit: UniversalYAxisUnit.HOURS,
conditions: ['firing' as const, 'nodata' as const],
};
const result = notificationSettingsReducer(
INITIAL_NOTIFICATION_SETTINGS_STATE,
{
type: 'SET_RE_NOTIFICATION',
payload: reNotification,
},
);
expect(result).toEqual({
...INITIAL_NOTIFICATION_SETTINGS_STATE,
reNotification,
});
});
it('should set description', () => {
const description = 'Custom alert description with {{$value}}';
const result = notificationSettingsReducer(
INITIAL_NOTIFICATION_SETTINGS_STATE,
{
type: 'SET_DESCRIPTION',
payload: description,
},
);
expect(result).toEqual({
...INITIAL_NOTIFICATION_SETTINGS_STATE,
description,
});
});
it('should set routing policies', () => {
const result = notificationSettingsReducer(
INITIAL_NOTIFICATION_SETTINGS_STATE,
{
type: 'SET_ROUTING_POLICIES',
payload: true,
},
);
expect(result).toEqual({
...INITIAL_NOTIFICATION_SETTINGS_STATE,
routingPolicies: true,
});
});
it(TEST_RESET_TO_INITIAL_STATE, () => {
const modifiedState: NotificationSettingsState = {
multipleNotifications: ['channel1'],
reNotification: {
enabled: true,
value: 120,
unit: UniversalYAxisUnit.HOURS,
conditions: ['firing'],
},
description: 'Modified description',
routingPolicies: true,
};
const result = notificationSettingsReducer(modifiedState, {
type: 'RESET',
});
expect(result).toEqual(INITIAL_NOTIFICATION_SETTINGS_STATE);
});
it(TEST_SET_INITIAL_STATE_FROM_PAYLOAD, () => {
const newState: NotificationSettingsState = {
multipleNotifications: ['channel4', 'channel5'],
reNotification: {
enabled: true,
value: 90,
unit: UniversalYAxisUnit.MINUTES,
conditions: ['nodata'],
},
description: 'New description',
routingPolicies: true,
};
const result = notificationSettingsReducer(
INITIAL_NOTIFICATION_SETTINGS_STATE,
{
type: 'SET_INITIAL_STATE',
payload: newState,
},
);
expect(result).toEqual(newState);
});
it(TEST_RETURN_STATE_FOR_UNKNOWN_ACTION, () => {
const result = notificationSettingsReducer(
INITIAL_NOTIFICATION_SETTINGS_STATE,
{ type: UNKNOWN_ACTION_TYPE } as any,
);
expect(result).toEqual(INITIAL_NOTIFICATION_SETTINGS_STATE);
});
});
});

View File

@@ -51,7 +51,13 @@ export const useCreateAlertState = (): ICreateAlertContextProps => {
export function CreateAlertProvider(
props: ICreateAlertProviderProps,
): JSX.Element {
const { children, initialAlertState, isEditMode, ruleId } = props;
const {
children,
initialAlertState,
isEditMode,
ruleId,
initialAlertType,
} = props;
const [alertState, setAlertState] = useReducer(
alertCreationReducer,
@@ -62,9 +68,12 @@ export function CreateAlertProvider(
const location = useLocation();
const queryParams = new URLSearchParams(location.search);
const [alertType, setAlertType] = useState<AlertTypes>(() =>
getInitialAlertTypeFromURL(queryParams, currentQuery),
);
const [alertType, setAlertType] = useState<AlertTypes>(() => {
if (isEditMode) {
return initialAlertType;
}
return getInitialAlertTypeFromURL(queryParams, currentQuery);
});
const handleAlertTypeChange = useCallback(
(value: AlertTypes): void => {

View File

@@ -62,7 +62,7 @@ export const alertCreationReducer = (
export function getInitialAlertType(currentQuery: Query): AlertTypes {
const dataSource =
currentQuery.builder.queryData[0].dataSource || DataSource.METRICS;
currentQuery.builder.queryData?.[0]?.dataSource || DataSource.METRICS;
switch (dataSource) {
case DataSource.METRICS:
return AlertTypes.METRICS_BASED_ALERT;

View File

@@ -0,0 +1,16 @@
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { initialQueryBuilderFormValuesMap } from 'constants/queryBuilder';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
export function sanitizeDefaultAlertQuery(
query: Query,
alertType: AlertTypes,
): Query {
// If there are no queries, add a default one based on the alert type
if (query.builder.queryData.length === 0) {
const dataSource = ALERTS_DATA_SOURCE_MAP[alertType];
query.builder.queryData.push(initialQueryBuilderFormValuesMap[dataSource]);
}
return query;
}
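
For orientation, a minimal usage sketch of the new helper, assuming a composite query parsed from an older alert rule whose builder carries no queries (`emptyBuilderQuery` below is a hypothetical stand-in, declared only for illustration):

import { sanitizeDefaultAlertQuery } from 'container/EditAlertV2/utils';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

// Hypothetical: a composite query whose builder.queryData is empty.
declare const emptyBuilderQuery: Query;

const safeQuery = sanitizeDefaultAlertQuery(
	emptyBuilderQuery,
	AlertTypes.METRICS_BASED_ALERT,
);
// safeQuery.builder.queryData now holds at least one default metrics query.

Note that the helper mutates and returns the same query object (it pushes a default entry into builder.queryData), so callers like the edit flow below can use the return value directly.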

View File

@@ -23,6 +23,7 @@ import LabelColumn from 'components/TableRenderer/LabelColumn';
import TextToolTip from 'components/TextToolTip';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { sanitizeDefaultAlertQuery } from 'container/EditAlertV2/utils';
import useSortableTable from 'hooks/ResizeTable/useSortableTable';
import useComponentPermission from 'hooks/useComponentPermission';
import useDebouncedFn from 'hooks/useDebouncedFunction';
@@ -36,6 +37,7 @@ import { useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { UseQueryResult } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { GettableAlert } from 'types/api/alerts/get';
import DeleteAlert from './DeleteAlert';
@@ -141,7 +143,10 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
];
const onEditHandler = (record: GettableAlert, openInNewTab: boolean): void => {
const compositeQuery = mapQueryDataFromApi(record.condition.compositeQuery);
const compositeQuery = sanitizeDefaultAlertQuery(
mapQueryDataFromApi(record.condition.compositeQuery),
record.alertType as AlertTypes,
);
params.set(
QueryParams.compositeQuery,
encodeURIComponent(JSON.stringify(compositeQuery)),

View File

@@ -87,6 +87,21 @@
"imgUrl": "/Logos/signoz-brand-logo.svg",
"link": "https://signoz.io/docs/migration/migrate-from-signoz-self-host-to-signoz-cloud/"
},
{
"dataSource": "migrate-from-existing-opentelemetry",
"label": "From Existing OpenTelemetry",
"tags": ["migrate to SigNoz"],
"module": "home",
"relatedSearchKeywords": [
"apm migration",
"opentelemetry",
"migration guide",
"migrate",
"migration"
],
"imgUrl": "/Logos/opentelemetry.svg",
"link": "https://signoz.io/docs/migration/migrate-from-opentelemetry-to-signoz/"
},
{
"dataSource": "java",
"entityID": "dataSource",
@@ -2656,6 +2671,156 @@
],
"link": "https://signoz.io/docs/community/llm-monitoring/"
},
{
"dataSource": "anthropic-api",
"label": "Anthropic API",
"imgUrl": "/Logos/anthropic-api-monitoring.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"llm monitoring",
"large language model observability",
"monitor anthropic",
"llm response time tracing",
"llm metrics",
"otel llm integration",
"llm performance tracking",
"metrics",
"traces",
"logs"
],
"link": "https://signoz.io/docs/anthropic-monitoring/"
},
{
"dataSource": "claude-code",
"label": "Claude Code",
"imgUrl": "/Logos/claude-code.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"claude code monitoring",
"claude code observability",
"claude code performance tracking",
"claude code latency tracing",
"claude code metrics",
"otel claude integration",
"claude code response time",
"claude code logs",
"claude code error tracking",
"claude code debugging",
"metrics",
"logs"
],
"link": "https://signoz.io/docs/claude-code-monitoring/"
},
{
"dataSource": "deepseek-api",
"label": "DeepSeek API",
"imgUrl": "/Logos/deepseek.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"deepseek api monitoring",
"deepseek api observability",
"deepseek api performance tracking",
"deepseek api latency tracing",
"deepseek api metrics",
"otel deepseek integration",
"deepseek api response time",
"deepseek api logs",
"deepseek api error tracking",
"deepseek api debugging",
"metrics",
"logs"
],
"link": "https://signoz.io/docs/deepseek-monitoring/"
},
{
"dataSource": "google-gemini-api",
"label": "Google Gemini",
"imgUrl": "/Logos/google-gemini.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"google gemini api monitoring",
"google gemini api observability",
"google gemini api performance tracking",
"google gemini api latency tracing",
"google gemini api metrics",
"otel google gemini integration",
"google gemini api response time",
"google gemini api logs",
"google gemini api error tracking",
"google gemini api debugging",
"gemini",
"metrics",
"logs"
],
"link": "https://signoz.io/docs/google-gemini-monitoring/"
},
{
"dataSource": "langchain",
"label": "LangChain",
"imgUrl": "/Logos/langchain.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"langchain monitoring",
"langchain observability",
"langchain performance tracking",
"langchain latency tracing",
"langchain metrics",
"otel langchain integration",
"langchain response time",
"langchain logs",
"langchain error tracking",
"langchain debugging",
"traces"
],
"link": "https://signoz.io/docs/langchain-monitoring/"
},
{
"dataSource": "llamaindex",
"label": "LlamaIndex",
"imgUrl": "/Logos/llamaindex.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"llamaindex monitoring",
"llamaindex observability",
"llamaindex performance tracking",
"llamaindex latency tracing",
"llamaindex metrics",
"otel llamaindex integration",
"llamaindex response time",
"llamaindex logs",
"llamaindex error tracking",
"llamaindex debugging",
"traces"
],
"link": "https://signoz.io/docs/llamaindex-monitoring/"
},
{
"dataSource": "vercel-ai-sdk",
"label": "Vercel AI SDK",
"imgUrl": "/Logos/vercel.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"vercel ai sdk monitoring",
"vercel ai sdk observability",
"vercel ai sdk performance tracking",
"vercel ai sdk latency tracing",
"vercel ai sdk metrics",
"otel vercel ai sdk integration",
"vercel ai sdk response time",
"vercel ai sdk logs",
"vercel ai sdk error tracking",
"vercel ai sdk debugging",
"traces"
],
"link": "https://signoz.io/docs/vercel-ai-sdk-monitoring/"
},
{
"dataSource": "http-endpoints-monitoring",
"label": "HTTP Endpoints Monitoring",
@@ -3391,5 +3556,58 @@
}
]
}
},
{
"dataSource": "microsoft-sql-server",
"label": "Microsoft SQL Server",
"imgUrl": "/Logos/microsoft-sql-server.svg",
"tags": ["integrations"],
"module": "metrics",
"relatedSearchKeywords": [
"sql server metrics",
"mssql monitoring",
"sql server performance",
"sql server observability",
"Microsoft",
"sql server logs",
"metrics",
"logs"
],
"id": "microsoft-sql-server",
"link": "https://signoz.io/docs/integrations/sql-server/"
},
{
"dataSource": "supabase",
"label": "Supabase",
"imgUrl": "/Logos/supabase.svg",
"tags": ["integrations"],
"module": "metrics",
"relatedSearchKeywords": [
"supabase metrics",
"supabase monitoring",
"supabase performance",
"supabase observability",
"supabase",
"metrics"
],
"id": "supabase",
"link": "https://signoz.io/docs/integrations/supabase/"
},
{
"dataSource": "nomad",
"label": "Nomad",
"imgUrl": "/Logos/nomad.svg",
"tags": ["integrations"],
"module": "metrics",
"relatedSearchKeywords": [
"nomad metrics",
"nomad monitoring",
"nomad performance",
"nomad observability",
"nomad",
"metrics"
],
"id": "nomad",
"link": "https://signoz.io/docs/integrations/nomad/"
}
]

View File

@@ -1,3 +1,4 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { TelemetryFieldKey } from 'api/v5/v5';
import { AxiosResponse } from 'axios';
@@ -55,11 +56,10 @@ const useOptionsMenu = ({
initialOptions = {},
}: UseOptionsMenuProps): UseOptionsMenu => {
const { notifications } = useNotifications();
const {
preferences,
updateColumns,
updateFormatting,
} = usePreferenceContext();
const prefCtx = usePreferenceContext();
// TODO: send null to updateColumns and updateFormatting if dataSource is not logs or traces
const slice = dataSource === DataSource.TRACES ? prefCtx.traces : prefCtx.logs;
const { preferences, updateColumns, updateFormatting } = slice;
const [searchText, setSearchText] = useState<string>('');
const [isFocused, setIsFocused] = useState<boolean>(false);

View File

@@ -19,7 +19,6 @@ import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSea
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import createQueryParams from 'lib/createQueryParams';
import { Compass } from 'lucide-react';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useMemo } from 'react';
import { Virtuoso } from 'react-virtuoso';
import { ILog } from 'types/api/logs/log';
@@ -212,24 +211,20 @@ function SpanLogs({
const renderContent = useMemo(
() => (
<div className="span-logs-list-container">
<PreferenceContextProvider>
<OverlayScrollbar isVirtuoso>
<Virtuoso
className="span-logs-virtuoso"
key="span-logs-virtuoso"
style={
logs.length <= 35 ? { height: `calc(${logs.length} * 22px)` } : {}
}
data={logs}
totalCount={logs.length}
itemContent={getItemContent}
overscan={200}
components={{
Footer: renderFooter,
}}
/>
</OverlayScrollbar>
</PreferenceContextProvider>
<OverlayScrollbar isVirtuoso>
<Virtuoso
className="span-logs-virtuoso"
key="span-logs-virtuoso"
style={logs.length <= 35 ? { height: `calc(${logs.length} * 22px)` } : {}}
data={logs}
totalCount={logs.length}
itemContent={getItemContent}
overscan={200}
components={{
Footer: renderFooter,
}}
/>
</OverlayScrollbar>
</div>
),
[logs, getItemContent, renderFooter],

View File

@@ -261,18 +261,16 @@ describe('SpanDetailsDrawer', () => {
const logsButton = screen.getByRole('radio', { name: /logs/i });
fireEvent.click(logsButton);
// Wait for logs view to open
// Wait for logs view to open and logs to be displayed
await waitFor(() => {
expect(screen.getByTestId('overlay-scrollbar')).toBeInTheDocument();
});
// Verify logs are displayed
await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-span-log-1')).toBeInTheDocument();
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-span-log-2')).toBeInTheDocument();
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-context-log-before')).toBeInTheDocument();
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-context-log-after')).toBeInTheDocument();
});
});
@@ -285,12 +283,9 @@ describe('SpanDetailsDrawer', () => {
fireEvent.click(logsButton);
// Wait for all API calls to complete
await waitFor(
() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
},
{ timeout: 5000 },
);
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
});
// Verify the three distinct queries were made
const [spanQuery, beforeQuery, afterQuery] = apiCallHistory;
@@ -319,12 +314,9 @@ describe('SpanDetailsDrawer', () => {
fireEvent.click(logsButton);
// Wait for all API calls to complete
await waitFor(
() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
},
{ timeout: 5000 },
);
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
});
const [spanQuery, beforeQuery, afterQuery] = apiCallHistory;
@@ -484,9 +476,17 @@ describe('SpanDetailsDrawer', () => {
const logsButton = screen.getByRole('radio', { name: /logs/i });
fireEvent.click(logsButton);
// Wait for logs to load
// Wait for all API calls to complete first
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
});
// Wait for all logs to be rendered - both span logs and context logs
await waitFor(() => {
expect(screen.getByTestId('raw-log-span-log-1')).toBeInTheDocument();
expect(screen.getByTestId('raw-log-span-log-2')).toBeInTheDocument();
expect(screen.getByTestId('raw-log-context-log-before')).toBeInTheDocument();
expect(screen.getByTestId('raw-log-context-log-after')).toBeInTheDocument();
});
// Verify span logs are highlighted

View File

@@ -6,8 +6,10 @@ import { DataSource } from 'types/common/queryBuilder';
export const useGetAllViews = (
sourcepage: DataSource | 'meter',
enabled?: boolean,
): UseQueryResult<AxiosResponse<AllViewsProps>, AxiosError> =>
useQuery<AxiosResponse<AllViewsProps>, AxiosError>({
queryKey: [{ sourcepage }],
queryFn: () => getAllViews(sourcepage as DataSource),
...(enabled !== undefined ? { enabled } : {}),
});
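
A small note on the pattern above: spreading `enabled` only when it is defined preserves react-query's default (queries enabled) for callers that omit the flag. A hedged sketch of the same conditional-spread shape, with illustrative names:

// Illustrative only: `enabled` is forwarded to the useQuery options only
// when the caller actually passed it; otherwise react-query's default
// (enabled: true) applies.
function viewsQueryOptions(
	enabled?: boolean,
): { queryKey: unknown[]; enabled?: boolean } {
	return {
		queryKey: [{ sourcepage: 'logs' }],
		...(enabled !== undefined ? { enabled } : {}),
	};
}

usePreferenceSync (further below) passes `mode === PreferenceMode.SAVED_VIEW` as this flag, so the saved-views request only fires in saved-view mode.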

View File

@@ -117,6 +117,11 @@ function AlertDetails(): JSX.Element {
}
};
// Show spinner until we have alert data loaded
if (isLoading && !alertRuleDetails) {
return <Spinner />;
}
return (
<CreateAlertProvider
ruleId={ruleId || ''}

View File

@@ -8,7 +8,6 @@ import { isDrilldownEnabled } from 'container/QueryTable/Drilldown/drilldownUtil
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect, useState } from 'react';
import { generatePath, useLocation, useParams } from 'react-router-dom';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -54,14 +53,12 @@ function DashboardWidget(): JSX.Element | null {
}
return (
<PreferenceContextProvider>
<NewWidget
yAxisUnit={selectedWidget?.yAxisUnit}
selectedGraph={selectedGraph}
fillSpans={selectedWidget?.fillSpans}
enableDrillDown={isDrilldownEnabled()}
/>
</PreferenceContextProvider>
<NewWidget
yAxisUnit={selectedWidget?.yAxisUnit}
selectedGraph={selectedGraph}
fillSpans={selectedWidget?.fillSpans}
enableDrillDown={isDrilldownEnabled()}
/>
);
}

View File

@@ -3,14 +3,9 @@ import ROUTES from 'constants/routes';
import InfraMonitoringHosts from 'container/InfraMonitoringHosts';
import InfraMonitoringK8s from 'container/InfraMonitoringK8s';
import { Inbox } from 'lucide-react';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Hosts: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<InfraMonitoringHosts />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <InfraMonitoringHosts />,
name: (
<div className="tab-item">
<Inbox size={16} /> Hosts
@@ -21,11 +16,7 @@ export const Hosts: TabRoutes = {
};
export const Kubernetes: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<InfraMonitoringK8s />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <InfraMonitoringK8s />,
name: (
<div className="tab-item">
<Inbox size={16} /> Kubernetes

View File

@@ -3,7 +3,6 @@ import { liveLogsCompositeQuery } from 'container/LiveLogs/constants';
import LiveLogsContainer from 'container/LiveLogs/LiveLogsContainer';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { DataSource } from 'types/common/queryBuilder';
@@ -15,11 +14,7 @@ function LiveLogs(): JSX.Element {
handleSetConfig(PANEL_TYPES.LIST, DataSource.LOGS);
}, [handleSetConfig]);
return (
<PreferenceContextProvider>
<LiveLogsContainer />
</PreferenceContextProvider>
);
return <LiveLogsContainer />;
}
export default LiveLogs;

View File

@@ -10,7 +10,6 @@ import LogsFilters from 'container/LogsFilters';
import LogsSearchFilter from 'container/LogsSearchFilter';
import LogsTable from 'container/LogsTable';
import history from 'lib/history';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useMemo } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
@@ -83,71 +82,69 @@ function OldLogsExplorer(): JSX.Element {
};
return (
<PreferenceContextProvider>
<div className="old-logs-explorer">
<SpaceContainer
split={<Divider type="vertical" />}
align="center"
direction="horizontal"
>
<LogsSearchFilter />
<LogLiveTail />
</SpaceContainer>
<div className="old-logs-explorer">
<SpaceContainer
split={<Divider type="vertical" />}
align="center"
direction="horizontal"
>
<LogsSearchFilter />
<LogLiveTail />
</SpaceContainer>
<LogsAggregate />
<LogsAggregate />
<Row gutter={20} wrap={false}>
<LogsFilters />
<Col flex={1} className="logs-col-container">
<Row>
<Col flex={1}>
<Space align="baseline" direction="horizontal">
<Select
<Row gutter={20} wrap={false}>
<LogsFilters />
<Col flex={1} className="logs-col-container">
<Row>
<Col flex={1}>
<Space align="baseline" direction="horizontal">
<Select
getPopupContainer={popupContainer}
style={defaultSelectStyle}
value={selectedViewModeOption}
onChange={onChangeVeiwMode}
>
{viewModeOptionList.map((option) => (
<Select.Option key={option.value}>{option.label}</Select.Option>
))}
</Select>
{isFormatButtonVisible && (
<Popover
getPopupContainer={popupContainer}
style={defaultSelectStyle}
value={selectedViewModeOption}
onChange={onChangeVeiwMode}
placement="right"
content={renderPopoverContent}
>
{viewModeOptionList.map((option) => (
<Select.Option key={option.value}>{option.label}</Select.Option>
))}
</Select>
<Button>Format</Button>
</Popover>
)}
{isFormatButtonVisible && (
<Popover
getPopupContainer={popupContainer}
placement="right"
content={renderPopoverContent}
>
<Button>Format</Button>
</Popover>
)}
<Select
getPopupContainer={popupContainer}
style={defaultSelectStyle}
defaultValue={order}
onChange={handleChangeOrder}
>
{orderItems.map((item) => (
<Select.Option key={item.enum}>{item.name}</Select.Option>
))}
</Select>
</Space>
</Col>
<Select
getPopupContainer={popupContainer}
style={defaultSelectStyle}
defaultValue={order}
onChange={handleChangeOrder}
>
{orderItems.map((item) => (
<Select.Option key={item.enum}>{item.name}</Select.Option>
))}
</Select>
</Space>
</Col>
<Col>
<LogControls />
</Col>
</Row>
<Col>
<LogControls />
</Col>
</Row>
<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
</Col>
</Row>
<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
</Col>
</Row>
<LogDetailedView />
</div>
</PreferenceContextProvider>
<LogDetailedView />
</div>
);
}

View File

@@ -54,7 +54,8 @@ function LogsExplorer(): JSX.Element {
const [selectedView, setSelectedView] = useState<ExplorerViews>(() =>
getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
);
const { preferences, loading: preferencesLoading } = usePreferenceContext();
const { logs } = usePreferenceContext();
const { preferences } = logs;
const [showFilters, setShowFilters] = useState<boolean>(() => {
const localStorageValue = getLocalStorageKey(
@@ -273,7 +274,7 @@ function LogsExplorer(): JSX.Element {
);
useEffect(() => {
if (!preferences || preferencesLoading) {
if (!preferences) {
return;
}
const migratedQuery = migrateOptionsQuery({
@@ -295,12 +296,7 @@ function LogsExplorer(): JSX.Element {
) {
redirectWithOptionsData(migratedQuery);
}
}, [
migrateOptionsQuery,
preferences,
redirectWithOptionsData,
preferencesLoading,
]);
}, [migrateOptionsQuery, preferences, redirectWithOptionsData]);
const toolbarViews = useMemo(
() => ({

View File

@@ -4,14 +4,9 @@ import { Compass, TowerControl, Workflow } from 'lucide-react';
import LogsExplorer from 'pages/LogsExplorer';
import Pipelines from 'pages/Pipelines';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const logsExplorer: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<LogsExplorer />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <LogsExplorer />,
name: (
<div className="tab-item">
<Compass size={16} /> Explorer
@@ -22,11 +17,7 @@ export const logsExplorer: TabRoutes = {
};
export const logsPipelines: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<Pipelines />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <Pipelines />,
name: (
<div className="tab-item">
<Workflow size={16} /> Pipelines

View File

@@ -4,14 +4,9 @@ import BreakDownPage from 'container/MeterExplorer/Breakdown/BreakDown';
import ExplorerPage from 'container/MeterExplorer/Explorer';
import { Compass, TowerControl } from 'lucide-react';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Explorer: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<ExplorerPage />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <ExplorerPage />,
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

View File

@@ -4,7 +4,6 @@ import ExplorerPage from 'container/MetricsExplorer/Explorer';
import SummaryPage from 'container/MetricsExplorer/Summary';
import { BarChart2, Compass, TowerControl } from 'lucide-react';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Summary: TabRoutes = {
Component: SummaryPage,
@@ -18,11 +17,7 @@ export const Summary: TabRoutes = {
};
export const Explorer: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<ExplorerPage />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <ExplorerPage />,
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

View File

@@ -4,7 +4,6 @@ import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import NewDashboard from 'container/NewDashboard';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { ErrorType } from 'types/common';
@@ -36,11 +35,7 @@ function DashboardPage(): JSX.Element {
return <Spinner tip="Loading.." />;
}
return (
<PreferenceContextProvider>
<NewDashboard />
</PreferenceContextProvider>
);
return <NewDashboard />;
}
export default DashboardPage;

View File

@@ -5,15 +5,10 @@ import SaveView from 'pages/SaveView';
import TracesExplorer from 'pages/TracesExplorer';
import TracesFunnelDetails from 'pages/TracesFunnelDetails';
import TracesFunnels from 'pages/TracesFunnels';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { matchPath } from 'react-router-dom';
export const tracesExplorer: TabRoutes = {
Component: (): JSX.Element => (
<PreferenceContextProvider>
<TracesExplorer />
</PreferenceContextProvider>
),
Component: (): JSX.Element => <TracesExplorer />,
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

View File

@@ -1,154 +0,0 @@
/* eslint-disable sonarjs/no-identical-functions */
import { render, screen } from '@testing-library/react';
import { TelemetryFieldKey } from 'api/v5/v5';
import {
FormattingOptions,
PreferenceMode,
Preferences,
} from 'providers/preferences/types';
import { MemoryRouter, Route, Switch } from 'react-router-dom';
import {
PreferenceContextProvider,
usePreferenceContext,
} from '../context/PreferenceContextProvider';
// Mock the usePreferenceSync hook
jest.mock('../sync/usePreferenceSync', () => ({
usePreferenceSync: jest.fn().mockReturnValue({
preferences: {
columns: [] as TelemetryFieldKey[],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
} as FormattingOptions,
} as Preferences,
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));
// Test component that consumes the context
function TestConsumer(): JSX.Element {
const context = usePreferenceContext();
return (
<div>
<div data-testid="mode">{context.mode}</div>
<div data-testid="dataSource">{context.dataSource}</div>
<div data-testid="loading">{String(context.loading)}</div>
<div data-testid="error">{String(context.error)}</div>
<div data-testid="savedViewId">{context.savedViewId || 'no-view-id'}</div>
</div>
);
}
describe('PreferenceContextProvider', () => {
it('should provide context with direct mode when no viewKey is present', () => {
render(
<MemoryRouter initialEntries={['/logs']}>
<Switch>
<Route
path="/logs"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
expect(screen.getByTestId('loading')).toHaveTextContent('false');
expect(screen.getByTestId('error')).toHaveTextContent('null');
expect(screen.getByTestId('savedViewId')).toHaveTextContent('no-view-id');
});
it('should provide context with savedView mode when viewKey is present', () => {
render(
<MemoryRouter initialEntries={['/logs?viewKey="test-view-id"']}>
<Switch>
<Route
path="/logs"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('mode')).toHaveTextContent('savedView');
expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
expect(screen.getByTestId('savedViewId')).toHaveTextContent('test-view-id');
});
it('should set traces dataSource when pathname includes traces', () => {
render(
<MemoryRouter initialEntries={['/traces']}>
<Switch>
<Route
path="/traces"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('dataSource')).toHaveTextContent('traces');
});
it('should handle invalid viewKey JSON gracefully', () => {
// Mock console.error to avoid test output clutter
const originalConsoleError = console.error;
console.error = jest.fn();
render(
<MemoryRouter initialEntries={['/logs?viewKey=invalid-json']}>
<Switch>
<Route
path="/logs"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
expect(console.error).toHaveBeenCalled();
// Restore console.error
console.error = originalConsoleError;
});
it('should throw error when usePreferenceContext is used outside provider', () => {
// Suppress the error output for this test
const originalConsoleError = console.error;
console.error = jest.fn();
expect(() => {
render(<TestConsumer />);
}).toThrow(
'usePreferenceContext must be used within PreferenceContextProvider',
);
// Restore console.error
console.error = originalConsoleError;
});
});

View File

@@ -0,0 +1,402 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { TelemetryFieldKey } from 'api/v5/v5';
import { LOCALSTORAGE } from 'constants/localStorage';
import { LogViewMode } from 'container/LogsTable';
import {
defaultLogsSelectedColumns,
defaultTraceSelectedColumns,
} from 'container/OptionsMenu/constants';
import { FontSize } from 'container/OptionsMenu/types';
import { render, screen, userEvent } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { usePreferenceContext } from '../context/PreferenceContextProvider';
const ROUTE_LOGS = '/logs';
const ROUTE_TRACES = '/traces';
const TESTID_LOGS = 'logs';
const TESTID_TRACES = 'traces';
type LogsLocalOptions = {
selectColumns?: TelemetryFieldKey[];
maxLines?: number;
format?: string;
fontSize?: string;
version?: number;
};
type TracesLocalOptions = {
selectColumns?: TelemetryFieldKey[];
};
function setLocalStorageJSON(key: string, value: unknown): void {
localStorage.setItem(key, JSON.stringify(value));
}
function getLocalStorageJSON<T>(key: string): T | null {
const raw = localStorage.getItem(key);
return raw ? (JSON.parse(raw) as T) : null;
}
function Consumer({
dataSource,
testIdPrefix,
}: {
dataSource: DataSource;
testIdPrefix: string;
}): JSX.Element {
const ctx = usePreferenceContext();
const slice = dataSource === DataSource.TRACES ? ctx.traces : ctx.logs;
return (
<div>
<div data-testid={`${testIdPrefix}-loading`}>{String(slice.loading)}</div>
<div data-testid={`${testIdPrefix}-columns-len`}>
{String(slice.preferences?.columns?.length || 0)}
</div>
<button
data-testid={`${testIdPrefix}-update-columns`}
type="button"
onClick={(): void => {
const newCols: TelemetryFieldKey[] =
dataSource === DataSource.TRACES
? (defaultTraceSelectedColumns.slice(0, 1) as TelemetryFieldKey[])
: (defaultLogsSelectedColumns.slice(0, 1) as TelemetryFieldKey[]);
slice.updateColumns(newCols);
}}
>
update
</button>
</div>
);
}
describe('PreferencesProvider integration', () => {
beforeEach(() => {
localStorage.clear();
});
describe('Logs', () => {
it('loads defaults when no localStorage or url provided', () => {
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
expect(screen.getByTestId('logs-loading')).toHaveTextContent('false');
expect(
Number(screen.getByTestId('logs-columns-len').textContent),
).toBeGreaterThan(0);
});
it('respects localStorage when present', () => {
setLocalStorageJSON(LOCALSTORAGE.LOGS_LIST_OPTIONS, {
selectColumns: [{ name: 'ls.col' }],
maxLines: 5,
format: 'json',
fontSize: 'large',
version: 2,
});
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
expect(Number(screen.getByTestId('logs-columns-len').textContent)).toBe(1);
});
it('direct mode updateColumns persists to localStorage', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
await user.click(screen.getByTestId('logs-update-columns'));
const stored = getLocalStorageJSON<LogsLocalOptions>(
LOCALSTORAGE.LOGS_LIST_OPTIONS,
);
expect(stored?.selectColumns).toEqual([
defaultLogsSelectedColumns[0] as TelemetryFieldKey,
]);
});
it('saved view mode uses in-memory preferences (no localStorage write)', async () => {
const viewKey = JSON.stringify('saved-view-id-1');
const initialEntry = `/logs?viewKey=${encodeURIComponent(viewKey)}`;
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix="logs" />,
undefined,
{
initialRoute: initialEntry,
},
);
await user.click(screen.getByTestId('logs-update-columns'));
const stored = getLocalStorageJSON<LogsLocalOptions>(
LOCALSTORAGE.LOGS_LIST_OPTIONS,
);
expect(stored?.selectColumns).toBeUndefined();
});
it('url options override defaults', () => {
const options = {
selectColumns: [{ name: 'url.col' }],
maxLines: 7,
format: 'json',
fontSize: 'large',
version: 2,
};
const originalLocation = window.location;
Object.defineProperty(window, 'location', {
writable: true,
value: {
...originalLocation,
search: `?options=${encodeURIComponent(JSON.stringify(options))}`,
},
});
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
// restore
Object.defineProperty(window, 'location', {
writable: true,
value: originalLocation,
});
expect(Number(screen.getByTestId('logs-columns-len').textContent)).toBe(1);
});
it('updateFormatting persists to localStorage in direct mode', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
function FormattingConsumer(): JSX.Element {
const { logs } = usePreferenceContext();
return (
<button
data-testid="logs-update-formatting"
type="button"
onClick={(): void =>
logs.updateFormatting({
maxLines: 9,
format: 'json' as LogViewMode,
fontSize: 'large' as FontSize,
version: 2,
})
}
>
fmt
</button>
);
}
render(<FormattingConsumer />, undefined, { initialRoute: '/logs' });
await user.click(screen.getByTestId('logs-update-formatting'));
const stored = getLocalStorageJSON<LogsLocalOptions>(
LOCALSTORAGE.LOGS_LIST_OPTIONS,
);
expect(stored?.maxLines).toBe(9);
expect(stored?.format).toBe('json');
expect(stored?.fontSize).toBe('large');
expect(stored?.version).toBe(2);
});
it('saved view mode updates in-memory preferences (columns-len changes)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const viewKey = JSON.stringify('saved-view-id-3');
const initialEntry = `/logs?viewKey=${encodeURIComponent(viewKey)}`;
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{ initialRoute: initialEntry },
);
const before = Number(screen.getByTestId('logs-columns-len').textContent);
await user.click(screen.getByTestId('logs-update-columns'));
const after = Number(screen.getByTestId('logs-columns-len').textContent);
expect(after).toBeGreaterThanOrEqual(1);
// Should change from default to 1 for our new selection; tolerate default already being >=1
if (before !== after) {
expect(after).toBe(1);
}
});
});
describe('Traces', () => {
it('loads defaults when no localStorage or url provided', () => {
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{
initialRoute: ROUTE_TRACES,
},
);
expect(screen.getByTestId('traces-loading')).toHaveTextContent('false');
expect(
Number(screen.getByTestId('traces-columns-len').textContent),
).toBeGreaterThan(0);
});
it('respects localStorage when present', () => {
setLocalStorageJSON(LOCALSTORAGE.TRACES_LIST_OPTIONS, {
selectColumns: [{ name: 'trace.ls.col' }],
});
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{
initialRoute: ROUTE_TRACES,
},
);
expect(Number(screen.getByTestId('traces-columns-len').textContent)).toBe(1);
});
it('direct mode updateColumns persists to localStorage', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{
initialRoute: ROUTE_TRACES,
},
);
await user.click(screen.getByTestId('traces-update-columns'));
const stored = getLocalStorageJSON<TracesLocalOptions>(
LOCALSTORAGE.TRACES_LIST_OPTIONS,
);
expect(stored?.selectColumns).toEqual([
defaultTraceSelectedColumns[0] as TelemetryFieldKey,
]);
});
it('saved view mode uses in-memory preferences (no localStorage write)', async () => {
const viewKey = JSON.stringify('saved-view-id-2');
const initialEntry = `/traces?viewKey=${encodeURIComponent(viewKey)}`;
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix="traces" />,
undefined,
{
initialRoute: initialEntry,
},
);
await user.click(screen.getByTestId('traces-update-columns'));
const stored = getLocalStorageJSON<TracesLocalOptions>(
LOCALSTORAGE.TRACES_LIST_OPTIONS,
);
expect(stored?.selectColumns).toBeUndefined();
});
it('url options override defaults', () => {
const options = {
selectColumns: [{ name: 'trace.url.col' }],
};
const originalLocation = window.location;
Object.defineProperty(window, 'location', {
writable: true,
value: {
...originalLocation,
search: `?options=${encodeURIComponent(JSON.stringify(options))}`,
},
});
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{ initialRoute: ROUTE_TRACES },
);
Object.defineProperty(window, 'location', {
writable: true,
value: originalLocation,
});
expect(Number(screen.getByTestId('traces-columns-len').textContent)).toBe(1);
});
it('updateFormatting is a no-op in direct mode (no localStorage write)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
function TracesFormattingConsumer(): JSX.Element {
const { traces } = usePreferenceContext();
return (
<button
data-testid="traces-update-formatting"
type="button"
onClick={(): void =>
traces.updateFormatting({
maxLines: 9,
format: 'json' as LogViewMode,
fontSize: 'large' as FontSize,
version: 2,
})
}
>
fmt
</button>
);
}
render(<TracesFormattingConsumer />, undefined, { initialRoute: '/traces' });
await user.click(screen.getByTestId('traces-update-formatting'));
const stored = getLocalStorageJSON<TracesLocalOptions>(
LOCALSTORAGE.TRACES_LIST_OPTIONS,
);
expect(stored).toBeNull();
});
it('saved view mode updates in-memory preferences (columns-len changes)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const viewKey = JSON.stringify('saved-view-id-4');
const initialEntry = `/traces?viewKey=${encodeURIComponent(viewKey)}`;
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{ initialRoute: initialEntry },
);
const before = Number(screen.getByTestId('traces-columns-len').textContent);
await user.click(screen.getByTestId('traces-update-columns'));
const after = Number(screen.getByTestId('traces-columns-len').textContent);
expect(after).toBeGreaterThanOrEqual(1);
if (before !== after) {
expect(after).toBe(1);
}
});
});
});

View File

@@ -11,18 +11,18 @@ jest.mock('../configs/logsLoaderConfig', () => ({
__esModule: true,
default: {
priority: ['local', 'url', 'default'],
local: jest.fn().mockResolvedValue({
local: jest.fn(() => ({
columns: [{ name: 'local-column' }],
formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
}),
url: jest.fn().mockResolvedValue({
})),
url: jest.fn(() => ({
columns: [{ name: 'url-column' }],
formatting: { maxLines: 3, format: 'table', fontSize: 'small', version: 1 },
}),
default: jest.fn().mockResolvedValue({
})),
default: jest.fn(() => ({
columns: [{ name: 'default-column' }],
formatting: { maxLines: 2, format: 'table', fontSize: 'small', version: 1 },
}),
})),
},
}));
@@ -30,15 +30,15 @@ jest.mock('../configs/tracesLoaderConfig', () => ({
__esModule: true,
default: {
priority: ['local', 'url', 'default'],
local: jest.fn().mockResolvedValue({
local: jest.fn(() => ({
columns: [{ name: 'local-trace-column' }],
}),
url: jest.fn().mockResolvedValue({
})),
url: jest.fn(() => ({
columns: [{ name: 'url-trace-column' }],
}),
default: jest.fn().mockResolvedValue({
})),
default: jest.fn(() => ({
columns: [{ name: 'default-trace-column' }],
}),
})),
},
}));
@@ -57,11 +57,6 @@ describe('usePreferenceLoader', () => {
}),
);
// Initially it should be loading
expect(result.current.loading).toBe(true);
expect(result.current.preferences).toBe(null);
expect(result.current.error).toBe(null);
// Wait for the loader to complete
await waitFor(() => {
expect(result.current.loading).toBe(false);
@@ -123,30 +118,33 @@ describe('usePreferenceLoader', () => {
});
it('should handle errors during loading', async () => {
// Mock an error in the loader using jest.spyOn
const localSpy = jest.spyOn(logsLoaderConfig, 'local');
localSpy.mockRejectedValueOnce(new Error('Loading failed'));
// Make first call succeed (initial state), second call throw in reSync effect
const localSpy: jest.SpyInstance = jest.spyOn(logsLoaderConfig, 'local');
localSpy.mockImplementationOnce(() => ({
columns: [{ name: 'local-column' }],
formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
}));
localSpy.mockImplementationOnce(() => {
throw new Error('Loading failed');
});
const setReSync = jest.fn();
const { result } = renderHook(() =>
usePreferenceLoader({
dataSource: DataSource.LOGS,
reSync: false,
reSync: true,
setReSync,
}),
);
// Wait for the loader to complete
await waitFor(() => {
expect(result.current.loading).toBe(false);
expect(result.current.error).toBeInstanceOf(Error);
expect(result.current.error?.message).toBe('Loading failed');
});
// Should have set the error
expect(result.current.error).toBeInstanceOf(Error);
expect(result.current.error?.message).toBe('Loading failed');
expect(result.current.preferences).toBe(null);
// Reset reSync should be called
expect(setReSync).toHaveBeenCalledWith(false);
// Restore original implementation
localSpy.mockRestore();
});
});

View File

@@ -10,10 +10,10 @@ import { FormattingOptions } from '../types';
// --- LOGS preferences loader config ---
const logsLoaders = {
local: async (): Promise<{
local: (): {
columns: BaseAutocompleteData[];
formatting: FormattingOptions;
}> => {
} => {
const local = getLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS);
if (local) {
try {
@@ -31,10 +31,10 @@ const logsLoaders = {
}
return { columns: [], formatting: undefined } as any;
},
url: async (): Promise<{
url: (): {
columns: BaseAutocompleteData[];
formatting: FormattingOptions;
}> => {
} => {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
@@ -50,10 +50,10 @@ const logsLoaders = {
} catch {}
return { columns: [], formatting: undefined } as any;
},
default: async (): Promise<{
default: (): {
columns: TelemetryFieldKey[];
formatting: FormattingOptions;
}> => ({
} => ({
columns: defaultLogsSelectedColumns,
formatting: {
maxLines: 2,

View File

@@ -7,9 +7,9 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
// --- TRACES preferences loader config ---
const tracesLoaders = {
local: async (): Promise<{
local: (): {
columns: BaseAutocompleteData[];
}> => {
} => {
const local = getLocalStorageKey(LOCALSTORAGE.TRACES_LIST_OPTIONS);
if (local) {
try {
@@ -21,9 +21,9 @@ const tracesLoaders = {
}
return { columns: [] };
},
url: async (): Promise<{
url: (): {
columns: BaseAutocompleteData[];
}> => {
} => {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
@@ -33,9 +33,9 @@ const tracesLoaders = {
} catch {}
return { columns: [] };
},
default: async (): Promise<{
default: (): {
columns: TelemetryFieldKey[];
}> => ({
} => ({
columns: defaultTraceSelectedColumns,
}),
priority: ['local', 'url', 'default'] as const,

View File

@@ -4,7 +4,6 @@ import {
PreferenceMode,
} from 'providers/preferences/types';
import React, { createContext, useContext, useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import { DataSource } from 'types/common/queryBuilder';
import { usePreferenceSync } from '../sync/usePreferenceSync';
@@ -18,7 +17,6 @@ export function PreferenceContextProvider({
}: {
children: React.ReactNode;
}): JSX.Element {
const location = useLocation();
const params = useUrlQuery();
let savedViewId = '';
@@ -30,41 +28,25 @@ export function PreferenceContextProvider({
console.error(e);
}
}
let dataSource: DataSource = DataSource.LOGS;
if (location.pathname.includes('traces')) dataSource = DataSource.TRACES;
const {
preferences,
loading,
error,
updateColumns,
updateFormatting,
} = usePreferenceSync({
const logsSlice = usePreferenceSync({
mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
savedViewId: savedViewId || undefined,
dataSource,
dataSource: DataSource.LOGS,
});
const tracesSlice = usePreferenceSync({
mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
savedViewId: savedViewId || undefined,
dataSource: DataSource.TRACES,
});
const value = useMemo<PreferenceContextValue>(
() => ({
preferences,
loading,
error,
mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
savedViewId: savedViewId || undefined,
dataSource,
updateColumns,
updateFormatting,
logs: logsSlice,
traces: tracesSlice,
}),
[
savedViewId,
dataSource,
preferences,
loading,
error,
updateColumns,
updateFormatting,
],
[logsSlice, tracesSlice],
);
return (

View File

@@ -17,55 +17,48 @@ const migrateColumns = (columns: any): any =>
return column;
});
// Generic preferences loader that works with any config
async function preferencesLoader<T>(config: {
// Generic preferences loader that works with any config (synchronous version)
function preferencesLoader<T>(config: {
priority: readonly string[];
[key: string]: any;
}): Promise<T> {
const findValidLoader = async (): Promise<T> => {
// Try each loader in priority order
const results = await Promise.all(
config.priority.map(async (source) => ({
source,
result: await config[source](),
})),
);
}): T {
// Try each loader in priority order synchronously
const results = config.priority.map((source: string) => ({
source,
result: config[source](),
}));
// Find valid columns and formatting independently
const validColumnsResult = results.find(
({ result }) => result.columns?.length,
);
// Find valid columns and formatting independently
const validColumnsResult = results.find(
({ result }) => result.columns?.length,
);
const validFormattingResult = results.find(({ result }) => result.formatting);
const validFormattingResult = results.find(({ result }) => result.formatting);
const migratedColumns = validColumnsResult?.result.columns
? migrateColumns(validColumnsResult.result.columns)
: undefined;
const migratedColumns = validColumnsResult?.result.columns
? migrateColumns(validColumnsResult?.result.columns)
: undefined;
// Combine valid results or fallback to default
const finalResult = {
columns: migratedColumns || config.default().columns,
formatting:
validFormattingResult?.result.formatting || config.default().formatting,
};
return finalResult as T;
// Combine valid results or fallback to default
const finalResult = {
columns: migratedColumns || config.default().columns,
formatting:
validFormattingResult?.result.formatting || config.default().formatting,
};
return findValidLoader();
return finalResult as T;
}
// Use the generic loader with specific configs
async function logsPreferencesLoader(): Promise<{
function logsPreferencesLoader(): {
columns: TelemetryFieldKey[];
formatting: FormattingOptions;
}> {
} {
return preferencesLoader(logsLoaderConfig);
}
async function tracesPreferencesLoader(): Promise<{
function tracesPreferencesLoader(): {
columns: TelemetryFieldKey[];
}> {
} {
return preferencesLoader(tracesLoaderConfig);
}
@@ -82,29 +75,36 @@ export function usePreferenceLoader({
loading: boolean;
error: Error | null;
} {
const [preferences, setPreferences] = useState<Preferences | null>(null);
const [loading, setLoading] = useState(true);
const [preferences, setPreferences] = useState<Preferences | null>(() => {
if (dataSource === DataSource.LOGS) {
const { columns, formatting } = logsPreferencesLoader();
return { columns, formatting };
}
if (dataSource === DataSource.TRACES) {
const { columns } = tracesPreferencesLoader();
return { columns };
}
return null;
});
const [error, setError] = useState<Error | null>(null);
useEffect((): void => {
async function loadPreferences(): Promise<void> {
setLoading(true);
function loadPreferences(): void {
setError(null);
try {
if (dataSource === DataSource.LOGS) {
const { columns, formatting } = await logsPreferencesLoader();
const { columns, formatting } = logsPreferencesLoader();
setPreferences({ columns, formatting });
}
if (dataSource === DataSource.TRACES) {
const { columns } = await tracesPreferencesLoader();
const { columns } = tracesPreferencesLoader();
setPreferences({ columns });
}
} catch (e) {
setError(e as Error);
} finally {
setLoading(false);
// Reset reSync back to false after loading is complete
if (reSync) {
setReSync(false);
@@ -113,10 +113,10 @@ export function usePreferenceLoader({
}
// Only load preferences on initial mount or when reSync is true
if (loading || reSync) {
if (reSync) {
loadPreferences();
}
}, [dataSource, reSync, setReSync, loading]);
}, [dataSource, reSync, setReSync]);
return { preferences, loading, error };
return { preferences, loading: false, error };
}
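
To make the priority-merge behaviour concrete: columns resolve from the first source in `priority` that yields a non-empty list, formatting from the first source that yields any, and `default` backfills whatever is still missing. A minimal sketch with stand-in loaders (the real configs are logsLoaderConfig / tracesLoaderConfig, and `preferencesLoader` is module-internal, so this treats it as callable only for illustration):

// Hypothetical stand-in config for the sync loader.
const exampleConfig = {
	priority: ['local', 'url', 'default'] as const,
	local: () => ({ columns: [], formatting: undefined }),
	url: () => ({ columns: [{ name: 'url.col' }], formatting: undefined }),
	default: () => ({
		columns: [{ name: 'default.col' }],
		formatting: { maxLines: 2, format: 'table', fontSize: 'small', version: 1 },
	}),
};

// columns come from 'url' (the first source with a non-empty list);
// formatting falls back to 'default' (no earlier source supplied any):
// => { columns: [{ name: 'url.col' }], formatting: { maxLines: 2, ... } }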

View File

@@ -25,7 +25,10 @@ export function usePreferenceSync({
updateColumns: (newColumns: TelemetryFieldKey[]) => void;
updateFormatting: (newFormatting: FormattingOptions) => void;
} {
const { data: viewsData } = useGetAllViews(dataSource);
const { data: viewsData } = useGetAllViews(
dataSource,
mode === PreferenceMode.SAVED_VIEW,
);
const [
savedViewPreferences,

View File

@@ -1,24 +1,25 @@
import { TelemetryFieldKey } from 'api/v5/v5';
import { LogViewMode } from 'container/LogsTable';
import { FontSize } from 'container/OptionsMenu/types';
import { DataSource } from 'types/common/queryBuilder';
export enum PreferenceMode {
SAVED_VIEW = 'savedView',
DIRECT = 'direct',
}
export interface PreferenceContextValue {
export interface PreferenceSlice {
preferences: Preferences | null;
loading: boolean;
error: Error | null;
mode: PreferenceMode;
savedViewId?: string;
dataSource: DataSource;
updateColumns: (newColumns: TelemetryFieldKey[]) => void;
updateFormatting: (newFormatting: FormattingOptions) => void;
}
export interface PreferenceContextValue {
logs: PreferenceSlice;
traces: PreferenceSlice;
}
export interface FormattingOptions {
maxLines?: number;
format?: LogViewMode;

View File

@@ -198,7 +198,7 @@ func (r *provider) Match(ctx context.Context, orgID string, ruleID string, set m
}
for _, route := range expressionRoutes {
evaluateExpr, err := r.evaluateExpr(route.Expression, set)
evaluateExpr, err := r.evaluateExpr(ctx, route.Expression, set)
if err != nil {
continue
}
@@ -210,32 +210,71 @@ func (r *provider) Match(ctx context.Context, orgID string, ruleID string, set m
return matchedChannels, nil
}
func (r *provider) evaluateExpr(expression string, labelSet model.LabelSet) (bool, error) {
// convertLabelSetToEnv converts a flat label set with dotted keys into a nested map structure for expr env.
// when both a leaf and a deeper nested path exist (e.g. "foo" and "foo.bar"),
// the nested structure takes precedence. That means we will replace an existing leaf at any
// intermediate path with a map so we can materialize the deeper structure.
// TODO(srikanthccv): we need a better solution to handle this, remove the following
// when we update the expr to support dotted keys
func (r *provider) convertLabelSetToEnv(ctx context.Context, labelSet model.LabelSet) map[string]interface{} {
env := make(map[string]interface{})
for k, v := range labelSet {
key := string(k)
value := string(v)
logForReview := false
for lk, lv := range labelSet {
key := strings.TrimSpace(string(lk))
value := string(lv)
if strings.Contains(key, ".") {
parts := strings.Split(key, ".")
current := env
for i, part := range parts {
if i == len(parts)-1 {
current[part] = value
} else {
if current[part] == nil {
current[part] = make(map[string]interface{})
for i, raw := range parts {
part := strings.TrimSpace(raw)
last := i == len(parts)-1
if last {
if _, isMap := current[part].(map[string]interface{}); isMap {
logForReview = true
// deeper structure already exists; do not overwrite.
break
}
current = current[part].(map[string]interface{})
current[part] = value
break
}
// ensure a map so we can keep descending.
if nextMap, ok := current[part].(map[string]interface{}); ok {
current = nextMap
continue
}
// if absent or a leaf, replace it with a map.
newMap := make(map[string]interface{})
current[part] = newMap
current = newMap
}
} else {
env[key] = value
continue
}
// if a map already sits here (due to nested keys), keep the map (nested wins).
if _, isMap := env[key].(map[string]interface{}); isMap {
logForReview = true
continue
}
env[key] = value
}
if logForReview {
r.settings.Logger().InfoContext(ctx, "found label set with conflicting prefix dotted keys", "labels", labelSet)
}
return env
}
func (r *provider) evaluateExpr(ctx context.Context, expression string, labelSet model.LabelSet) (bool, error) {
env := r.convertLabelSetToEnv(ctx, labelSet)
program, err := expr.Compile(expression, expr.Env(env))
if err != nil {
return false, errors.NewInternalf(errors.CodeInternal, "error compiling route policy %s: %v", expression, err)
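
Since Go map iteration order is random, the helper has to produce the same env regardless of which label it visits first; the two conflict branches above (keep an existing map at a leaf, replace an existing leaf on the way down) are what make "nested wins" order-independent. A TypeScript paraphrase of the same walk, for illustration only and not part of the change:

// Paraphrase of convertLabelSetToEnv: dotted keys become nested objects,
// and a nested path wins over a conflicting leaf at the same prefix.
function convertLabelSetToEnv(
	labels: Record<string, string>,
): Record<string, unknown> {
	const env: Record<string, unknown> = {};
	for (const [rawKey, value] of Object.entries(labels)) {
		const parts = rawKey.trim().split('.');
		let current = env;
		for (let i = 0; i < parts.length; i += 1) {
			const part = parts[i].trim();
			if (i === parts.length - 1) {
				// A map already here means a deeper path was materialized: keep it.
				if (typeof current[part] !== 'object' || current[part] === null) {
					current[part] = value;
				}
				break;
			}
			const next = current[part];
			if (typeof next === 'object' && next !== null) {
				current = next as Record<string, unknown>;
			} else {
				// Absent or a leaf: replace with a map so the deeper path wins.
				const map: Record<string, unknown> = {};
				current[part] = map;
				current = map;
			}
		}
	}
	return env;
}

// convertLabelSetToEnv({ 'foo.bar': 'value', foo: 'ignored' })
// => { foo: { bar: 'value' } }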

View File

@@ -278,7 +278,9 @@ func TestProvider_ConcurrentAccess(t *testing.T) {
}
func TestProvider_EvaluateExpression(t *testing.T) {
provider := &provider{}
provider := &provider{
settings: factory.NewScopedProviderSettings(createTestProviderSettings(), "provider_test"),
}
tests := []struct {
name string
@@ -646,7 +648,7 @@ func TestProvider_EvaluateExpression(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := provider.evaluateExpr(tt.expression, tt.labelSet)
result, err := provider.evaluateExpr(context.Background(), tt.expression, tt.labelSet)
assert.NoError(t, err)
assert.Equal(t, tt.expected, result, "Expression: %s", tt.expression)
})
@@ -907,3 +909,72 @@ func TestProvider_CreateRoutes(t *testing.T) {
})
}
}
func TestConvertLabelSetToEnv(t *testing.T) {
tests := []struct {
name string
labelSet model.LabelSet
expected map[string]interface{}
}{
{
name: "simple keys",
labelSet: model.LabelSet{
"key1": "value1",
"key2": "value2",
},
expected: map[string]interface{}{
"key1": "value1",
"key2": "value2",
},
},
{
name: "nested keys",
labelSet: model.LabelSet{
"foo.bar": "value1",
"foo.baz": "value2",
},
expected: map[string]interface{}{
"foo": map[string]interface{}{
"bar": "value1",
"baz": "value2",
},
},
},
{
name: "conflict - nested structure wins",
labelSet: model.LabelSet{
"foo.bar.baz": "deep",
"foo.bar": "shallow",
},
expected: map[string]interface{}{
"foo": map[string]interface{}{
"bar": map[string]interface{}{
"baz": "deep",
},
},
},
},
{
name: "conflict - leaf value vs nested",
labelSet: model.LabelSet{
"foo.bar": "value",
"foo": "should_be_ignored",
},
expected: map[string]interface{}{
"foo": map[string]interface{}{
"bar": "value",
},
},
},
}
provider := &provider{
settings: factory.NewScopedProviderSettings(createTestProviderSettings(), "provider_test"),
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := provider.convertLabelSetToEnv(context.Background(), tt.labelSet)
assert.Equal(t, tt.expected, result)
})
}
}

View File

@@ -5,6 +5,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -15,11 +16,14 @@ type AuthZ interface {
Check(context.Context, *openfgav1.TupleKey) error
// CheckWithTupleCreation takes upon the responsibility for generating the tuples alongside everything Check does.
CheckWithTupleCreation(context.Context, authtypes.Claims, authtypes.Relation, authtypes.Typeable, []authtypes.Selector) error
CheckWithTupleCreation(context.Context, authtypes.Claims, valuer.UUID, authtypes.Relation, authtypes.Relation, authtypes.Typeable, []authtypes.Selector) error
// writes the tuples to upstream server
Write(context.Context, *openfgav1.WriteRequest) error
// Batch Check returns error when the upstream authorization server is unavailable or for all the tuples of subject (s) doesn't have relation (r) on object (o).
BatchCheck(context.Context, []*openfgav1.TupleKey) error
// lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
// Write accepts the insertion tuples and the deletion tuples.
Write(context.Context, []*openfgav1.TupleKey, []*openfgav1.TupleKey) error
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
}

View File

@@ -232,13 +232,13 @@ func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors)
tuples, err := authtypes.TypeableOrganization.Tuples(subject, translation, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, orgID.StringValue())}, orgID)
if err != nil {
return err
}
@@ -251,11 +251,21 @@ func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims aut
return nil
}
func (provider *provider) Write(ctx context.Context, req *openfgav1.WriteRequest) error {
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: provider.storeID,
AuthorizationModelId: provider.modelID,
Writes: req.Writes,
Writes: &openfgav1.WriteRequestWrites{
TupleKeys: additions,
},
Deletes: &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
},
})
return err

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -106,7 +107,7 @@ func (middleware *AuthZ) OpenAccess(next http.HandlerFunc) http.HandlerFunc {
})
}
func (middleware *AuthZ) Check(next http.HandlerFunc, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ authtypes.Typeable, _ authtypes.SelectorCallbackFn) http.HandlerFunc {
func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relation, translation authtypes.Relation, typeable authtypes.Typeable, cb authtypes.SelectorCallbackFn) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
@@ -114,7 +115,19 @@ func (middleware *AuthZ) Check(next http.HandlerFunc, _ authtypes.Relation, tran
return
}
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, translation, authtypes.TypeableOrganization, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, claims.OrgID)})
orgId, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
selectors, err := cb(req.Context(), claims)
if err != nil {
render.Error(rw, err)
return
}
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, orgId, relation, translation, typeable, selectors)
if err != nil {
render.Error(rw, err)
return
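A hypothetical SelectorCallbackFn matching the new Check signature, sketching how selectors reach CheckWithTupleCreation; the callback body is an assumption, and the "*" selector mirrors the updated resources regex later in this changeset:

	// hypothetical callback; real handlers derive selectors per resource
	cb := func(ctx context.Context, claims authtypes.Claims) ([]authtypes.Selector, error) {
		// select all resources of a kind within the caller's organization
		return []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeResources, "*")}, nil
	}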

View File

@@ -27,6 +27,7 @@ type Module interface {
GetByMetricNames(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string][]map[string]string, error)
statsreporter.StatsCollector
role.RegisterTypeable
}

View File

@@ -225,5 +225,5 @@ func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.ResourceDashboard, dashboardtypes.ResourcesDashboards}
return []authtypes.Typeable{dashboardtypes.TypeableResourceDashboard, dashboardtypes.TypeableResourcesDashboards}
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type module struct {
@@ -47,6 +46,8 @@ func (module *module) GetResources(_ context.Context) []*authtypes.Resource {
for _, register := range module.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// the role module cannot register itself!
typeables = append(typeables, module.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
@@ -142,24 +143,17 @@ func (module *module) Patch(ctx context.Context, orgID valuer.UUID, id valuer.UU
}
func (module *module) PatchObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
additionTuples, err := roletypes.GetAdditionTuples(id, relation, additions)
additionTuples, err := roletypes.GetAdditionTuples(id, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(id, relation, deletions)
deletionTuples, err := roletypes.GetDeletionTuples(id, orgID, relation, deletions)
if err != nil {
return err
}
err = module.authz.Write(ctx, &openfgav1.WriteRequest{
Writes: &openfgav1.WriteRequestWrites{
TupleKeys: additionTuples,
},
Deletes: &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuples,
},
})
err = module.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
@@ -170,3 +164,7 @@ func (module *module) PatchObjects(ctx context.Context, orgID valuer.UUID, id va
func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
return module.store.Delete(ctx, orgID, id)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}

View File

@@ -33,6 +33,8 @@ type Module interface {
// Deletes the role metadata and tuples in authorization server
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type RegisterTypeable interface {
@@ -40,27 +42,19 @@ type RegisterTypeable interface {
}
type Handler interface {
// Creates the role metadata and tuples in authorization server
Create(http.ResponseWriter, *http.Request)
// Gets the role metadata
Get(http.ResponseWriter, *http.Request)
// Gets the objects for the given relation and role
GetObjects(http.ResponseWriter, *http.Request)
// Gets all the resources and the relations
GetResources(http.ResponseWriter, *http.Request)
// Lists all the roles metadata for the organization
List(http.ResponseWriter, *http.Request)
// Patches the role metadata
Patch(http.ResponseWriter, *http.Request)
// Patches the objects for the given relation and role
PatchObjects(http.ResponseWriter, *http.Request)
// Deletes the role metadata and tuples in authorization server
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -1276,154 +1276,6 @@ func getLocalTableName(tableName string) string {
}
func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
hasCustomRetention, err := r.hasCustomRetentionColumn(ctx)
if hasCustomRetention {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("SetTTLV2 only supported")}
}
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing TTL")}
}
// uuid is used as transaction id
uuidWithHyphen := uuid.New()
uuid := strings.Replace(uuidWithHyphen.String(), "-", "", -1)
coldStorageDuration := -1
if len(params.ColdStorageVolume) > 0 {
coldStorageDuration = int(params.ToColdStorageDuration)
}
tableNameArray := []string{r.logsDB + "." + r.logsLocalTableV2, r.logsDB + "." + r.logsResourceLocalTableV2}
// check if there is existing things to be done
for _, tableName := range tableNameArray {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
}
if statusItem.Status == constants.StatusPending {
return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")}
}
}
// TTL query for logs_v2 table
ttlLogsV2 := fmt.Sprintf(
"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(timestamp / 1000000000) + "+
"INTERVAL %v SECOND DELETE", tableNameArray[0], r.cluster, params.DelDuration)
if len(params.ColdStorageVolume) > 0 {
ttlLogsV2 += fmt.Sprintf(", toDateTime(timestamp / 1000000000)"+
" + INTERVAL %v SECOND TO VOLUME '%s'",
params.ToColdStorageDuration, params.ColdStorageVolume)
}
// TTL query for logs_v2_resource table
// adding 1800 as our bucket size is 1800 seconds
ttlLogsV2Resource := fmt.Sprintf(
"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(seen_at_ts_bucket_start) + toIntervalSecond(1800) + "+
"INTERVAL %v SECOND DELETE", tableNameArray[1], r.cluster, params.DelDuration)
if len(params.ColdStorageVolume) > 0 {
ttlLogsV2Resource += fmt.Sprintf(", toDateTime(seen_at_ts_bucket_start) + toIntervalSecond(1800) + "+
"INTERVAL %v SECOND TO VOLUME '%s'",
params.ToColdStorageDuration, params.ColdStorageVolume)
}
ttlPayload := map[string]string{
tableNameArray[0]: ttlLogsV2,
tableNameArray[1]: ttlLogsV2Resource,
}
// set the ttl if nothing is pending/ no errors
go func(ttlPayload map[string]string) {
for tableName, query := range ttlPayload {
// https://github.com/SigNoz/signoz/issues/5470
// we will change ttl for only the new parts and not the old ones
query += " SETTINGS materialize_ttl_after_modify=0"
ttl := types.TTLSetting{
Identifiable: types.Identifiable{
ID: valuer.GenerateUUID(),
},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
TransactionID: uuid,
TableName: tableName,
TTL: int(params.DelDuration),
Status: constants.StatusPending,
ColdStorageTTL: coldStorageDuration,
OrgID: orgID,
}
_, dbErr := r.
sqlDB.
BunDB().
NewInsert().
Model(&ttl).
Exec(ctx)
if dbErr != nil {
zap.L().Error("error in inserting to ttl_status table", zap.Error(dbErr))
return
}
err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
if err != nil {
zap.L().Error("error in setting cold storage", zap.Error(err))
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err == nil {
_, dbErr := r.
sqlDB.
BunDB().
NewUpdate().
Model(new(types.TTLSetting)).
Set("updated_at = ?", time.Now()).
Set("status = ?", constants.StatusFailed).
Where("id = ?", statusItem.ID.StringValue()).
Exec(ctx)
if dbErr != nil {
zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr))
return
}
}
return
}
zap.L().Info("Executing TTL request: ", zap.String("request", query))
statusItem, _ := r.checkTTLStatusItem(ctx, orgID, tableName)
if err := r.db.Exec(ctx, query); err != nil {
zap.L().Error("error while setting ttl", zap.Error(err))
_, dbErr := r.
sqlDB.
BunDB().
NewUpdate().
Model(new(types.TTLSetting)).
Set("updated_at = ?", time.Now()).
Set("status = ?", constants.StatusFailed).
Where("id = ?", statusItem.ID.StringValue()).
Exec(ctx)
if dbErr != nil {
zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr))
return
}
return
}
_, dbErr = r.
sqlDB.
BunDB().
NewUpdate().
Model(new(types.TTLSetting)).
Set("updated_at = ?", time.Now()).
Set("status = ?", constants.StatusSuccess).
Where("id = ?", statusItem.ID.StringValue()).
Exec(ctx)
if dbErr != nil {
zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr))
return
}
}
}(ttlPayload)
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
}
func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
// uuid is used as transaction id
uuidWithHyphen := uuid.New()
@@ -2043,6 +1895,19 @@ func (r *ClickHouseReader) validateTTLConditions(ctx context.Context, ttlConditi
func (r *ClickHouseReader) SetTTL(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
// Keep only latest 100 transactions/requests
r.deleteTtlTransactions(ctx, orgID, 100)
switch params.Type {
case constants.TraceTTL:
return r.setTTLTraces(ctx, orgID, params)
case constants.MetricsTTL:
return r.setTTLMetrics(ctx, orgID, params)
default:
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while setting ttl. ttl type should be <metrics|traces>, got %v", params.Type)}
}
}
func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
	// uuid is used as transaction id
	uuidWithHyphen := uuid.New()
	uuid := strings.Replace(uuidWithHyphen.String(), "-", "", -1)
	coldStorageDuration := -1
	if len(params.ColdStorageVolume) > 0 {
		coldStorageDuration = int(params.ToColdStorageDuration)
	}
	tableNames := []string{
		signozMetricDBName + "." + signozSampleLocalTableName,
		signozMetricDBName + "." + signozSamplesAgg5mLocalTableName,
		signozMetricDBName + "." + signozSamplesAgg30mLocalTableName,
		signozMetricDBName + "." + signozExpHistLocalTableName,
		signozMetricDBName + "." + signozTSLocalTableNameV4,
		signozMetricDBName + "." + signozTSLocalTableNameV46Hrs,
		signozMetricDBName + "." + signozTSLocalTableNameV41Day,
		signozMetricDBName + "." + signozTSLocalTableNameV41Week,
	}
	for _, tableName := range tableNames {
		statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
		if err != nil {
			return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
		}
		if statusItem.Status == constants.StatusPending {
			return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")}
		}
	}
	metricTTL := func(tableName string) {
		ttl := types.TTLSetting{
			Identifiable: types.Identifiable{
				ID: valuer.GenerateUUID(),
			},
			TimeAuditable: types.TimeAuditable{
				CreatedAt: time.Now(),
				UpdatedAt: time.Now(),
			},
			TransactionID:  uuid,
			TableName:      tableName,
			TTL:            int(params.DelDuration),
			Status:         constants.StatusPending,
			ColdStorageTTL: coldStorageDuration,
			OrgID:          orgID,
		}
		_, dbErr := r.
			sqlDB.
			BunDB().
			NewInsert().
			Model(&ttl).
			Exec(ctx)
		if dbErr != nil {
			zap.L().Error("error in inserting to ttl_status table", zap.Error(dbErr))
			return
		}
		timeColumn := "timestamp_ms"
		if strings.Contains(tableName, "v4") || strings.Contains(tableName, "exp_hist") {
			timeColumn = "unix_milli"
		}
		req := fmt.Sprintf(
			"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(toUInt32(%s / 1000), 'UTC') + "+
				"INTERVAL %v SECOND DELETE", tableName, r.cluster, timeColumn, params.DelDuration)
		if len(params.ColdStorageVolume) > 0 {
			req += fmt.Sprintf(", toDateTime(toUInt32(%s / 1000), 'UTC')"+
				" + INTERVAL %v SECOND TO VOLUME '%s'",
				timeColumn, params.ToColdStorageDuration, params.ColdStorageVolume)
		}
		err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
		if err != nil {
			zap.L().Error("Error in setting cold storage", zap.Error(err))
			statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
			if err == nil {
				_, dbErr := r.
					sqlDB.
					BunDB().
					NewUpdate().
					Model(new(types.TTLSetting)).
					Set("updated_at = ?", time.Now()).
					Set("status = ?", constants.StatusFailed).
					Where("id = ?", statusItem.ID.StringValue()).
					Exec(ctx)
				if dbErr != nil {
					zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr))
					return
				}
			}
			return
		}
		req += " SETTINGS materialize_ttl_after_modify=0"
		zap.L().Info("Executing TTL request: ", zap.String("request", req))
		statusItem, _ := r.checkTTLStatusItem(ctx, orgID, tableName)
		if err := r.db.Exec(ctx, req); err != nil {
			zap.L().Error("error while setting ttl.", zap.Error(err))
			_, dbErr := r.
				sqlDB.
				BunDB().
				NewUpdate().
				Model(new(types.TTLSetting)).
				Set("updated_at = ?", time.Now()).
				Set("status = ?", constants.StatusFailed).
				Where("id = ?", statusItem.ID.StringValue()).
				Exec(ctx)
			if dbErr != nil {
				zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr))
				return
			}
			return
		}
		_, dbErr = r.
			sqlDB.
			BunDB().
			NewUpdate().
			Model(new(types.TTLSetting)).
			Set("updated_at = ?", time.Now()).
			Set("status = ?", constants.StatusSuccess).
			Where("id = ?", statusItem.ID.StringValue()).
			Exec(ctx)
		if dbErr != nil {
			zap.L().Error("Error in processing ttl_status update sql query", zap.Error(dbErr))
			return
		}
	}
	for _, tableName := range tableNames {
		go metricTTL(tableName)
	}
	return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
}
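For reference, a runnable sketch of the DDL the metricTTL closure builds; the table, cluster, and volume names here are made up:

package main

import "fmt"

func main() {
	table, cluster, timeColumn := "signoz_metrics.samples_v4", "cluster", "unix_milli"
	delSeconds, coldSeconds, volume := 2592000, 604800, "cold" // 30d delete, 7d cold move

	req := fmt.Sprintf(
		"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(toUInt32(%s / 1000), 'UTC') + "+
			"INTERVAL %v SECOND DELETE", table, cluster, timeColumn, delSeconds)
	req += fmt.Sprintf(", toDateTime(toUInt32(%s / 1000), 'UTC') + INTERVAL %v SECOND TO VOLUME '%s'",
		timeColumn, coldSeconds, volume)
	req += " SETTINGS materialize_ttl_after_modify=0"
	fmt.Println(req)
}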

View File

@@ -13,6 +13,7 @@ import (
)
var SupportedFunctions = []string{
"abs",
"exp",
"log",
"ln",

View File

@@ -197,10 +197,14 @@ func processResults(
}, nil
}
var SupportedFunctions = []string{"exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians", "now", "toUnixTimestamp"}
var SupportedFunctions = []string{"abs", "exp", "log", "ln", "exp2", "log2", "exp10", "log10", "sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma", "sin", "cos", "tan", "asin", "acos", "atan", "degrees", "radians", "now", "toUnixTimestamp"}
func EvalFuncs() map[string]govaluate.ExpressionFunction {
GoValuateFuncs := make(map[string]govaluate.ExpressionFunction)
// Returns the absolute value of the given argument.
GoValuateFuncs["abs"] = func(args ...interface{}) (interface{}, error) {
return math.Abs(args[0].(float64)), nil
}
// Returns e to the power of the given argument.
GoValuateFuncs["exp"] = func(args ...interface{}) (interface{}, error) {
return math.Exp(args[0].(float64)), nil
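A runnable sketch of the new abs function wired into expression evaluation, assuming the github.com/Knetic/govaluate library used here; the formula and inputs are hypothetical:

package main

import (
	"fmt"
	"math"

	"github.com/Knetic/govaluate"
)

func main() {
	funcs := map[string]govaluate.ExpressionFunction{
		"abs": func(args ...interface{}) (interface{}, error) {
			return math.Abs(args[0].(float64)), nil
		},
	}
	expression, err := govaluate.NewEvaluableExpressionWithFunctions("abs(A) + abs(B)", funcs)
	if err != nil {
		panic(err)
	}
	result, err := expression.Evaluate(map[string]interface{}{"A": -10.0, "B": 5.0})
	if err != nil {
		panic(err)
	}
	fmt.Println(result) // 15
}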

View File

@@ -94,7 +94,7 @@ func CollisionHandledFinalExpr(
return "", nil, err
}
colName, _ = fm.FieldFor(ctx, key)
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(key, dummyValue, colName)
colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
stmts = append(stmts, colName)
}
}
@@ -109,7 +109,7 @@ func CollisionHandledFinalExpr(
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else {
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(field, dummyValue, colName)
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
}
stmts = append(stmts, colName)
@@ -194,3 +194,109 @@ func FormatFullTextSearch(input string) string {
}
return input
}
func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, value any, tblFieldName string, operator qbtypes.FilterOperator) (string, any) {
// This block of code exists to handle the data type collisions
// We don't want to fail the requests when there is a key with more than one data type
// Let's take an example of `http.status_code`, and consider user sent a string value and number value
// When they search for `http.status_code=200`, we will search across both the number columns and string columns
// and return the results from both the columns
// While we expect user not to send the mixed data types, it inevitably happens
// So we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString:
switch v := value.(type) {
case float64:
// try to convert the string value to a number
tblFieldName = castFloat(tblFieldName)
case []any:
if allFloats(v) {
tblFieldName = castFloat(tblFieldName)
} else if hasString(v) {
_, value = castString(tblFieldName), toStrings(v)
}
case bool:
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
switch v := value.(type) {
// why? CH returns an error for a simple check
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
// DB::Exception: Bad get: has UInt64, requested Float64.
// How is it working in v4? v4 prepares the full query with values in query string
// When we format the float it becomes attributes_number['http.status_code'] = 200.000
// Which CH gladly accepts and doesn't throw error
// However, when passed as query args, the default formatter
// https://github.com/ClickHouse/clickhouse-go/blob/757e102f6d8c6059d564ce98795b4ce2a101b1a5/bind.go#L393
// is used which prepares the
// final query as attributes_number['http.status_code'] = 200 giving this error
// This following is one way to workaround it
// if the key is a number and the value is a stringified number, we let ClickHouse handle the conversion
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// check if it's a number inside a string
isNumber := false
if _, err := strconv.ParseFloat(v, 64); err == nil {
isNumber = true
}
if !operator.IsComparisonOperator() || !isNumber {
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
} else {
tblFieldName = castFloatHack(tblFieldName)
}
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case telemetrytypes.FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)
case []any:
if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
}
return tblFieldName, value
}
func castFloat(col string) string { return fmt.Sprintf("toFloat64OrNull(%s)", col) }
func castFloatHack(col string) string { return fmt.Sprintf("toFloat64(%s)", col) }
func castString(col string) string { return fmt.Sprintf("toString(%s)", col) }
func allFloats(in []any) bool {
for _, x := range in {
if _, ok := x.(float64); !ok {
return false
}
}
return true
}
func hasString(in []any) bool {
for _, x := range in {
if _, ok := x.(string); ok {
return true
}
}
return false
}
func toStrings(in []any) []any {
out := make([]any, len(in))
for i, x := range in {
out[i] = fmt.Sprintf("%v", x)
}
return out
}

View File

@@ -0,0 +1,165 @@
package querybuilder
import (
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
func TestDataTypeCollisionHandledFieldName(t *testing.T) {
tests := []struct {
name string
key *telemetrytypes.TelemetryFieldKey
value any
tblFieldName string
expectedFieldName string
expectedValue any
operator qbtypes.FilterOperator
}{
{
name: "http_status_code_string_field_with_numeric_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.status_code",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: float64(200),
tblFieldName: "attribute_string_http$$status_code",
expectedFieldName: "toFloat64OrNull(attribute_string_http$$status_code)",
expectedValue: float64(200),
},
{
name: "service_enabled_string_field_with_bool_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "service.enabled",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: true,
tblFieldName: "attribute_string_service$$enabled",
expectedFieldName: "attribute_string_service$$enabled",
expectedValue: "true",
},
{
name: "http_method_string_field_with_string_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.method",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: "GET",
tblFieldName: "attribute_string_http$$method",
expectedFieldName: "attribute_string_http$$method",
expectedValue: "GET",
},
{
name: "response_times_string_field_with_numeric_array",
key: &telemetrytypes.TelemetryFieldKey{
Name: "response.times",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: []any{float64(100.5), float64(200.3), float64(150.7)},
tblFieldName: "attribute_string_response$$times",
expectedFieldName: "toFloat64OrNull(attribute_string_response$$times)",
expectedValue: []any{float64(100.5), float64(200.3), float64(150.7)},
},
{
name: "error_codes_string_field_with_mixed_array",
key: &telemetrytypes.TelemetryFieldKey{
Name: "error.codes",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: []any{float64(500), "TIMEOUT", float64(503)},
tblFieldName: "attribute_string_error$$codes",
expectedFieldName: "attribute_string_error$$codes",
expectedValue: []any{"500", "TIMEOUT", "503"},
},
// numbers
{
// we cast the key to string if the value is not a number or operator is not a comparison operator
name: "http_request_duration_float_field_with_string_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "1234.56",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toString(attribute_float64_http$$request$$duration)",
expectedValue: "1234.56",
operator: qbtypes.FilterOperatorEqual,
},
{
// we cast to float64 if it's a comparison operator and the value is a stringified number
// reason:- https://github.com/SigNoz/signoz/pull/9154#issuecomment-3369941207
name: "http_request_duration_float_field_with_string_value_comparison_operator",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "9",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toFloat64(attribute_float64_http$$request$$duration)",
expectedValue: "9",
operator: qbtypes.FilterOperatorGreaterThan,
},
{
// we cast to float64 if it's a comparison operator and the value is a stringified number
// reason:- https://github.com/SigNoz/signoz/pull/9154#issuecomment-3369941207
name: "http_request_duration_float_field_with_string_value_comparison_operator_1",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "9.11",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toFloat64(attribute_float64_http$$request$$duration)",
expectedValue: "9.11",
operator: qbtypes.FilterOperatorGreaterThan,
},
{
// we cast the key to string if the value is not a number or operator is not a comparison operator
name: "http_request_duration_float_field_with_string_value_comparison_operator_2",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "ERROR",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toString(attribute_float64_http$$request$$duration)",
expectedValue: "ERROR",
operator: qbtypes.FilterOperatorGreaterThan,
},
// bools
{
name: "feature_enabled_bool_field_with_string_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "feature.enabled",
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
value: "true",
tblFieldName: "attribute_bool_feature$$enabled",
expectedFieldName: "toString(attribute_bool_feature$$enabled)",
expectedValue: "true",
},
{
name: "feature_flags_bool_field_with_mixed_array",
key: &telemetrytypes.TelemetryFieldKey{
Name: "feature.flags",
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
value: []any{true, "enabled", false},
tblFieldName: "attribute_bool_feature$$flags",
expectedFieldName: "toString(attribute_bool_feature$$flags)",
expectedValue: []any{"true", "enabled", "false"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resultFieldName, resultValue := DataTypeCollisionHandledFieldName(tt.key, tt.value, tt.tblFieldName, tt.operator)
assert.Equal(t, tt.expectedFieldName, resultFieldName)
assert.Equal(t, tt.expectedValue, resultValue)
})
}
}

View File

@@ -55,7 +55,7 @@ func (c *conditionBuilder) conditionFor(
tblFieldName, value = GetBodyJSONKey(ctx, key, operator, value)
}
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
tblFieldName, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, tblFieldName, operator)
// make use of case insensitive index for body
if tblFieldName == "body" {

View File

@@ -424,6 +424,16 @@ func TestFilterExprLogs(t *testing.T) {
expectedArgs: []any{"critical", "resolved", "open", true},
expectedErrorContains: "",
},
{
// this will result in failure from the DB side.
// user will have to use attribute.status:string > open
category: "FREETEXT with conditions",
query: "critical NOT resolved status > open",
shouldPass: true,
expectedQuery: "WHERE (match(LOWER(body), LOWER(?)) AND NOT (match(LOWER(body), LOWER(?))) AND (toString(attributes_number['status']) > ? AND mapContains(attributes_number, 'status') = ?))",
expectedArgs: []any{"critical", "resolved", "open", true},
expectedErrorContains: "",
},
{
category: "FREETEXT with conditions",
query: "database error type=mysql",

View File

@@ -55,7 +55,7 @@ func (c *conditionBuilder) ConditionFor(
return "", nil
}
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
tblFieldName, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, tblFieldName, operator)
// key must exists to apply main filter
expr := `if(mapContains(%s, %s), %s, true)`

View File

@@ -73,7 +73,7 @@ func (c *conditionBuilder) conditionFor(
}
}
} else {
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
tblFieldName, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, tblFieldName, operator)
}
// regular operators

View File

@@ -17,7 +17,8 @@ var (
typeRoleSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeResourceSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeResourcesSelectorRegex = regexp.MustCompile(`^org/[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
// resources selectors are used to select either all or none
typeResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)
type SelectorCallbackFn func(context.Context, Claims) ([]Selector, error)
@@ -27,7 +28,7 @@ type Selector struct {
}
func NewSelector(typed Type, selector string) (Selector, error) {
err := IsValidSelector(typed, Selector{val: selector})
err := IsValidSelector(typed, selector)
if err != nil {
return Selector{}, err
}
@@ -35,26 +36,26 @@ func NewSelector(typed Type, selector string) (Selector, error) {
return Selector{val: selector}, nil
}
func IsValidSelector(typed Type, selector Selector) error {
func IsValidSelector(typed Type, selector string) error {
switch typed {
case TypeUser:
if !typeUserSelectorRegex.MatchString(selector.String()) {
if !typeUserSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeUserSelectorRegex.String())
}
case TypeRole:
if !typeRoleSelectorRegex.MatchString(selector.String()) {
if !typeRoleSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeRoleSelectorRegex.String())
}
case TypeOrganization:
if !typeOrganizationSelectorRegex.MatchString(selector.String()) {
if !typeOrganizationSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeOrganizationSelectorRegex.String())
}
case TypeResource:
if !typeResourceSelectorRegex.MatchString(selector.String()) {
if !typeResourceSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeResourceSelectorRegex.String())
}
case TypeResources:
if !typeResourcesSelectorRegex.MatchString(selector.String()) {
if !typeResourcesSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeResourcesSelectorRegex.String())
}
}
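An illustrative sketch of the behavior change in the resources selector (the UUID below is made up):

	// accepted after this change: resources selection is all-or-nothing
	selAll, errAll := authtypes.NewSelector(authtypes.TypeResources, "*")
	// rejected now: the old org-prefixed form no longer validates
	_, errOld := authtypes.NewSelector(authtypes.TypeResources, "org/0199c47d-f61b-7833-bc5f-c0730f12f046")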

View File

@@ -24,7 +24,7 @@ type Transaction struct {
}
func NewObject(resource Resource, selector Selector) (*Object, error) {
err := IsValidSelector(resource.Type, selector)
err := IsValidSelector(resource.Type, selector.val)
if err != nil {
return nil, err
}

View File

@@ -30,8 +30,8 @@ var (
type Typeable interface {
Type() Type
Name() Name
Prefix() string
Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error)
Prefix(orgId valuer.UUID) string
Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error)
}
type Type struct{ valuer.String }

View File

@@ -3,6 +3,7 @@ package authtypes
import (
"strings"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -10,7 +11,7 @@ var _ Typeable = new(typeableOrganization)
type typeableOrganization struct{}
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := strings.Join([]string{typeableOrganization.Type().StringValue(), selector.String()}, ":")
@@ -28,6 +29,6 @@ func (typeableOrganization *typeableOrganization) Name() Name {
return MustNewName("organization")
}
func (typeableOrganization *typeableOrganization) Prefix() string {
func (typeableOrganization *typeableOrganization) Prefix(_ valuer.UUID) string {
return typeableOrganization.Type().StringValue()
}

View File

@@ -1,8 +1,7 @@
package authtypes
import (
"strings"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -24,10 +23,10 @@ func MustNewTypeableResource(name Name) Typeable {
return typeableResource
}
func (typeableResource *typeableResource) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
func (typeableResource *typeableResource) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := typeableResource.Prefix() + "/" + selector.String()
object := typeableResource.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -42,6 +41,7 @@ func (typeableResource *typeableResource) Name() Name {
return typeableResource.name
}
func (typeableResource *typeableResource) Prefix() string {
return strings.Join([]string{typeableResource.Type().StringValue(), typeableResource.Name().String()}, ":")
func (typeableResource *typeableResource) Prefix(orgID valuer.UUID) string {
// example: resource:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/dashboard
return typeableResource.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableResource.Name().String()
}

View File

@@ -1,8 +1,7 @@
package authtypes
import (
"strings"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -24,10 +23,10 @@ func MustNewTypeableResources(name Name) Typeable {
return resources
}
func (typeableResources *typeableResources) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
func (typeableResources *typeableResources) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := typeableResources.Prefix() + "/" + selector.String()
object := typeableResources.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -42,6 +41,7 @@ func (typeableResources *typeableResources) Name() Name {
return typeableResources.name
}
func (typeableResources *typeableResources) Prefix() string {
return strings.Join([]string{typeableResources.Type().StringValue(), typeableResources.Name().String()}, ":")
func (typeableResources *typeableResources) Prefix(orgID valuer.UUID) string {
// example: resources:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/dashboards
return typeableResources.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableResources.Name().String()
}

View File

@@ -1,8 +1,7 @@
package authtypes
import (
"strings"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -10,10 +9,10 @@ var _ Typeable = new(typeableRole)
type typeableRole struct{}
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := strings.Join([]string{typeableRole.Type().StringValue(), selector.String()}, ":")
object := typeableRole.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -28,6 +27,7 @@ func (typeableRole *typeableRole) Name() Name {
return MustNewName("role")
}
func (typeableRole *typeableRole) Prefix() string {
return typeableRole.Type().StringValue()
func (typeableRole *typeableRole) Prefix(orgID valuer.UUID) string {
// example: role:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/role
return typeableRole.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableRole.Name().String()
}

View File

@@ -1,8 +1,7 @@
package authtypes
import (
"strings"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -10,10 +9,10 @@ var _ Typeable = new(typeableUser)
type typeableUser struct{}
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := strings.Join([]string{typeableUser.Type().StringValue(), selector.String()}, ":")
object := typeableUser.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -28,6 +27,7 @@ func (typeableUser *typeableUser) Name() Name {
return MustNewName("user")
}
func (typeableUser *typeableUser) Prefix() string {
return typeableUser.Type().StringValue()
func (typeableUser *typeableUser) Prefix(orgID valuer.UUID) string {
// example: user:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/user
return typeableUser.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableUser.Name().String()
}
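Taken together, the org-scoped Prefix implementations yield OpenFGA objects namespaced per tenant; an illustration with made-up IDs and relation:

	tuple := &openfgav1.TupleKey{
		User:     "user:0199c47d-aaaa-7833-bc5f-c0730f12f046",
		Relation: "read",
		Object:   "resource:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/dashboard/0199c47d-bbbb-7833-bc5f-c0730f12f046",
	}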

View File

@@ -13,8 +13,8 @@ import (
)
var (
ResourceDashboard = authtypes.MustNewTypeableResource(authtypes.MustNewName("dashboard"))
ResourcesDashboards = authtypes.MustNewTypeableResources(authtypes.MustNewName("dashboards"))
TypeableResourceDashboard = authtypes.MustNewTypeableResource(authtypes.MustNewName("dashboard"))
TypeableResourcesDashboards = authtypes.MustNewTypeableResources(authtypes.MustNewName("dashboards"))
)
type StorableDashboard struct {

View File

@@ -137,6 +137,14 @@ func (f FilterOperator) IsNegativeOperator() bool {
return true
}
func (f FilterOperator) IsComparisonOperator() bool {
switch f {
case FilterOperatorGreaterThan, FilterOperatorGreaterThanOrEq, FilterOperatorLessThan, FilterOperatorLessThanOrEq:
return true
}
return false
}
type OrderDirection struct {
valuer.String
}

View File

@@ -545,6 +545,9 @@ func EvalFuncs() map[string]govaluate.ExpressionFunction {
rad180 := 180 / math.Pi
// Mathematical functions
funcs["abs"] = func(args ...any) (any, error) {
return math.Abs(args[0].(float64)), nil
}
funcs["exp"] = func(args ...any) (any, error) {
return math.Exp(args[0].(float64)), nil
}
@@ -623,7 +626,7 @@ func EvalFuncs() map[string]govaluate.ExpressionFunction {
// GetSupportedFunctions returns the list of supported function names
func GetSupportedFunctions() []string {
return []string{
"exp", "log", "ln", "exp2", "log2", "exp10", "log10",
"abs", "exp", "log", "ln", "exp2", "log2", "exp10", "log10",
"sqrt", "cbrt", "erf", "erfc", "lgamma", "tgamma",
"sin", "cos", "tan", "asin", "acos", "atan",
"degrees", "radians", "now",

View File

@@ -863,3 +863,39 @@ func TestComplexExpression(t *testing.T) {
}
}
}
func TestAbsValueExpression(t *testing.T) {
tsData := map[string]*TimeSeriesData{
"A": createFormulaTestTimeSeriesData("A", []*TimeSeries{
{
Labels: createLabels(map[string]string{"service_name": "frontend"}),
Values: createValues(map[int64]float64{
1: -10,
2: 20,
}),
},
}),
"B": createFormulaTestTimeSeriesData("B", []*TimeSeries{
{
Labels: createLabels(map[string]string{"service_name": "frontend"}),
Values: createValues(map[int64]float64{
1: 5,
2: -4,
}),
},
}),
}
evaluator, err := NewFormulaEvaluator("abs(A) + abs(B)", map[string]bool{"A": true, "B": true})
require.NoError(t, err)
result, err := evaluator.EvaluateFormula(tsData)
require.NoError(t, err)
require.NotNil(t, result)
require.Equal(t, 1, len(result))
series := result[0]
require.Equal(t, 2, len(series.Values))
assert.Equal(t, 15.0, series.Values[0].Value) // |-10| + |5| = 15
assert.Equal(t, 24.0, series.Values[1].Value) // |20| + |-4| = 24
}

View File

@@ -21,6 +21,10 @@ var (
ErrCodeRoleFailedTransactionsFromString = errors.MustNewCode("role_failed_transactions_from_string")
)
var (
TypeableResourcesRoles = authtypes.MustNewTypeableResources(authtypes.MustNewName("roles"))
)
type StorableRole struct {
bun.BaseModel `bun:"table:role"`
@@ -166,7 +170,7 @@ func (role *PatchableRole) UnmarshalJSON(data []byte) error {
return nil
}
func GetAdditionTuples(id valuer.UUID, relation authtypes.Relation, additions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
func GetAdditionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Relation, additions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, object := range additions {
@@ -179,6 +183,7 @@ func GetAdditionTuples(id valuer.UUID, relation authtypes.Relation, additions []
),
relation,
[]authtypes.Selector{object.Selector},
orgID,
)
if err != nil {
return nil, err
@@ -190,8 +195,8 @@ func GetAdditionTuples(id valuer.UUID, relation authtypes.Relation, additions []
return tuples, nil
}
func GetDeletionTuples(id valuer.UUID, relation authtypes.Relation, deletions []*authtypes.Object) ([]*openfgav1.TupleKeyWithoutCondition, error) {
tuples := make([]*openfgav1.TupleKeyWithoutCondition, 0)
func GetDeletionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Relation, deletions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, object := range deletions {
typeable := authtypes.MustNewTypeableFromType(object.Resource.Type, object.Resource.Name)
@@ -203,21 +208,13 @@ func GetDeletionTuples(id valuer.UUID, relation authtypes.Relation, deletions []
),
relation,
[]authtypes.Selector{object.Selector},
orgID,
)
if err != nil {
return nil, err
}
deletionTuples := make([]*openfgav1.TupleKeyWithoutCondition, len(transactionTuples))
for idx, tuple := range transactionTuples {
deletionTuples[idx] = &openfgav1.TupleKeyWithoutCondition{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
}
}
tuples = append(tuples, deletionTuples...)
tuples = append(tuples, transactionTuples...)
}
return tuples, nil

View File

@@ -207,6 +207,7 @@ func (r *PostableRule) processRuleDefaults() error {
q.Expression = qLabel
}
}
// added alerts v2 fields
if r.SchemaVersion == DefaultSchemaVersion {
thresholdName := CriticalThresholdName
@@ -215,12 +216,20 @@ func (r *PostableRule) processRuleDefaults() error {
thresholdName = severity
}
}
// For anomaly detection with ValueIsBelow, negate the target
targetValue := r.RuleCondition.Target
if r.RuleType == RuleTypeAnomaly && r.RuleCondition.CompareOp == ValueIsBelow && targetValue != nil {
negated := -1 * *targetValue
targetValue = &negated
}
thresholdData := RuleThresholdData{
Kind: BasicThresholdKind,
Spec: BasicRuleThresholds{{
Name: thresholdName,
TargetUnit: r.RuleCondition.TargetUnit,
TargetValue: r.RuleCondition.Target,
TargetValue: targetValue,
MatchType: r.RuleCondition.MatchType,
CompareOp: r.RuleCondition.CompareOp,
Channels: r.PreferredChannels,
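A small sketch (hypothetical numbers) of why the negation is needed: "below" anomalies surface as negative anomaly scores, so a user-facing target of 2.0 must be stored as -2.0 for the comparison to fire.

	target := 2.0                // user configures "below 2.0"
	negated := -1 * target       // stored threshold becomes -2.0
	score := -2.1                // anomaly score from the detector
	fmt.Println(score < negated) // true -> alert fires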

View File

@@ -718,3 +718,353 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
assert.Equal(t, 1, len(vector))
}
func TestAnomalyNegationShouldAlert(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series v3.Series
shouldAlert bool
expectedValue float64
}{
{
name: "anomaly rule with ValueIsBelow - should alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
},
shouldAlert: true,
expectedValue: -2.1,
},
{
name: "anomaly rule with ValueIsBelow; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
},
shouldAlert: false,
},
{
name: "anomaly rule with ValueIsAbove; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyAboveTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "1",
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
},
shouldAlert: true,
expectedValue: 2.1,
},
{
name: "anomaly rule with ValueIsAbove; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyAboveTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "1",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
},
shouldAlert: false,
},
{
name: "anomaly rule with ValueIsBelow and AllTheTimes; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowAllTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "2",
"op": "2",
"selectedQuery": "A"
}
}`), // below and all the times
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
},
},
shouldAlert: true,
expectedValue: -2.1, // max value when all are below threshold
},
{
name: "anomaly rule with ValueIsBelow and AllTheTimes; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowAllTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "2",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
},
},
shouldAlert: false,
},
{
name: "anomaly rule with ValueOutsideBounds; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyOutOfBoundsTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 7.0,
"matchType": "1",
"op": "7",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
shouldAlert: true,
expectedValue: -8.0,
},
{
name: "non-anomaly threshold rule with ValueIsBelow; should alert",
ruleJSON: []byte(`{
"alert": "ThresholdTest",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 90.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
},
shouldAlert: true,
expectedValue: 80.0,
},
{
name: "non-anomaly rule with ValueIsBelow - should not alert",
ruleJSON: []byte(`{
"alert": "ThresholdTest",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 50.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 60.0}, // above 50, does not trigger
{Timestamp: 2000, Value: 90.0},
},
},
shouldAlert: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
rule := PostableRule{}
err := json.Unmarshal(tt.ruleJSON, &rule)
if err != nil {
t.Fatalf("Failed to unmarshal rule: %v", err)
}
ruleThreshold, err := rule.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
t.Fatalf("unexpected error from GetRuleThreshold: %v", err)
}
resultVector, err := ruleThreshold.ShouldAlert(tt.series, "")
if err != nil {
t.Fatalf("unexpected error from ShouldAlert: %v", err)
}
shouldAlert := len(resultVector) > 0
if shouldAlert != tt.shouldAlert {
t.Errorf("Expected shouldAlert=%v, got %v. %s",
tt.shouldAlert, shouldAlert, tt.name)
}
if tt.shouldAlert && len(resultVector) > 0 {
sample := resultVector[0]
if sample.V != tt.expectedValue {
t.Errorf("Expected alert value=%.2f, got %.2f. %s",
tt.expectedValue, sample.V, tt.name)
}
}
})
}
}
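The anomaly cases above encode one subtlety: with the below operator, an anomaly rule treats the target as its negation (target 2.0 becomes a bound of -2.0), which is why points at -2.1/-2.2/-2.5 alert while -1.0 breaks the condition. A minimal sketch of that evaluation, with all names assumed rather than taken from this diff:

package main

import "fmt"

// anomalyBelowAllTheTimes is a sketch, not the rules package implementation:
// the "below" target is negated for anomaly rules, "all the times" requires
// every point under the bound, and the reported value is the maximum such point.
func anomalyBelowAllTheTimes(target float64, points []float64) (bool, float64) {
	bound := -target
	alertValue := points[0]
	for _, p := range points {
		if p >= bound {
			return false, 0 // one point at or above the bound breaks "all the times"
		}
		if p > alertValue {
			alertValue = p
		}
	}
	return true, alertValue
}

func main() {
	fmt.Println(anomalyBelowAllTheTimes(2.0, []float64{-2.1, -2.2, -2.5})) // true -2.1
	fmt.Println(anomalyBelowAllTheTimes(2.0, []float64{-3.0, -1.0, -2.5})) // false 0
}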

View File

@@ -136,97 +136,3 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
func DataTypeCollisionHandledFieldName(key *TelemetryFieldKey, value any, tblFieldName string) (string, any) {
// This block of code exists to handle data type collisions.
// We don't want to fail a request when a key has more than one data type.
// Take `http.status_code` as an example, and suppose users sent both string and number values.
// When they search for `http.status_code=200`, we search across both the number and string columns
// and return results from both.
// While we expect users not to send mixed data types, it inevitably happens,
// so we handle the collisions here.
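// e.g. a numeric search on a string column becomes: toFloat64OrNull(attribute_string_http$$status_code) = 200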
switch key.FieldDataType {
case FieldDataTypeString:
switch v := value.(type) {
case float64:
// numeric value against a string column: cast the column to a number
tblFieldName = castFloat(tblFieldName)
case []any:
if allFloats(v) {
tblFieldName = castFloat(tblFieldName)
} else if hasString(v) {
value = toStrings(v)
}
case bool:
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case FieldDataTypeFloat64, FieldDataTypeInt64, FieldDataTypeNumber:
switch v := value.(type) {
// Why? ClickHouse returns an error for a simple check such as
// attributes_number['http.status_code'] = 200, but not for attributes_number['http.status_code'] >= 200:
// DB::Exception: Bad get: has UInt64, requested Float64.
// How does it work in v4? v4 prepares the full query with the values inlined in the query string.
// When we format the float it becomes attributes_number['http.status_code'] = 200.000,
// which ClickHouse gladly accepts without error.
// However, when the value is passed as a query arg, the default formatter
// https://github.com/ClickHouse/clickhouse-go/blob/757e102f6d8c6059d564ce98795b4ce2a101b1a5/bind.go#L393
// is used, which prepares the final query as attributes_number['http.status_code'] = 200, giving this error.
// Casting the column to Float64 is one way to work around it.
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// string value against a numeric column: cast the column to a string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)
case []any:
if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
}
return tblFieldName, value
}
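// Cast helpers used above: castFloat uses toFloat64OrNull so non-numeric strings
// become NULL and simply never match; castFloatHack uses a plain toFloat64 to force
// float semantics for bound query args (see the comment above); castString wraps the
// column in toString. allFloats/hasString/toStrings classify and normalize []any values.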
func castFloat(col string) string { return fmt.Sprintf("toFloat64OrNull(%s)", col) }
func castFloatHack(col string) string { return fmt.Sprintf("toFloat64(%s)", col) }
func castString(col string) string { return fmt.Sprintf("toString(%s)", col) }
func allFloats(in []any) bool {
for _, x := range in {
if _, ok := x.(float64); !ok {
return false
}
}
return true
}
func hasString(in []any) bool {
for _, x := range in {
if _, ok := x.(string); ok {
return true
}
}
return false
}
func toStrings(in []any) []any {
out := make([]any, len(in))
for i, x := range in {
out[i] = fmt.Sprintf("%v", x)
}
return out
}
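For reference, a minimal sketch of calling DataTypeCollisionHandledFieldName directly; the import path is assumed, and the expected output mirrors the unit tests in the next file:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes" // import path assumed
)

func main() {
	key := &telemetrytypes.TelemetryFieldKey{
		Name:          "http.status_code",
		FieldDataType: telemetrytypes.FieldDataTypeString,
	}
	// A numeric literal against a string-typed column: the column gets cast so
	// ClickHouse compares numerically instead of lexicographically.
	col, val := telemetrytypes.DataTypeCollisionHandledFieldName(key, float64(200), "attribute_string_http$$status_code")
	fmt.Println(col, val) // toFloat64OrNull(attribute_string_http$$status_code) 200
}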

View File

@@ -2,8 +2,6 @@ package telemetrytypes
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestGetFieldKeyFromKeyText(t *testing.T) {
@@ -93,115 +91,3 @@ func TestGetFieldKeyFromKeyText(t *testing.T) {
}
}
}
func TestDataTypeCollisionHandledFieldName(t *testing.T) {
tests := []struct {
name string
key *TelemetryFieldKey
value any
tblFieldName string
expectedFieldName string
expectedValue any
}{
{
name: "http_status_code_string_field_with_numeric_value",
key: &TelemetryFieldKey{
Name: "http.status_code",
FieldDataType: FieldDataTypeString,
},
value: float64(200),
tblFieldName: "attribute_string_http$$status_code",
expectedFieldName: "toFloat64OrNull(attribute_string_http$$status_code)",
expectedValue: float64(200),
},
{
name: "service_enabled_string_field_with_bool_value",
key: &TelemetryFieldKey{
Name: "service.enabled",
FieldDataType: FieldDataTypeString,
},
value: true,
tblFieldName: "attribute_string_service$$enabled",
expectedFieldName: "attribute_string_service$$enabled",
expectedValue: "true",
},
{
name: "http_method_string_field_with_string_value",
key: &TelemetryFieldKey{
Name: "http.method",
FieldDataType: FieldDataTypeString,
},
value: "GET",
tblFieldName: "attribute_string_http$$method",
expectedFieldName: "attribute_string_http$$method",
expectedValue: "GET",
},
{
name: "response_times_string_field_with_numeric_array",
key: &TelemetryFieldKey{
Name: "response.times",
FieldDataType: FieldDataTypeString,
},
value: []any{float64(100.5), float64(200.3), float64(150.7)},
tblFieldName: "attribute_string_response$$times",
expectedFieldName: "toFloat64OrNull(attribute_string_response$$times)",
expectedValue: []any{float64(100.5), float64(200.3), float64(150.7)},
},
{
name: "error_codes_string_field_with_mixed_array",
key: &TelemetryFieldKey{
Name: "error.codes",
FieldDataType: FieldDataTypeString,
},
value: []any{float64(500), "TIMEOUT", float64(503)},
tblFieldName: "attribute_string_error$$codes",
expectedFieldName: "attribute_string_error$$codes",
expectedValue: []any{"500", "TIMEOUT", "503"},
},
// numbers
{
name: "http_request_duration_float_field_with_string_value",
key: &TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: FieldDataTypeFloat64,
},
value: "1234.56",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toString(attribute_float64_http$$request$$duration)",
expectedValue: "1234.56",
},
// bools
{
name: "feature_enabled_bool_field_with_string_value",
key: &TelemetryFieldKey{
Name: "feature.enabled",
FieldDataType: FieldDataTypeBool,
},
value: "true",
tblFieldName: "attribute_bool_feature$$enabled",
expectedFieldName: "toString(attribute_bool_feature$$enabled)",
expectedValue: "true",
},
{
name: "feature_flags_bool_field_with_mixed_array",
key: &TelemetryFieldKey{
Name: "feature.flags",
FieldDataType: FieldDataTypeBool,
},
value: []any{true, "enabled", false},
tblFieldName: "attribute_bool_feature$$flags",
expectedFieldName: "toString(attribute_bool_feature$$flags)",
expectedValue: []any{"true", "enabled", "false"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resultFieldName, resultValue := DataTypeCollisionHandledFieldName(tt.key, tt.value, tt.tblFieldName)
assert.Equal(t, tt.expectedFieldName, resultFieldName)
assert.Equal(t, tt.expectedValue, resultValue)
})
}
}

View File

@@ -29,7 +29,15 @@ class LogsResource(ABC):
self.seen_at_ts_bucket_start = seen_at_ts_bucket_start
def np_arr(self) -> np.array:
return np.array(
[
self.labels,
self.fingerprint,
self.seen_at_ts_bucket_start,
np.uint64(10),
np.uint64(15),
]
)
class LogsResourceOrAttributeKeys(ABC):
@@ -381,7 +389,7 @@ def insert_logs(
table="distributed_logs_resource_keys",
data=[resource_key.np_arr() for resource_key in resource_keys],
)
clickhouse.conn.insert(
database="signoz_logs",
table="distributed_logs_v2",

View File

@@ -816,3 +816,429 @@ def test_logs_time_series_count(
),
"value": 9,
} in series[1]["values"]
def test_datatype_collision(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_jwt_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert logs with data type collision scenarios to exercise the DataTypeCollisionHandledFieldName function
Tests:
1. severity_number comparison with string value
2. http.status_code with mixed string/number values
3. response.time with string values in numeric field
4. Edge cases: empty strings
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = []
# Logs with string values in numeric fields
severity_levels = ["DEBUG", "INFO", "WARN"]
for i in range(3):
logs.append(
Logs(
timestamp=now - timedelta(microseconds=i + 1),
resources={
"deployment.environment": "production",
"service.name": "java",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "java",
"http.status_code": "200", # String value
"response.time": "123.45", # String value
},
body=f"Test log {i+1} with string values",
severity_text=severity_levels[i], # DEBUG(5-8), INFO(9-12), WARN(13-16)
)
)
# Logs with numeric values in string fields
severity_levels_2 = ["ERROR", "FATAL", "TRACE", "DEBUG"]
for i in range(4):
logs.append(
Logs(
timestamp=now - timedelta(microseconds=i + 10),
resources={
"deployment.environment": "production",
"service.name": "go",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "go",
"http.status_code": 404, # Numeric value
"response.time": 456.78, # Numeric value
},
body=f"Test log {i+4} with numeric values",
severity_text=severity_levels_2[i], # ERROR(17-20), FATAL(21-24), TRACE(1-4), DEBUG(5-8)
)
)
# Edge case: empty string and zero value
logs.append(
Logs(
timestamp=now - timedelta(microseconds=20),
resources={
"deployment.environment": "production",
"service.name": "python",
"os.type": "linux",
"host.name": "linux-002",
"cloud.provider": "integration",
"cloud.account.id": "002",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/integration.py",
"code.function": "com.example.Integration.process",
"code.line": 1,
"telemetry.sdk.language": "python",
"http.status_code": "", # Empty string
"response.time": 0, # Zero value
},
body="Edge case test log",
severity_text="ERROR",
)
)
insert_logs(logs)
token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)
# count() of all logs where severity_number > '7'
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "severity_number > '7'"},
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
assert count == 5
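# Without numeric handling the comparison above would be lexicographic:
# '13' > '7' is False as strings, so the WARN(13), ERROR(17) and FATAL(21)
# logs would be missed; the cast makes severity_number > '7' behave as 13 > 7.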
# count() of all logs where severity_number > '7.0'
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "severity_number > '7.0'"},
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
assert count == 5
# Test 2: severity_number comparison with string value
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "severity_number = '13'"}, # String comparison with numeric field
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# WARN severity maps to 13-16 range, so should find 1 log with severity_number = 13
assert count == 1
# Test 3: http.status_code with numeric value (query contains number, actual value is string "200")
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "http.status_code = 200"}, # Numeric comparison with string field
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# Should return 3 logs with http.status_code = "200" (first 3 logs have string value "200")
assert count == 3
# Test 4: http.status_code with string value (query contains string, actual value is numeric 404)
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "http.status_code = '404'"}, # String comparison with numeric field
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# Should return 4 logs with http.status_code = 404 (next 4 logs have numeric value 404)
assert count == 4
# Test 5: Edge case - empty string comparison
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "http.status_code = ''"}, # Empty string comparison
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# Should return 1 log with empty http.status_code (edge case log)
assert count == 1

View File

@@ -0,0 +1,603 @@
"""
Summary:
This test file contains integration tests for Time-To-Live (TTL) and custom retention policies in SigNoz's query service.
It verifies the correct behavior of TTL settings for traces, metrics, and logs, including support for cold storage, custom retention conditions, error handling for invalid configurations, and retrieval of TTL settings.
"""
import time
from http import HTTPStatus
import pytest
import requests
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
from fixtures.logs import Logs
logger = setup_logger(__name__)
@pytest.fixture(name="ttl_test_suite_setup", scope="package", autouse=True)
def ttl_test_suite_setup(create_user_admin): # pylint: disable=unused-argument
# This fixture creates an admin user for the entire TTL test suite
# The create_user_admin fixture is executed just by being a dependency
print("Setting up ttl test suite")
yield
def test_set_ttl_traces_success(signoz: types.SigNoz, get_jwt_token):
"""Test setting TTL for traces with new ttlConfig structure."""
payload = {
"type": "traces",
"duration": "3600h",
}
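# Note: the v1 TTL endpoint takes these settings as query params; the v2
# endpoint used further below takes a JSON body instead.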
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params=payload,
headers=headers,
timeout=30,
)
print(response.text)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data
assert "successfully set up" in response_data["message"].lower()
# Verify TTL settings in Clickhouse
# Allow some time for the TTL to be applied
time.sleep(2)
# Check TTL settings on relevant tables
tables_to_check = [
"signoz_index_v3",
"traces_v3_resource",
"signoz_error_index_v2",
"usage_explorer",
"dependency_graph_minutes_v2",
"trace_summary",
]
# Query to get table engine info which includes TTL
table_list = ", ".join(f"'{table}'" for table in tables_to_check)
query = f"SELECT engine_full FROM system.tables WHERE table in [{table_list}]"
result = signoz.telemetrystore.conn.query(query).result_rows
# Verify TTL exists in all table definitions
assert all("TTL" in r[0] for r in result)
assert all(" SETTINGS" in r[0] for r in result)
ttl_parts = [r[0].split("TTL ")[1].split(" SETTINGS")[0] for r in result]
# All TTLs should include toIntervalSecond(12960000) which is 3600h
assert all("toIntervalSecond(12960000)" in ttl_part for ttl_part in ttl_parts)
def test_set_ttl_traces_with_cold_storage(signoz: types.SigNoz, get_jwt_token):
"""Test setting TTL for traces with cold storage configuration."""
payload = {
"type": "traces",
"duration": f"{90*24}h", # 90 days in hours
"coldStorageVolume": "cold_storage_vol",
"toColdStorageDuration": f"{30*24}h", # 30 days in hours
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data
assert "successfully set up" in response_data["message"].lower()
def test_set_ttl_metrics_success(signoz: types.SigNoz, get_jwt_token):
"""Test setting TTL for metrics using the new setTTLMetrics method."""
payload = {
"type": "metrics",
"duration": f"{90*24}h", # 90 days in hours
"coldStorageVolume": "",
"toColdStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data
assert "successfully set up" in response_data["message"].lower()
# Verify TTL settings in Clickhouse
# Allow some time for the TTL to be applied
time.sleep(2)
# Check TTL settings on relevant metrics tables
tables_to_check = [
"samples_v4",
"samples_v4_agg_5m",
"samples_v4_agg_30m",
"time_series_v4",
"time_series_v4_6hrs",
"time_series_v4_1day",
"time_series_v4_1week",
]
# Query to get table engine info which includes TTL
table_list = "', '".join(tables_to_check)
query = f"SELECT engine_full FROM system.tables WHERE table in ['{table_list}']"
result = signoz.telemetrystore.conn.query(query).result_rows
# Verify TTL exists in all table definitions
assert all("TTL" in r[0] for r in result)
assert all(" SETTINGS" in r[0] for r in result)
ttl_parts = [r[0].split("TTL ")[1].split(" SETTINGS")[0] for r in result]
# All TTLs should include toIntervalSecond(7776000) which is 90*24h
assert all("toIntervalSecond(7776000)" in ttl_part for ttl_part in ttl_parts)
def test_set_ttl_metrics_with_cold_storage(signoz: types.SigNoz, get_jwt_token):
"""Test setting TTL for metrics with cold storage configuration."""
payload = {
"type": "metrics",
"duration": f"{90*24}h", # 90 days in hours
"coldStorageVolume": "metrics_cold_vol",
"toColdStorageDuration": f"{20*24}h", # 20 days in hours
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data
assert "successfully set up" in response_data["message"].lower()
def test_set_ttl_invalid_type(signoz: types.SigNoz, get_jwt_token):
"""Test setting TTL with invalid type returns error."""
payload = {
"type": "invalid_type",
"duration": f"{90*24}h",
"coldStorageVolume": "",
"toColdStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.BAD_REQUEST
def test_set_custom_retention_ttl_basic(signoz: types.SigNoz, get_jwt_token):
"""Test setting custom retention TTL with basic configuration."""
payload = {
"type": "logs",
"defaultTTLDays": 100,
"ttlConditions": [],
"coldStorageVolume": "",
"coldStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data
# Verify TTL settings in Clickhouse
# Allow some time for the TTL to be applied
time.sleep(2)
# Check TTL settings on relevant tables
tables_to_check = [
"logs_v2",
"logs_v2_resource",
]
# Query to get table engine info which includes TTL
table_list = "', '".join(tables_to_check)
query = f"SELECT engine_full FROM system.tables WHERE table in ['{table_list}']"
result = signoz.telemetrystore.conn.query(query).result_rows
# Verify TTL exists in all table definitions
assert all("TTL" in r[0] for r in result)
assert all(" SETTINGS" in r[0] for r in result)
ttl_parts = [r[0].split("TTL ")[1].split(" SETTINGS")[0] for r in result]
# Also verify the TTL parts contain retention_days
assert all("_retention_days" in ttl_part for ttl_part in ttl_parts)
# Query to describe tables and check retention_days column
for table in tables_to_check:
describe_query = f"DESCRIBE TABLE signoz_logs.{table}"
describe_result = signoz.telemetrystore.conn.query(describe_query).result_rows
# Find the _retention_days column
retention_col = next(
(row for row in describe_result if row[0] == "_retention_days"), None
)
assert (
retention_col is not None
), f"_retention_days column not found in table {table}"
assert (
retention_col[1] == "UInt16"
), f"Expected _retention_days to be UInt16 in table {table}, but got {retention_col[1]}"
assert (
retention_col[3] == "100"
), f"Expected default value of _retention_days to be 100 in table {table}, but got {retention_col[3]}"
def test_set_custom_retention_ttl_with_conditions(
signoz: types.SigNoz, get_jwt_token, insert_logs
):
"""Test setting custom retention TTL with filter conditions."""
payload = {
"type": "logs",
"defaultTTLDays": 30,
"ttlConditions": [
{
"conditions": [
{"key": "service_name", "values": ["frontend", "backend"]}
],
"ttlDays": 60,
}
],
"coldStorageVolume": "",
"coldStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.BAD_REQUEST
# Need to ensure that "severity" and "service_name" keys exist in logsAttributeKeys table
# Insert some logs with these attribute keys
logs = [
Logs(resources={"service_name": "frontend"}, severity_text="ERROR"),
Logs(resources={"service_name": "backend"}, severity_text="FATAL"),
]
insert_logs(logs)
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data
def test_set_custom_retention_ttl_with_cold_storage(
signoz: types.SigNoz, get_jwt_token, insert_logs
):
"""Test setting custom retention TTL with cold storage configuration."""
payload = {
"type": "logs",
"defaultTTLDays": 60,
"ttlConditions": [
{
"conditions": [{"key": "environment", "values": ["production"]}],
"ttlDays": 180,
}
],
"coldStorageVolume": "logs_cold_storage",
"coldStorageDuration": 30, # 30 days to cold storage
}
# Insert some logs with these attribute keys
logs = [
Logs(resources={"environment": "production"}, severity_text="ERROR"),
]
insert_logs(logs)
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.BAD_REQUEST
response_data = response.json()
assert "error" in response_data
assert "message" in response_data["error"]
assert "Unknown storage policy `tiered`" in response_data["error"]["message"]
def test_set_custom_retention_ttl_duplicate_conditions(
signoz: types.SigNoz, get_jwt_token
):
"""Test that duplicate TTL conditions are rejected."""
payload = {
"type": "logs",
"defaultTTLDays": 30,
"ttlConditions": [
{
"conditions": [{"key": "service_name", "values": ["frontend"]}],
"ttlDays": 60,
},
{
"conditions": [
{
"key": "service_name",
"values": ["frontend"], # Duplicate condition
}
],
"ttlDays": 90,
},
],
"coldStorageVolume": "",
"coldStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
# Should return error for duplicate conditions
assert response.status_code == HTTPStatus.BAD_REQUEST
def test_set_custom_retention_ttl_invalid_condition(
signoz: types.SigNoz, get_jwt_token
):
"""Test that conditions with empty values are rejected."""
payload = {
"type": "logs",
"defaultTTLDays": 30,
"ttlConditions": [
{
"conditions": [
{
"key": "service_name",
"values": [], # Empty values should be rejected
}
],
"ttlDays": 60,
}
],
"coldStorageVolume": "",
"coldStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
# Should return error for empty condition values
assert response.status_code == HTTPStatus.BAD_REQUEST
def test_get_custom_retention_ttl(signoz: types.SigNoz, get_jwt_token, insert_logs):
"""Test getting custom retention TTL configuration."""
# First set a custom retention TTL
set_payload = {
"type": "logs",
"defaultTTLDays": 45,
"ttlConditions": [
{
"conditions": [{"key": "service_name", "values": ["test-service"]}],
"ttlDays": 90,
}
],
"coldStorageVolume": "",
"coldStorageDuration": 0,
}
# Insert some logs with these attribute keys
logs = [
Logs(resources={"service_name": "test-service"}, severity_text="ERROR"),
]
insert_logs(logs)
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
set_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=set_payload,
headers=headers,
timeout=30,
)
assert set_response.status_code == HTTPStatus.OK
# Allow some time for the TTL to be processed
time.sleep(2)
# Now get the TTL configuration
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
get_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
params={"type": "logs"},
headers=headers,
timeout=30,
)
response_data = get_response.json()
# Verify the response contains expected fields
assert response_data["status"] == "success"
assert response_data["default_ttl_days"] == 45
assert response_data["cold_storage_ttl_days"] == -1
assert response_data["ttl_conditions"][0]["ttlDays"] == 90
assert response_data["ttl_conditions"][0]["conditions"][0]["key"] == "service_name"
assert response_data["ttl_conditions"][0]["conditions"][0]["values"] == [
"test-service"
]
def test_get_ttl_traces_success(signoz: types.SigNoz, get_jwt_token):
"""Test getting TTL for traces."""
# First set a TTL configuration for traces
set_payload = {
"type": "traces",
"duration": "720h", # 30 days in hours
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
set_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params=set_payload,
headers=headers,
timeout=30,
)
print(set_response.text)
assert set_response.status_code == HTTPStatus.OK
# Allow some time for the TTL to be processed
time.sleep(2)
# Now get the TTL configuration for traces
get_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/settings/ttl"),
params={"type": "traces"},
headers=headers,
timeout=30,
)
assert get_response.status_code == HTTPStatus.OK
response_data = get_response.json()
# Verify the response contains expected fields and values
assert response_data["status"] == "success"
assert "traces_ttl_duration_hrs" in response_data
assert "traces_move_ttl_duration_hrs" in response_data
assert (
response_data["traces_ttl_duration_hrs"] == 720
) # Note: response is in hours as integer
assert (
response_data["traces_move_ttl_duration_hrs"] == -1
) # -1 indicates no cold storage configured
def test_large_ttl_conditions_list(signoz: types.SigNoz, get_jwt_token, insert_logs):
"""Test custom retention TTL with many conditions."""
# Create a list of many TTL conditions to test performance and limits
conditions = []
for i in range(10): # Test with 10 conditions
conditions.append(
{
"conditions": [{"key": "service_name", "values": [f"service-{i}"]}],
"ttlDays": 30 + (i * 10),
}
)
logs = [
Logs(resources={"service_name": f"service-{i}"}, severity_text="ERROR")
for i in range(10)
]
insert_logs(logs)
payload = {
"type": "logs",
"defaultTTLDays": 30,
"ttlConditions": conditions,
"coldStorageVolume": "",
"coldStorageDuration": 0,
}
headers = {
"Authorization": f"Bearer {get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)}"
}
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/settings/ttl"),
json=payload,
headers=headers,
timeout=30,
)
assert response.status_code == HTTPStatus.OK
response_data = response.json()
assert "message" in response_data