Compare commits

..

1 Commit

Author        SHA1        Message                                             Date
srikanthccv   be185bdda8  fix: summary metric type works with query builder  2025-10-04 18:52:01 +05:30
89 changed files with 1111 additions and 2334 deletions

View File

@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.6
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.6
container_name: schema-migrator-async
command:
- async

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.96.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.6
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.6
deploy:
restart_policy:
condition: on-failure

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.97.0
image: signoz/signoz:v0.96.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.7
image: signoz/signoz-otel-collector:v0.129.6
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.7
image: signoz/signoz-schema-migrator:v0.129.6
deploy:
restart_policy:
condition: on-failure

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.97.0}
image: signoz/signoz:${VERSION:-v0.96.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.6}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
container_name: schema-migrator-async
command:
- async

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.97.0}
image: signoz/signoz:${VERSION:-v0.96.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.6}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.7}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.6}
container_name: schema-migrator-async
command:
- async

View File

@@ -232,7 +232,7 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
series, predictedSeries, weekSeries *qbtypes.TimeSeries,
series, predictedSeries *qbtypes.TimeSeries,
zScoreThreshold float64,
) (*qbtypes.TimeSeries, *qbtypes.TimeSeries) {
upperBoundSeries := &qbtypes.TimeSeries{
@@ -246,8 +246,8 @@ func (p *BaseSeasonalProvider) getBounds(
}
for idx, curr := range series.Values {
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(weekSeries)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(weekSeries)
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
upperBoundSeries.Values = append(upperBoundSeries.Values, &qbtypes.TimeSeriesValue{
Timestamp: curr.Timestamp,
Value: upperBound,
@@ -398,6 +398,8 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
aggOfInterest := result.Aggregations[0]
for _, series := range aggOfInterest.Series {
stdDev := p.getStdDev(series)
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
pastPeriodSeries := p.getMatchingSeries(ctx, pastPeriodResult, series)
currentSeasonSeries := p.getMatchingSeries(ctx, currentSeasonResult, series)
@@ -405,9 +407,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past2SeasonSeries := p.getMatchingSeries(ctx, past2SeasonResult, series)
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
stdDev := p.getStdDev(currentSeasonSeries)
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
prevSeriesAvg := p.getAvg(pastPeriodSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
@@ -436,7 +435,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
upperBoundSeries, lowerBoundSeries := p.getBounds(
series,
predictedSeries,
currentSeasonSeries,
zScoreThreshold,
)
aggOfInterest.UpperBoundSeries = append(aggOfInterest.UpperBoundSeries, upperBoundSeries)
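
Taken together, these hunks change where the spread estimate comes from: getBounds drops the separate weekSeries argument and uses the standard deviation of the series being evaluated, which also matches the pre-existing comment above the function ("std dev of the series"). Written out, the bounds after this change are (with w = movingAvgWindowSize and z the z-score threshold; a paraphrase of the code, not an authoritative spec):

\mathrm{upper}_i = \mathrm{MA}_w(\mathrm{predicted})_i + z\,\sigma(\mathrm{series}), \qquad \mathrm{lower}_i = \mathrm{MA}_w(\mathrm{predicted})_i - z\,\sigma(\mathrm{series})

Previously \sigma was taken over the seasonal series (weekSeries, passed as currentSeasonSeries at the call site removed in the last hunk), and the stdDev log line now runs before the matching-series lookups.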

View File

@@ -1,79 +0,0 @@
package openfgaauthz
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)
type provider struct {
pkgAuthzService authz.AuthZ
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.pkgAuthzService.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.pkgAuthzService.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.pkgAuthzService.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
}

132  ee/http/middleware/authz.go  Normal file
View File

@@ -0,0 +1,132 @@
package middleware
import (
"log/slog"
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
const (
authzDeniedMessage string = "::AUTHZ-DENIED::"
)
type AuthZ struct {
logger *slog.Logger
authzService authz.AuthZ
}
func NewAuthZ(logger *slog.Logger) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
if err := claims.IsViewer(); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
if err := claims.IsEditor(); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
if err := claims.IsAdmin(); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) SelfAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
id := mux.Vars(req)["id"]
if err := claims.IsSelfAccess(id); err != nil {
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
render.Error(rw, err)
return
}
next(rw, req)
})
}
func (middleware *AuthZ) OpenAccess(next http.HandlerFunc) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
next(rw, req)
})
}
// Check middleware accepts the relation, typeable, parentTypeable (for direct access + group relations) and a callback function to derive selector and parentSelectors on per request basis.
func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relation, translation authtypes.Relation, typeable authtypes.Typeable, cb authtypes.SelectorCallbackFn) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
selector, err := cb(req.Context(), claims)
if err != nil {
render.Error(rw, err)
return
}
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, relation, typeable, selector)
if err != nil {
render.Error(rw, err)
return
}
next(rw, req)
})
}

View File

@@ -78,6 +78,11 @@ func NewAnomalyRule(
opts = append(opts, baserules.WithLogger(logger))
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
target := -1 * *p.RuleCondition.Target
p.RuleCondition.Target = &target
}
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
if err != nil {
return nil, err
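
This hunk negates the target when the comparison operator is ValueIsBelow, presumably so the anomaly rule can evaluate a single direction internally: since x < T holds exactly when -x > -T, flipping the sign of the target (with the compared values treated consistently elsewhere in the rule) expresses a below-threshold condition as an above-threshold one. That reading is inferred from this hunk alone; the rest of NewAnomalyRule is not shown here.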

View File

@@ -1,16 +0,0 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 92.2 65" style="enable-background:new 0 0 92.2 65;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="65" width="92.2" x="-43.7" y="-98">
</sliceSourceBounds>
</sfw>
</metadata>
<path class="st0" d="M66.5,0H52.4l25.7,65h14.1L66.5,0z M25.7,0L0,65h14.4l5.3-13.6h26.9L51.8,65h14.4L40.5,0C40.5,0,25.7,0,25.7,0z
M24.3,39.3l8.8-22.8l8.8,22.8H24.3z">
</path>
</svg>

Before: 714 B (deleted image)

View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Claude</title><path d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z" fill="#D97757" fill-rule="nonzero"></path></svg>

Before: 1.7 KiB (deleted image)

View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>DeepSeek</title><path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z" fill="#4D6BFE"></path></svg>

Before: 2.1 KiB (deleted image)

View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Gemini</title><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="#3186FF"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-0)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-1)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-2)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-0" x1="7" x2="11" y1="15.5" y2="12"><stop stop-color="#08B962"></stop><stop offset="1" stop-color="#08B962" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-1" x1="8" x2="11.5" y1="5.5" y2="11"><stop stop-color="#F94543"></stop><stop offset="1" stop-color="#F94543" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-2" x1="3.5" x2="17.5" y1="13.5" y2="12"><stop stop-color="#FABC12"></stop><stop offset=".46" stop-color="#FABC12" stop-opacity="0"></stop></linearGradient></defs></svg>

Before: 2.8 KiB (deleted image)

View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>LangChain</title><path d="M8.373 14.502c.013-.06.024-.118.038-.17l.061.145c.115.28.229.557.506.714-.012.254-.334.357-.552.326-.048-.114-.115-.228-.255-.164-.143.056-.3-.01-.266-.185.333-.012.407-.371.468-.666zM18.385 9.245c-.318 0-.616.122-.839.342l-.902.887c-.243.24-.368.572-.343.913l.006.056c.032.262.149.498.337.682.13.128.273.21.447.266a.866.866 0 01-.247.777l-.056.055a2.022 2.022 0 01-1.355-1.555l-.01-.057-.046.037c-.03.024-.06.05-.088.078l-.902.887a1.156 1.156 0 000 1.65c.231.228.535.342.84.342.304 0 .607-.114.838-.341l.902-.888a1.156 1.156 0 00-.436-1.921.953.953 0 01.276-.842 2.062 2.062 0 011.371 1.57l.01.057.047-.037c.03-.024.06-.05.088-.078l.902-.888a1.155 1.155 0 000-1.65 1.188 1.188 0 00-.84-.342z" fill="#1C3C3C"></path><path clip-rule="evenodd" d="M17.901 6H6.1C2.736 6 0 8.692 0 12s2.736 6 6.099 6H17.9C21.264 18 24 15.308 24 12s-2.736-6-6.099-6zm-5.821 9.407c-.195.04-.414.047-.562-.106-.045.1-.136.077-.221.056a.797.797 0 00-.061-.014c-.01.025-.017.048-.026.073-.329.021-.575-.309-.732-.558a4.991 4.991 0 00-.473-.21c-.172-.07-.345-.14-.509-.23a2.218 2.218 0 00-.004.173c-.002.244-.004.503-.227.651-.007.295.236.292.476.29.207-.003.41-.005.447.184a.485.485 0 01-.05.003c-.046 0-.092 0-.127.034-.117.111-.242.063-.372.013-.12-.046-.243-.094-.367-.02a2.318 2.318 0 00-.262.154.97.97 0 01-.548.194c-.024-.036-.014-.059.006-.08a.562.562 0 00.043-.056c.019-.028.035-.057.051-.084.054-.095.103-.18.242-.22-.185-.029-.344.055-.5.137l-.004.002a4.21 4.21 0 01-.065.034c-.097.04-.154.009-.212-.023-.082-.045-.168-.092-.376.04-.04-.032-.02-.061.002-.086.091-.109.21-.125.345-.119-.351-.193-.604-.056-.81.055-.182.098-.327.176-.471-.012-.065.017-.102.063-.138.108-.015.02-.03.038-.047.055-.035-.039-.027-.083-.018-.128l.005-.026a.242.242 0 00.003-.03l-.027-.01c-.053-.022-.105-.044-.09-.124-.117-.04-.2.03-.286.094-.054-.041-.01-.095.032-.145a.279.279 0 00.045-.065c.038-.065.103-.067.166-.069.054-.001.108-.003.145-.042.133-.075.297-.036.462.003.121.028.242.057.354.042.203.025.454-.18.352-.385-.186-.233-.184-.528-.183-.813v-.143c-.016-.108-.172-.233-.328-.358-.12-.095-.24-.191-.298-.28-.16-.177-.285-.382-.409-.585l-.015-.024c-.212-.404-.297-.86-.382-1.315-.103-.546-.205-1.09-.526-1.54-.266.144-.612.075-.841-.118-.12.107-.13.247-.138.396l-.001.014c-.297-.292-.26-.844-.023-1.17.097-.128.213-.233.342-.326.03-.021.04-.042.039-.074.235-1.04 1.836-.839 2.342-.103.167.206.281.442.395.678.137.283.273.566.5.795.22.237.452.463.684.689.359.35.718.699 1.032 1.089.49.587.839 1.276 1.144 1.97.05.092.08.193.11.293.044.15.089.299.2.417.026.035.084.088.149.148.156.143.357.328.289.409.009.019.027.04.05.06.032.028.074.058.116.088.122.087.25.178.16.25zm7.778-3.545l-.902.887c-.24.237-.537.413-.859.51l-.017.005-.006.015A2.021 2.021 0 0117.6 14l-.902.888c-.393.387-.916.6-1.474.6-.557 0-1.08-.213-1.474-.6a2.03 2.03 0 010-2.9l.902-.888c.242-.238.531-.409.859-.508l.016-.004.006-.016c.105-.272.265-.516.475-.724l.902-.887c.393-.387.917-.6 1.474-.6.558 0 1.08.213 1.474.6.394.387.61.902.61 1.45 0 .549-.216 1.064-.61 1.45v.001z" fill="#1C3C3C" fill-rule="evenodd"></path></svg>

Before: 3.1 KiB (deleted image)

View File

@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>LlamaIndex</title><path d="M15.855 17.122c-2.092.924-4.358.545-5.23.24 0 .21-.01.857-.048 1.78-.038.924-.332 1.507-.475 1.684.016.577.029 1.837-.047 2.26a1.93 1.93 0 01-.476.914H8.295c.114-.577.555-.946.761-1.058.114-1.193-.11-2.229-.238-2.597-.126.449-.437 1.49-.665 2.068a6.418 6.418 0 01-.713 1.299h-.951c-.048-.578.27-.77.475-.77.095-.177.323-.731.476-1.54.152-.807-.064-2.324-.19-2.981v-2.068c-1.522-.818-2.092-1.636-2.473-2.55-.304-.73-.222-1.843-.142-2.308-.096-.176-.373-.625-.476-1.25-.142-.866-.063-1.491 0-1.828-.095-.096-.285-.587-.285-1.78 0-1.192.349-1.811.523-1.972v-.529c-.666-.048-1.331-.336-1.712-.721-.38-.385-.095-.962.143-1.154.238-.193.475-.049.808-.145.333-.096.618-.192.76-.48C4.512 1.403 4.287.448 4.16 0c.57.077.935.577 1.046.818V0c.713.337 1.997 1.154 2.425 2.934.342 1.424.586 4.409.665 5.723 1.823.016 4.137-.26 6.229.193 1.901.412 2.757 1.25 3.755 1.25.999 0 1.57-.577 2.282-.096.714.481 1.094 1.828.999 2.838-.076.808-.697 1.074-.998 1.106-.38 1.27 0 2.485.237 2.934v1.827c.111.16.333.655.333 1.347 0 .693-.222 1.154-.333 1.299.19 1.077-.08 2.18-.238 2.597h-1.283c.152-.385.412-.481.523-.481.228-1.193.063-2.293-.048-2.693-.722-.424-1.188-1.17-1.331-1.491.016.272-.029 1.029-.333 1.875-.304.847-.76 1.347-.95 1.491v1.01h-1.284c0-.615.348-.737.523-.721.222-.4.76-1.01.76-2.212 0-1.015-.713-1.492-1.236-2.405-.248-.434-.127-.978-.047-1.203z" fill="url(#lobe-icons-llama-index-fill)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-llama-index-fill" x1="4.021" x2="24.613" y1="2.02" y2="19.277"><stop offset=".062" stop-color="#F6DCD9"></stop><stop offset=".326" stop-color="#FFA5EA"></stop><stop offset=".589" stop-color="#45DFF8"></stop><stop offset="1" stop-color="#BC8DEB"></stop></linearGradient></defs></svg>

Before: 1.8 KiB (deleted image)

File diff suppressed because one or more lines are too long

Before: 5.5 KiB (deleted image)

View File

@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="none"><path fill="#06D092" d="M8 0L1 4v8l7 4 7-4V4L8 0zm3.119 8.797L9.254 9.863 7.001 8.65v2.549l-2.118 1.33v-5.33l1.68-1.018 2.332 1.216V4.794l2.23-1.322-.006 5.325z"/></svg>

Before: 389 B (deleted image)

View File

@@ -1,4 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128">
<path fill="#f5a800" d="M67.648 69.797c-5.246 5.25-5.246 13.758 0 19.008 5.25 5.246 13.758 5.246 19.004 0 5.25-5.25 5.25-13.758 0-19.008-5.246-5.246-13.754-5.246-19.004 0Zm14.207 14.219a6.649 6.649 0 0 1-9.41 0 6.65 6.65 0 0 1 0-9.407 6.649 6.649 0 0 1 9.41 0c2.598 2.586 2.598 6.809 0 9.407ZM86.43 3.672l-8.235 8.234a4.17 4.17 0 0 0 0 5.875l32.149 32.149a4.17 4.17 0 0 0 5.875 0l8.234-8.235c1.61-1.61 1.61-4.261 0-5.87L92.29 3.671a4.159 4.159 0 0 0-5.86 0ZM28.738 108.895a3.763 3.763 0 0 0 0-5.31l-4.183-4.187a3.768 3.768 0 0 0-5.313 0l-8.644 8.649-.016.012-2.371-2.375c-1.313-1.313-3.45-1.313-4.75 0-1.313 1.312-1.313 3.449 0 4.75l14.246 14.242a3.353 3.353 0 0 0 4.746 0c1.3-1.313 1.313-3.45 0-4.746l-2.375-2.375.016-.012Zm0 0"/>
<path fill="#425cc7" d="M72.297 27.313 54.004 45.605c-1.625 1.625-1.625 4.301 0 5.926L65.3 62.824c7.984-5.746 19.18-5.035 26.363 2.153l9.148-9.149c1.622-1.625 1.622-4.297 0-5.922L78.22 27.313a4.185 4.185 0 0 0-5.922 0ZM60.55 67.585l-6.672-6.672c-1.563-1.562-4.125-1.562-5.684 0l-23.53 23.54a4.036 4.036 0 0 0 0 5.687l13.331 13.332a4.036 4.036 0 0 0 5.688 0l15.132-15.157c-3.199-6.609-2.625-14.593 1.735-20.73Zm0 0"/>
</svg>

Before: 1.2 KiB (deleted image)

View File

@@ -1,99 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
width="64"
height="64"
viewBox="0 0 64 64"
version="1.1"
id="svg20"
sodipodi:docname="supabase-icon.svg"
style="fill:none"
inkscape:version="0.92.4 (5da689c313, 2019-01-14)">
<metadata
id="metadata24">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
<dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1687"
inkscape:window-height="849"
id="namedview22"
showgrid="false"
inkscape:zoom="2.0884956"
inkscape:cx="54.5"
inkscape:cy="56.5"
inkscape:window-x="70"
inkscape:window-y="0"
inkscape:window-maximized="0"
inkscape:current-layer="svg20" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path2"
style="fill:url(#paint0_linear);stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 37.41219,62.936701 c -1.634985,2.05896 -4.950068,0.93085 -4.989463,-1.69817 L 31.846665,22.786035 h 25.855406 c 4.683108,0 7.294967,5.409033 4.382927,9.07673 z"
id="path4"
style="fill:url(#paint1_linear);fill-opacity:0.2;stroke-width:0.57177335"
inkscape:connector-curvature="0" />
<path
d="m 26.89694,1.0634102 c 1.634986,-2.05918508 4.950125,-0.93090008 4.989521,1.698149 L 32.138899,41.214003 H 6.607076 c -4.6832501,0 -7.29518376,-5.409032 -4.3830007,-9.07673 z"
id="path6"
inkscape:connector-curvature="0"
style="fill:#3ecf8e;stroke-width:0.57177335" />
<defs
id="defs18">
<linearGradient
id="paint0_linear"
x1="53.973801"
y1="54.973999"
x2="94.163498"
y2="71.829498"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
stop-color="#249361"
id="stop8" />
<stop
offset="1"
stop-color="#3ECF8E"
id="stop10" />
</linearGradient>
<linearGradient
id="paint1_linear"
x1="36.1558"
y1="30.577999"
x2="54.484402"
y2="65.080597"
gradientUnits="userSpaceOnUse"
gradientTransform="matrix(0.57177306,0,0,0.57177334,0.98590077,-0.12074988)">
<stop
id="stop13" />
<stop
offset="1"
stop-opacity="0"
id="stop15" />
</linearGradient>
</defs>
</svg>

Before: 3.2 KiB (deleted image)

View File

@@ -27,7 +27,6 @@ import { IUser } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { KBarCommandPaletteProvider } from 'providers/KBarCommandPaletteProvider';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
@@ -383,22 +382,20 @@ function App(): JSX.Element {
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
</KeyboardHotkeysProvider>

View File

@@ -97,6 +97,7 @@ export const metricsSpaceAggregationOperatorsByType = {
Gauge: metricsGaugeSpaceAggregateOperatorOptions,
Histogram: metricsHistogramSpaceAggregateOperatorOptions,
ExponentialHistogram: metricsHistogramSpaceAggregateOperatorOptions,
Summary: metricsGaugeSpaceAggregateOperatorOptions,
};
export const mapOfQueryFilters: Record<DataSource, QueryAdditionalFilter[]> = {
@@ -372,6 +373,7 @@ export enum ATTRIBUTE_TYPES {
GAUGE = 'Gauge',
HISTOGRAM = 'Histogram',
EXPONENTIAL_HISTOGRAM = 'ExponentialHistogram',
SUMMARY = 'Summary',
}
export type IQueryBuilderState = 'search';
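
With Summary added to both the operator map and the ATTRIBUTE_TYPES enum, Summary metrics resolve to the same space-aggregation operators as Gauge. A minimal sketch of how a consumer might read that mapping (the helper and the fallback are illustrative, not part of the codebase; the import path is assumed):

import {
	ATTRIBUTE_TYPES,
	metricsGaugeSpaceAggregateOperatorOptions,
	metricsSpaceAggregationOperatorsByType,
} from 'constants/queryBuilder'; // import path assumed for this sketch

// Hypothetical helper: resolve the space-aggregation operator options for a
// metric type, falling back to the Gauge set when the type is not in the map.
function spaceAggregationOptionsFor(metricType: string): unknown {
	const byType = metricsSpaceAggregationOperatorsByType as Record<string, unknown>;
	return byType[metricType] ?? metricsGaugeSpaceAggregateOperatorOptions;
}

// With the mapping above, Summary resolves to the same options as Gauge:
// spaceAggregationOptionsFor(ATTRIBUTE_TYPES.SUMMARY)
//   === metricsGaugeSpaceAggregateOperatorOptions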

View File

@@ -13,12 +13,14 @@ import APIError from 'types/api/error';
import { useCreateAlertState } from '../context';
import AdvancedOptions from '../EvaluationSettings/AdvancedOptions';
import Stepper from '../Stepper';
import { showCondensedLayout } from '../utils';
import AlertThreshold from './AlertThreshold';
import AnomalyThreshold from './AnomalyThreshold';
import { ANOMALY_TAB_TOOLTIP, THRESHOLD_TAB_TOOLTIP } from './constants';
function AlertCondition(): JSX.Element {
const { alertType, setAlertType } = useCreateAlertState();
const showCondensedLayoutFlag = showCondensedLayout();
const {
data,
@@ -106,9 +108,11 @@ function AlertCondition(): JSX.Element {
refreshChannels={refreshChannels}
/>
)}
<div className="condensed-advanced-options-container">
<AdvancedOptions />
</div>
{showCondensedLayoutFlag ? (
<div className="condensed-advanced-options-container">
<AdvancedOptions />
</div>
) : null}
</div>
);
}

View File

@@ -18,6 +18,7 @@ import {
THRESHOLD_OPERATOR_OPTIONS,
} from '../context/constants';
import EvaluationSettings from '../EvaluationSettings/EvaluationSettings';
import { showCondensedLayout } from '../utils';
import ThresholdItem from './ThresholdItem';
import { AnomalyAndThresholdProps, UpdateThreshold } from './types';
import {
@@ -42,6 +43,8 @@ function AlertThreshold({
setNotificationSettings,
} = useCreateAlertState();
const showCondensedLayoutFlag = showCondensedLayout();
const { currentQuery } = useQueryBuilder();
const queryNames = getQueryNames(currentQuery);
@@ -160,12 +163,17 @@ function AlertThreshold({
}),
);
const evaluationWindowContext = showCondensedLayoutFlag ? (
<EvaluationSettings />
) : (
<strong>Evaluation Window.</strong>
);
return (
<div
className={classNames(
'alert-threshold-container',
'condensed-alert-threshold-container',
)}
className={classNames('alert-threshold-container', {
'condensed-alert-threshold-container': showCondensedLayoutFlag,
})}
>
{/* Main condition sentence */}
<div className="alert-condition-sentences">
@@ -211,7 +219,7 @@ function AlertThreshold({
options={matchTypeOptionsWithTooltips}
/>
<Typography.Text className="sentence-text">
during the <EvaluationSettings />
during the {evaluationWindowContext}
</Typography.Text>
</div>
</div>

View File

@@ -110,6 +110,7 @@ jest.mock('container/NewWidget/RightContainer/alertFomatCategories', () => ({
jest.mock('container/CreateAlertV2/utils', () => ({
...jest.requireActual('container/CreateAlertV2/utils'),
showCondensedLayout: jest.fn().mockReturnValue(false),
}));
const TEST_STRINGS = {
@@ -158,9 +159,7 @@ describe('AlertThreshold', () => {
expect(screen.getByText('Send a notification when')).toBeInTheDocument();
expect(screen.getByText('the threshold(s)')).toBeInTheDocument();
expect(screen.getByText('during the')).toBeInTheDocument();
expect(
screen.getByTestId('condensed-evaluation-settings-container'),
).toBeInTheDocument();
expect(screen.getByText('Evaluation Window.')).toBeInTheDocument();
});
it('renders query selection dropdown', async () => {

View File

@@ -8,11 +8,12 @@ import AlertCondition from './AlertCondition';
import { CreateAlertProvider } from './context';
import { buildInitialAlertDef } from './context/utils';
import CreateAlertHeader from './CreateAlertHeader';
import EvaluationSettings from './EvaluationSettings';
import Footer from './Footer';
import NotificationSettings from './NotificationSettings';
import QuerySection from './QuerySection';
import { CreateAlertV2Props } from './types';
import { Spinner } from './utils';
import { showCondensedLayout, Spinner } from './utils';
function CreateAlertV2({ alertType }: CreateAlertV2Props): JSX.Element {
const queryToRedirect = buildInitialAlertDef(alertType);
@@ -22,6 +23,8 @@ function CreateAlertV2({ alertType }: CreateAlertV2Props): JSX.Element {
useShareBuilderUrl({ defaultValue: currentQueryToRedirect });
const showCondensedLayoutFlag = showCondensedLayout();
return (
<CreateAlertProvider initialAlertType={alertType}>
<Spinner />
@@ -29,6 +32,7 @@ function CreateAlertV2({ alertType }: CreateAlertV2Props): JSX.Element {
<CreateAlertHeader />
<QuerySection />
<AlertCondition />
{!showCondensedLayoutFlag ? <EvaluationSettings /> : null}
<NotificationSettings />
</div>
<Footer />

View File

@@ -1,19 +1,28 @@
import './styles.scss';
import { Button, Popover } from 'antd';
import { Button, Popover, Typography } from 'antd';
import { ChevronDown, ChevronUp } from 'lucide-react';
import { useState } from 'react';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import { useCreateAlertState } from '../context';
import Stepper from '../Stepper';
import { showCondensedLayout } from '../utils';
import AdvancedOptions from './AdvancedOptions';
import EvaluationWindowPopover from './EvaluationWindowPopover';
import { getEvaluationWindowTypeText, getTimeframeText } from './utils';
function EvaluationSettings(): JSX.Element {
const { evaluationWindow, setEvaluationWindow } = useCreateAlertState();
const {
alertType,
evaluationWindow,
setEvaluationWindow,
} = useCreateAlertState();
const [
isEvaluationWindowPopoverOpen,
setIsEvaluationWindowPopoverOpen,
] = useState(false);
const showCondensedLayoutFlag = showCondensedLayout();
const popoverContent = (
<Popover
@@ -48,12 +57,33 @@ function EvaluationSettings(): JSX.Element {
</Popover>
);
// Layout consists of only the evaluation window popover
if (showCondensedLayoutFlag) {
return (
<div
className="condensed-evaluation-settings-container"
data-testid="condensed-evaluation-settings-container"
>
{popoverContent}
</div>
);
}
// Layout consists of
// - Stepper header
// - Evaluation window popover
// - Advanced options
return (
<div
className="condensed-evaluation-settings-container"
data-testid="condensed-evaluation-settings-container"
>
{popoverContent}
<div className="evaluation-settings-container">
<Stepper stepNumber={3} label="Evaluation settings" />
{alertType !== AlertTypes.ANOMALY_BASED_ALERT && (
<div className="evaluate-alert-conditions-container">
<Typography.Text>Check conditions using data from</Typography.Text>
<div className="evaluate-alert-conditions-separator" />
{popoverContent}
</div>
)}
<AdvancedOptions />
</div>
);
}

View File

@@ -1,11 +1,14 @@
import { render, screen } from '@testing-library/react';
import * as alertState from 'container/CreateAlertV2/context';
import * as utils from 'container/CreateAlertV2/utils';
import { AlertTypes } from 'types/api/alerts/alertTypes';
import EvaluationSettings from '../EvaluationSettings';
import { createMockAlertContextState } from './testUtils';
jest.mock('container/CreateAlertV2/utils', () => ({
...jest.requireActual('container/CreateAlertV2/utils'),
showCondensedLayout: jest.fn().mockReturnValue(false),
}));
const mockSetEvaluationWindow = jest.fn();
@@ -15,14 +18,52 @@ jest.spyOn(alertState, 'useCreateAlertState').mockReturnValue(
}),
);
jest.mock('../AdvancedOptions', () => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="advanced-options">AdvancedOptions</div>
),
}));
const EVALUATION_SETTINGS_TEXT = 'Evaluation settings';
const CHECK_CONDITIONS_USING_DATA_FROM_TEXT =
'Check conditions using data from';
describe('EvaluationSettings', () => {
it('should render the condensed evaluation settings layout', () => {
it('should render the default evaluation settings layout', () => {
render(<EvaluationSettings />);
expect(screen.getByText(EVALUATION_SETTINGS_TEXT)).toBeInTheDocument();
expect(
screen.getByText(CHECK_CONDITIONS_USING_DATA_FROM_TEXT),
).toBeInTheDocument();
expect(screen.getByTestId('advanced-options')).toBeInTheDocument();
});
it('should not render evaluation window for anomaly based alert', () => {
jest.spyOn(alertState, 'useCreateAlertState').mockReturnValueOnce(
createMockAlertContextState({
alertType: AlertTypes.ANOMALY_BASED_ALERT,
}),
);
render(<EvaluationSettings />);
expect(screen.getByText(EVALUATION_SETTINGS_TEXT)).toBeInTheDocument();
expect(
screen.queryByText(CHECK_CONDITIONS_USING_DATA_FROM_TEXT),
).not.toBeInTheDocument();
});
it('should render the condensed evaluation settings layout', () => {
jest.spyOn(utils, 'showCondensedLayout').mockReturnValueOnce(true);
render(<EvaluationSettings />);
// Header, check conditions using data from and advanced options should be hidden
expect(screen.queryByText(EVALUATION_SETTINGS_TEXT)).not.toBeInTheDocument();
expect(
screen.queryByText(CHECK_CONDITIONS_USING_DATA_FROM_TEXT),
).not.toBeInTheDocument();
expect(screen.queryByTestId('advanced-options')).not.toBeInTheDocument();
// Only evaluation window popover should be visible
expect(
screen.getByTestId('condensed-evaluation-settings-container'),
).toBeInTheDocument();
// Verify that default option is selected
expect(screen.getByText('Rolling')).toBeInTheDocument();
expect(screen.getByText('Last 5 minutes')).toBeInTheDocument();
});
});

View File

@@ -9,10 +9,13 @@ import {
} from '../context/constants';
import AdvancedOptionItem from '../EvaluationSettings/AdvancedOptionItem';
import Stepper from '../Stepper';
import { showCondensedLayout } from '../utils';
import MultipleNotifications from './MultipleNotifications';
import NotificationMessage from './NotificationMessage';
function NotificationSettings(): JSX.Element {
const showCondensedLayoutFlag = showCondensedLayout();
const {
notificationSettings,
setNotificationSettings,
@@ -79,7 +82,10 @@ function NotificationSettings(): JSX.Element {
return (
<div className="notification-settings-container">
<Stepper stepNumber={3} label="Notification settings" />
<Stepper
stepNumber={showCondensedLayoutFlag ? 3 : 4}
label="Notification settings"
/>
<NotificationMessage />
<div className="notification-settings-content">
<MultipleNotifications />

View File

@@ -1,6 +1,7 @@
import { fireEvent, render, screen } from '@testing-library/react';
import * as createAlertContext from 'container/CreateAlertV2/context';
import { createMockAlertContextState } from 'container/CreateAlertV2/EvaluationSettings/__tests__/testUtils';
import * as utils from 'container/CreateAlertV2/utils';
import NotificationSettings from '../NotificationSettings';
@@ -25,6 +26,7 @@ jest.mock(
jest.mock('container/CreateAlertV2/utils', () => ({
...jest.requireActual('container/CreateAlertV2/utils'),
showCondensedLayout: jest.fn().mockReturnValue(false),
}));
const initialNotificationSettings = createMockAlertContextState()
@@ -40,10 +42,10 @@ const REPEAT_NOTIFICATIONS_TEXT = 'Repeat notifications';
const ENTER_TIME_INTERVAL_TEXT = 'Enter time interval...';
describe('NotificationSettings', () => {
it('renders the notification settings tab with step number 3 and default values', () => {
it('renders the notification settings tab with step number 4 and default values', () => {
render(<NotificationSettings />);
expect(screen.getByText('Notification settings')).toBeInTheDocument();
expect(screen.getByText('3')).toBeInTheDocument();
expect(screen.getByText('4')).toBeInTheDocument();
expect(screen.getByTestId('multiple-notifications')).toBeInTheDocument();
expect(screen.getByTestId('notification-message')).toBeInTheDocument();
expect(screen.getByText(REPEAT_NOTIFICATIONS_TEXT)).toBeInTheDocument();
@@ -54,6 +56,15 @@ describe('NotificationSettings', () => {
).toBeInTheDocument();
});
it('renders the notification settings tab with step number 3 in condensed layout', () => {
jest.spyOn(utils, 'showCondensedLayout').mockReturnValueOnce(true);
render(<NotificationSettings />);
expect(screen.getByText('Notification settings')).toBeInTheDocument();
expect(screen.getByText('3')).toBeInTheDocument();
expect(screen.getByTestId('multiple-notifications')).toBeInTheDocument();
expect(screen.getByTestId('notification-message')).toBeInTheDocument();
});
describe('Repeat notifications', () => {
it('renders the repeat notifications with inputs hidden when the repeat notifications switch is off', () => {
render(<NotificationSettings />);

View File

@@ -27,6 +27,16 @@ import {
import { EVALUATION_WINDOW_TIMEFRAME } from './EvaluationSettings/constants';
import { GetCreateAlertLocalStateFromAlertDefReturn } from './types';
// UI side feature flag
export const showNewCreateAlertsPage = (): boolean =>
localStorage.getItem('showNewCreateAlertsPage') === 'true';
// UI side FF to switch between the 2 layouts of the create alert page
// Layout 1 - Default layout
// Layout 2 - Condensed layout
export const showCondensedLayout = (): boolean =>
localStorage.getItem('hideCondensedLayout') !== 'true';
export function Spinner(): JSX.Element | null {
const { isCreatingAlertRule, isUpdatingAlertRule } = useCreateAlertState();
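
Both helpers here are UI-side flags read from localStorage: the new create-alerts page is opt-in via showNewCreateAlertsPage, while the condensed layout is enabled by default and only turned off when hideCondensedLayout is set to 'true'. A quick way to flip them from the browser console (a manual-toggling sketch, not a documented workflow; takes effect the next time the components render):

// Opt in to the new create-alerts page.
localStorage.setItem('showNewCreateAlertsPage', 'true');

// Disable the condensed layout and fall back to the default (non-condensed) layout.
localStorage.setItem('hideCondensedLayout', 'true');

// Remove the key to return to the condensed layout, the default when the key is absent.
localStorage.removeItem('hideCondensedLayout');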

View File

@@ -8,10 +8,11 @@ import { PostableAlertRuleV2 } from 'types/api/alerts/alertTypesV2';
import AlertCondition from '../CreateAlertV2/AlertCondition';
import { buildInitialAlertDef } from '../CreateAlertV2/context/utils';
import EvaluationSettings from '../CreateAlertV2/EvaluationSettings';
import Footer from '../CreateAlertV2/Footer';
import NotificationSettings from '../CreateAlertV2/NotificationSettings';
import QuerySection from '../CreateAlertV2/QuerySection';
import { Spinner } from '../CreateAlertV2/utils';
import { showCondensedLayout, Spinner } from '../CreateAlertV2/utils';
interface EditAlertV2Props {
alertType?: AlertTypes;
@@ -32,12 +33,15 @@ function EditAlertV2({
useShareBuilderUrl({ defaultValue: currentQueryToRedirect });
const showCondensedLayoutFlag = showCondensedLayout();
return (
<>
<Spinner />
<div className="create-alert-v2-container">
<QuerySection />
<AlertCondition />
{!showCondensedLayoutFlag ? <EvaluationSettings /> : null}
<NotificationSettings />
</div>
<Footer />

View File

@@ -87,21 +87,6 @@
"imgUrl": "/Logos/signoz-brand-logo.svg",
"link": "https://signoz.io/docs/migration/migrate-from-signoz-self-host-to-signoz-cloud/"
},
{
"dataSource": "migrate-from-existing-opentelemetry",
"label": "From Existing OpenTelemetry",
"tags": ["migrate to SigNoz"],
"module": "home",
"relatedSearchKeywords": [
"apm migration",
"opentelemetry",
"migration guide",
"migrate",
"migration"
],
"imgUrl": "/Logos/opentelemetry.svg",
"link": "https://signoz.io/docs/migration/migrate-from-opentelemetry-to-signoz/"
},
{
"dataSource": "java",
"entityID": "dataSource",
@@ -2671,156 +2656,6 @@
],
"link": "https://signoz.io/docs/community/llm-monitoring/"
},
{
"dataSource": "anthropic-api",
"label": "Anthropic API",
"imgUrl": "/Logos/anthropic-api-monitoring.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"llm monitoring",
"large language model observability",
"monitor anthropic",
"llm response time tracing",
"llm metrics",
"otel llm integration",
"llm performance tracking",
"metrics",
"traces",
"logs"
],
"link": "https://signoz.io/docs/anthropic-monitoring/"
},
{
"dataSource": "claude-code",
"label": "Claude Code",
"imgUrl": "/Logos/claude-code.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"claude code monitoring",
"claude code observability",
"claude code performance tracking",
"claude code latency tracing",
"claude code metrics",
"otel claude integration",
"claude code response time",
"claude code logs",
"claude code error tracking",
"claude code debugging",
"metrics",
"logs"
],
"link": "https://signoz.io/docs/claude-code-monitoring/"
},
{
"dataSource": "deepseek-api",
"label": "DeepSeek API",
"imgUrl": "/Logos/deepseek.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"deepseek api monitoring",
"deepseek api observability",
"deepseek api performance tracking",
"deepseek api latency tracing",
"deepseek api metrics",
"otel deepseek integration",
"deepseek api response time",
"deepseek api logs",
"deepseek api error tracking",
"deepseek api debugging",
"metrics",
"logs"
],
"link": "https://signoz.io/docs/deepseek-monitoring/"
},
{
"dataSource": "google-gemini-api",
"label": "Google Gemini",
"imgUrl": "/Logos/google-gemini.svg",
"tags": ["LLM Monitoring"],
"module": "metrics",
"relatedSearchKeywords": [
"google gemini api monitoring",
"google gemini api observability",
"google gemini api performance tracking",
"google gemini api latency tracing",
"google gemini api metrics",
"otel google gemini integration",
"google gemini api response time",
"google gemini api logs",
"google gemini api error tracking",
"google gemini api debugging",
"gemini",
"metrics",
"logs"
],
"link": "https://signoz.io/docs/google-gemini-monitoring/"
},
{
"dataSource": "langchain",
"label": "LangChain",
"imgUrl": "/Logos/langchain.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"langchain monitoring",
"langchain observability",
"langchain performance tracking",
"langchain latency tracing",
"langchain metrics",
"otel langchain integration",
"langchain response time",
"langchain logs",
"langchain error tracking",
"langchain debugging",
"traces"
],
"link": "https://signoz.io/docs/langchain-monitoring/"
},
{
"dataSource": "llamaindex",
"label": "LlamaIndex",
"imgUrl": "/Logos/llamaindex.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"llamaindex monitoring",
"llamaindex observability",
"llamaindex performance tracking",
"llamaindex latency tracing",
"llamaindex metrics",
"otel llamaindex integration",
"llamaindex response time",
"llamaindex logs",
"llamaindex error tracking",
"llamaindex debugging",
"traces"
],
"link": "https://signoz.io/docs/llamaindex-monitoring/"
},
{
"dataSource": "vercel-ai-sdk",
"label": "Vercel AI SDK",
"imgUrl": "/Logos/vercel.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"vercel ai sdk monitoring",
"vercel ai sdk observability",
"vercel ai sdk performance tracking",
"vercel ai sdk latency tracing",
"vercel ai sdk metrics",
"otel vercel ai sdk integration",
"vercel ai sdk response time",
"vercel ai sdk logs",
"vercel ai sdk error tracking",
"vercel ai sdk debugging",
"traces"
],
"link": "https://signoz.io/docs/vercel-ai-sdk-monitoring/"
},
{
"dataSource": "http-endpoints-monitoring",
"label": "HTTP Endpoints Monitoring",
@@ -3556,58 +3391,5 @@
}
]
}
},
{
"dataSource": "microsoft-sql-server",
"label": "Microsoft SQL Server",
"imgUrl": "/Logos/microsoft-sql-server.svg",
"tags": ["integrations"],
"module": "metrics",
"relatedSearchKeywords": [
"sql server metrics",
"mssql monitoring",
"sql server performance",
"sql server observability",
"Microsoft",
"sql server logs",
"metrics",
"logs"
],
"id": "microsoft-sql-server",
"link": "https://signoz.io/docs/integrations/sql-server/"
},
{
"dataSource": "supabase",
"label": "Supabase",
"imgUrl": "/Logos/supabase.svg",
"tags": ["integrations"],
"module": "metrics",
"relatedSearchKeywords": [
"supabase metrics",
"supabase monitoring",
"supabase performance",
"supabase observability",
"supabase",
"metrics"
],
"id": "supabase",
"link": "https://signoz.io/docs/integrations/supabase/"
},
{
"dataSource": "nomad",
"label": "Nomad",
"imgUrl": "/Logos/nomad.svg",
"tags": ["integrations"],
"module": "metrics",
"relatedSearchKeywords": [
"nomad metrics",
"nomad monitoring",
"nomad performance",
"nomad observability",
"nomad",
"metrics"
],
"id": "nomad",
"link": "https://signoz.io/docs/integrations/nomad/"
}
]

View File

@@ -1,4 +1,3 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { TelemetryFieldKey } from 'api/v5/v5';
import { AxiosResponse } from 'axios';
@@ -56,10 +55,11 @@ const useOptionsMenu = ({
initialOptions = {},
}: UseOptionsMenuProps): UseOptionsMenu => {
const { notifications } = useNotifications();
const prefCtx = usePreferenceContext();
// TODO: send null to updateColumns and updateFormatting if dataSource is not logs or traces
const slice = dataSource === DataSource.TRACES ? prefCtx.traces : prefCtx.logs;
const { preferences, updateColumns, updateFormatting } = slice;
const {
preferences,
updateColumns,
updateFormatting,
} = usePreferenceContext();
const [searchText, setSearchText] = useState<string>('');
const [isFocused, setIsFocused] = useState<boolean>(false);

View File

@@ -5,6 +5,10 @@ import { SuccessResponseV2 } from 'types/api';
import { RoutingPolicy } from './types';
export function showRoutingPoliciesPage(): boolean {
return localStorage.getItem('showRoutingPoliciesPage') === 'true';
}
export function mapApiResponseToRoutingPolicies(
response: SuccessResponseV2<GetRoutingPoliciesResponse>,
): RoutingPolicy[] {
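
showRoutingPoliciesPage follows the same localStorage flag pattern and, judging by the AlertList hunk further down, gates whether the Routing Policies view is wired into the alerts page. Illustrative console usage:

localStorage.setItem('showRoutingPoliciesPage', 'true'); // expose the Routing Policies page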

View File

@@ -19,6 +19,7 @@ import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSea
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import createQueryParams from 'lib/createQueryParams';
import { Compass } from 'lucide-react';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useMemo } from 'react';
import { Virtuoso } from 'react-virtuoso';
import { ILog } from 'types/api/logs/log';
@@ -211,20 +212,24 @@ function SpanLogs({
const renderContent = useMemo(
() => (
<div className="span-logs-list-container">
<OverlayScrollbar isVirtuoso>
<Virtuoso
className="span-logs-virtuoso"
key="span-logs-virtuoso"
style={logs.length <= 35 ? { height: `calc(${logs.length} * 22px)` } : {}}
data={logs}
totalCount={logs.length}
itemContent={getItemContent}
overscan={200}
components={{
Footer: renderFooter,
}}
/>
</OverlayScrollbar>
<PreferenceContextProvider>
<OverlayScrollbar isVirtuoso>
<Virtuoso
className="span-logs-virtuoso"
key="span-logs-virtuoso"
style={
logs.length <= 35 ? { height: `calc(${logs.length} * 22px)` } : {}
}
data={logs}
totalCount={logs.length}
itemContent={getItemContent}
overscan={200}
components={{
Footer: renderFooter,
}}
/>
</OverlayScrollbar>
</PreferenceContextProvider>
</div>
),
[logs, getItemContent, renderFooter],

View File

@@ -261,16 +261,18 @@ describe('SpanDetailsDrawer', () => {
const logsButton = screen.getByRole('radio', { name: /logs/i });
fireEvent.click(logsButton);
// Wait for logs view to open and logs to be displayed
// Wait for logs view to open
await waitFor(() => {
expect(screen.getByTestId('overlay-scrollbar')).toBeInTheDocument();
});
// Verify logs are displayed
await waitFor(() => {
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-span-log-1')).toBeInTheDocument();
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-span-log-2')).toBeInTheDocument();
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-context-log-before')).toBeInTheDocument();
// eslint-disable-next-line sonarjs/no-duplicate-string
expect(screen.getByTestId('raw-log-context-log-after')).toBeInTheDocument();
});
});
@@ -283,9 +285,12 @@ describe('SpanDetailsDrawer', () => {
fireEvent.click(logsButton);
// Wait for all API calls to complete
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
});
await waitFor(
() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
},
{ timeout: 5000 },
);
// Verify the three distinct queries were made
const [spanQuery, beforeQuery, afterQuery] = apiCallHistory;
@@ -314,9 +319,12 @@ describe('SpanDetailsDrawer', () => {
fireEvent.click(logsButton);
// Wait for all API calls to complete
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
});
await waitFor(
() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
},
{ timeout: 5000 },
);
const [spanQuery, beforeQuery, afterQuery] = apiCallHistory;
@@ -476,17 +484,9 @@ describe('SpanDetailsDrawer', () => {
const logsButton = screen.getByRole('radio', { name: /logs/i });
fireEvent.click(logsButton);
// Wait for all API calls to complete first
await waitFor(() => {
expect(GetMetricQueryRange).toHaveBeenCalledTimes(3);
});
// Wait for all logs to be rendered - both span logs and context logs
// Wait for logs to load
await waitFor(() => {
expect(screen.getByTestId('raw-log-span-log-1')).toBeInTheDocument();
expect(screen.getByTestId('raw-log-span-log-2')).toBeInTheDocument();
expect(screen.getByTestId('raw-log-context-log-before')).toBeInTheDocument();
expect(screen.getByTestId('raw-log-context-log-after')).toBeInTheDocument();
});
// Verify span logs are highlighted

View File

@@ -203,6 +203,10 @@ export const useQueryOperations: UseQueryOperations = ({
case ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM:
setSpaceAggregationOptions(metricsHistogramSpaceAggregateOperatorOptions);
break;
case ATTRIBUTE_TYPES.SUMMARY:
setSpaceAggregationOptions(metricsGaugeSpaceAggregateOperatorOptions);
break;
default:
setSpaceAggregationOptions(metricsUnknownSpaceAggregateOperatorOptions);
break;
@@ -235,11 +239,17 @@ export const useQueryOperations: UseQueryOperations = ({
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUMMARY) {
newQuery.aggregateOperator = MetricAggregateOperator.MAX;
newQuery.timeAggregation = MetricAggregateOperator.MAX;
newQuery.spaceAggregation = MetricAggregateOperator.MAX;
} else {
newQuery.timeAggregation = '';
}
newQuery.spaceAggregation = '';
if (newQuery.aggregateAttribute?.type !== ATTRIBUTE_TYPES.SUMMARY) {
newQuery.spaceAggregation = '';
}
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
@@ -285,6 +295,15 @@ export const useQueryOperations: UseQueryOperations = ({
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUMMARY) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.MAX,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.MAX,
},
];
} else {
newQuery.aggregations = [
{
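Taken together, the hunks above give Summary metrics the Gauge operator set and default both time and space aggregation to MAX, while Gauge keeps AVG and unknown types stay empty. A condensed sketch of that mapping (string literals stand in for the ATTRIBUTE_TYPES and MetricAggregateOperator values used in the code):

type AggregationDefaults = { timeAggregation: string; spaceAggregation: string };

function defaultAggregationsFor(metricType: string): AggregationDefaults {
  switch (metricType) {
    case 'Gauge':
      return { timeAggregation: 'avg', spaceAggregation: '' };
    case 'Summary':
      // New in this change: summaries behave like gauges but default to max.
      return { timeAggregation: 'max', spaceAggregation: 'max' };
    default:
      return { timeAggregation: '', spaceAggregation: '' };
  }
}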

View File

@@ -6,10 +6,8 @@ import { DataSource } from 'types/common/queryBuilder';
export const useGetAllViews = (
sourcepage: DataSource | 'meter',
enabled?: boolean,
): UseQueryResult<AxiosResponse<AllViewsProps>, AxiosError> =>
useQuery<AxiosResponse<AllViewsProps>, AxiosError>({
queryKey: [{ sourcepage }],
queryFn: () => getAllViews(sourcepage as DataSource),
...(enabled !== undefined ? { enabled } : {}),
});
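The hook now spreads `enabled` into the query options only when the caller actually passes it, leaving the option out entirely otherwise. Illustrative usage (the hook's import path is assumed, not taken from this diff):

import { DataSource } from 'types/common/queryBuilder';
// Path assumed for illustration only.
import { useGetAllViews } from 'hooks/saveViews/useGetAllViews';

function ViewsExample(): null {
  useGetAllViews(DataSource.LOGS); // no flag: the query runs as usual
  useGetAllViews(DataSource.TRACES, false); // explicitly disabled until needed
  return null;
}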

View File

@@ -29,6 +29,10 @@ export const getMetricsOperatorsByAttributeType = ({
if (aggregateAttributeType === ATTRIBUTE_TYPES.GAUGE) {
return metricsOperatorsByType.Gauge;
}
if (aggregateAttributeType === ATTRIBUTE_TYPES.SUMMARY) {
return metricsOperatorsByType.Gauge;
}
}
if (dataSource === DataSource.METRICS && isEmpty(aggregateAttributeType)) {

View File

@@ -8,6 +8,7 @@ import ROUTES from 'constants/routes';
import AllAlertRules from 'container/ListAlertRules';
import { PlannedDowntime } from 'container/PlannedDowntime/PlannedDowntime';
import RoutingPolicies from 'container/RoutingPolicies';
import { showRoutingPoliciesPage } from 'container/RoutingPolicies/utils';
import TriggeredAlerts from 'container/TriggeredAlerts';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
@@ -27,27 +28,36 @@ function AllAlertList(): JSX.Element {
const search = urlQuery.get('search');
const showRoutingPoliciesPageFlag = showRoutingPoliciesPage();
const configurationTab = useMemo(() => {
const tabs = [
{
label: 'Planned Downtime',
key: 'planned-downtime',
children: <PlannedDowntime />,
},
{
label: 'Routing Policies',
key: 'routing-policies',
children: <RoutingPolicies />,
},
];
if (showRoutingPoliciesPageFlag) {
const tabs = [
{
label: 'Planned Downtime',
key: 'planned-downtime',
children: <PlannedDowntime />,
},
{
label: 'Routing Policies',
key: 'routing-policies',
children: <RoutingPolicies />,
},
];
return (
<Tabs
className="configuration-tabs"
defaultActiveKey="planned-downtime"
items={tabs}
/>
);
}
return (
<Tabs
className="configuration-tabs"
defaultActiveKey="planned-downtime"
items={tabs}
/>
<div className="planned-downtime-container">
<PlannedDowntime />
</div>
);
}, []);
}, [showRoutingPoliciesPageFlag]);
const items: TabsProps['items'] = [
{

View File

@@ -8,6 +8,7 @@ import { isDrilldownEnabled } from 'container/QueryTable/Drilldown/drilldownUtil
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect, useState } from 'react';
import { generatePath, useLocation, useParams } from 'react-router-dom';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -53,12 +54,14 @@ function DashboardWidget(): JSX.Element | null {
}
return (
<NewWidget
yAxisUnit={selectedWidget?.yAxisUnit}
selectedGraph={selectedGraph}
fillSpans={selectedWidget?.fillSpans}
enableDrillDown={isDrilldownEnabled()}
/>
<PreferenceContextProvider>
<NewWidget
yAxisUnit={selectedWidget?.yAxisUnit}
selectedGraph={selectedGraph}
fillSpans={selectedWidget?.fillSpans}
enableDrillDown={isDrilldownEnabled()}
/>
</PreferenceContextProvider>
);
}

View File

@@ -3,9 +3,14 @@ import ROUTES from 'constants/routes';
import InfraMonitoringHosts from 'container/InfraMonitoringHosts';
import InfraMonitoringK8s from 'container/InfraMonitoringK8s';
import { Inbox } from 'lucide-react';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Hosts: TabRoutes = {
Component: (): JSX.Element => <InfraMonitoringHosts />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<InfraMonitoringHosts />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Inbox size={16} /> Hosts
@@ -16,7 +21,11 @@ export const Hosts: TabRoutes = {
};
export const Kubernetes: TabRoutes = {
Component: (): JSX.Element => <InfraMonitoringK8s />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<InfraMonitoringK8s />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Inbox size={16} /> Kubernetes

View File

@@ -3,6 +3,7 @@ import { liveLogsCompositeQuery } from 'container/LiveLogs/constants';
import LiveLogsContainer from 'container/LiveLogs/LiveLogsContainer';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { DataSource } from 'types/common/queryBuilder';
@@ -14,7 +15,11 @@ function LiveLogs(): JSX.Element {
handleSetConfig(PANEL_TYPES.LIST, DataSource.LOGS);
}, [handleSetConfig]);
return <LiveLogsContainer />;
return (
<PreferenceContextProvider>
<LiveLogsContainer />
</PreferenceContextProvider>
);
}
export default LiveLogs;

View File

@@ -10,6 +10,7 @@ import LogsFilters from 'container/LogsFilters';
import LogsSearchFilter from 'container/LogsSearchFilter';
import LogsTable from 'container/LogsTable';
import history from 'lib/history';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useMemo } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
@@ -82,69 +83,71 @@ function OldLogsExplorer(): JSX.Element {
};
return (
<div className="old-logs-explorer">
<SpaceContainer
split={<Divider type="vertical" />}
align="center"
direction="horizontal"
>
<LogsSearchFilter />
<LogLiveTail />
</SpaceContainer>
<PreferenceContextProvider>
<div className="old-logs-explorer">
<SpaceContainer
split={<Divider type="vertical" />}
align="center"
direction="horizontal"
>
<LogsSearchFilter />
<LogLiveTail />
</SpaceContainer>
<LogsAggregate />
<LogsAggregate />
<Row gutter={20} wrap={false}>
<LogsFilters />
<Col flex={1} className="logs-col-container">
<Row>
<Col flex={1}>
<Space align="baseline" direction="horizontal">
<Select
getPopupContainer={popupContainer}
style={defaultSelectStyle}
value={selectedViewModeOption}
onChange={onChangeVeiwMode}
>
{viewModeOptionList.map((option) => (
<Select.Option key={option.value}>{option.label}</Select.Option>
))}
</Select>
{isFormatButtonVisible && (
<Popover
<Row gutter={20} wrap={false}>
<LogsFilters />
<Col flex={1} className="logs-col-container">
<Row>
<Col flex={1}>
<Space align="baseline" direction="horizontal">
<Select
getPopupContainer={popupContainer}
placement="right"
content={renderPopoverContent}
style={defaultSelectStyle}
value={selectedViewModeOption}
onChange={onChangeVeiwMode}
>
<Button>Format</Button>
</Popover>
)}
{viewModeOptionList.map((option) => (
<Select.Option key={option.value}>{option.label}</Select.Option>
))}
</Select>
<Select
getPopupContainer={popupContainer}
style={defaultSelectStyle}
defaultValue={order}
onChange={handleChangeOrder}
>
{orderItems.map((item) => (
<Select.Option key={item.enum}>{item.name}</Select.Option>
))}
</Select>
</Space>
</Col>
{isFormatButtonVisible && (
<Popover
getPopupContainer={popupContainer}
placement="right"
content={renderPopoverContent}
>
<Button>Format</Button>
</Popover>
)}
<Col>
<LogControls />
</Col>
</Row>
<Select
getPopupContainer={popupContainer}
style={defaultSelectStyle}
defaultValue={order}
onChange={handleChangeOrder}
>
{orderItems.map((item) => (
<Select.Option key={item.enum}>{item.name}</Select.Option>
))}
</Select>
</Space>
</Col>
<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
</Col>
</Row>
<Col>
<LogControls />
</Col>
</Row>
<LogDetailedView />
</div>
<LogsTable viewMode={viewMode} linesPerRow={linesPerRow} />
</Col>
</Row>
<LogDetailedView />
</div>
</PreferenceContextProvider>
);
}

View File

@@ -54,8 +54,7 @@ function LogsExplorer(): JSX.Element {
const [selectedView, setSelectedView] = useState<ExplorerViews>(() =>
getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
);
const { logs } = usePreferenceContext();
const { preferences } = logs;
const { preferences, loading: preferencesLoading } = usePreferenceContext();
const [showFilters, setShowFilters] = useState<boolean>(() => {
const localStorageValue = getLocalStorageKey(
@@ -274,7 +273,7 @@ function LogsExplorer(): JSX.Element {
);
useEffect(() => {
if (!preferences) {
if (!preferences || preferencesLoading) {
return;
}
const migratedQuery = migrateOptionsQuery({
@@ -296,7 +295,12 @@ function LogsExplorer(): JSX.Element {
) {
redirectWithOptionsData(migratedQuery);
}
}, [migrateOptionsQuery, preferences, redirectWithOptionsData]);
}, [
migrateOptionsQuery,
preferences,
redirectWithOptionsData,
preferencesLoading,
]);
const toolbarViews = useMemo(
() => ({

View File

@@ -4,9 +4,14 @@ import { Compass, TowerControl, Workflow } from 'lucide-react';
import LogsExplorer from 'pages/LogsExplorer';
import Pipelines from 'pages/Pipelines';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const logsExplorer: TabRoutes = {
Component: (): JSX.Element => <LogsExplorer />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<LogsExplorer />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Compass size={16} /> Explorer
@@ -17,7 +22,11 @@ export const logsExplorer: TabRoutes = {
};
export const logsPipelines: TabRoutes = {
Component: (): JSX.Element => <Pipelines />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<Pipelines />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Workflow size={16} /> Pipelines

View File

@@ -4,9 +4,14 @@ import BreakDownPage from 'container/MeterExplorer/Breakdown/BreakDown';
import ExplorerPage from 'container/MeterExplorer/Explorer';
import { Compass, TowerControl } from 'lucide-react';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Explorer: TabRoutes = {
Component: (): JSX.Element => <ExplorerPage />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<ExplorerPage />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

View File

@@ -4,6 +4,7 @@ import ExplorerPage from 'container/MetricsExplorer/Explorer';
import SummaryPage from 'container/MetricsExplorer/Summary';
import { BarChart2, Compass, TowerControl } from 'lucide-react';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Summary: TabRoutes = {
Component: SummaryPage,
@@ -17,7 +18,11 @@ export const Summary: TabRoutes = {
};
export const Explorer: TabRoutes = {
Component: (): JSX.Element => <ExplorerPage />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<ExplorerPage />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

View File

@@ -4,6 +4,7 @@ import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import NewDashboard from 'container/NewDashboard';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { ErrorType } from 'types/common';
@@ -35,7 +36,11 @@ function DashboardPage(): JSX.Element {
return <Spinner tip="Loading.." />;
}
return <NewDashboard />;
return (
<PreferenceContextProvider>
<NewDashboard />
</PreferenceContextProvider>
);
}
export default DashboardPage;

View File

@@ -5,10 +5,15 @@ import SaveView from 'pages/SaveView';
import TracesExplorer from 'pages/TracesExplorer';
import TracesFunnelDetails from 'pages/TracesFunnelDetails';
import TracesFunnels from 'pages/TracesFunnels';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { matchPath } from 'react-router-dom';
export const tracesExplorer: TabRoutes = {
Component: (): JSX.Element => <TracesExplorer />,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<TracesExplorer />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

View File

@@ -0,0 +1,154 @@
/* eslint-disable sonarjs/no-identical-functions */
import { render, screen } from '@testing-library/react';
import { TelemetryFieldKey } from 'api/v5/v5';
import {
FormattingOptions,
PreferenceMode,
Preferences,
} from 'providers/preferences/types';
import { MemoryRouter, Route, Switch } from 'react-router-dom';
import {
PreferenceContextProvider,
usePreferenceContext,
} from '../context/PreferenceContextProvider';
// Mock the usePreferenceSync hook
jest.mock('../sync/usePreferenceSync', () => ({
usePreferenceSync: jest.fn().mockReturnValue({
preferences: {
columns: [] as TelemetryFieldKey[],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
} as FormattingOptions,
} as Preferences,
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));
// Test component that consumes the context
function TestConsumer(): JSX.Element {
const context = usePreferenceContext();
return (
<div>
<div data-testid="mode">{context.mode}</div>
<div data-testid="dataSource">{context.dataSource}</div>
<div data-testid="loading">{String(context.loading)}</div>
<div data-testid="error">{String(context.error)}</div>
<div data-testid="savedViewId">{context.savedViewId || 'no-view-id'}</div>
</div>
);
}
describe('PreferenceContextProvider', () => {
it('should provide context with direct mode when no viewKey is present', () => {
render(
<MemoryRouter initialEntries={['/logs']}>
<Switch>
<Route
path="/logs"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
expect(screen.getByTestId('loading')).toHaveTextContent('false');
expect(screen.getByTestId('error')).toHaveTextContent('null');
expect(screen.getByTestId('savedViewId')).toHaveTextContent('no-view-id');
});
it('should provide context with savedView mode when viewKey is present', () => {
render(
<MemoryRouter initialEntries={['/logs?viewKey="test-view-id"']}>
<Switch>
<Route
path="/logs"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('mode')).toHaveTextContent('savedView');
expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
expect(screen.getByTestId('savedViewId')).toHaveTextContent('test-view-id');
});
it('should set traces dataSource when pathname includes traces', () => {
render(
<MemoryRouter initialEntries={['/traces']}>
<Switch>
<Route
path="/traces"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('dataSource')).toHaveTextContent('traces');
});
it('should handle invalid viewKey JSON gracefully', () => {
// Mock console.error to avoid test output clutter
const originalConsoleError = console.error;
console.error = jest.fn();
render(
<MemoryRouter initialEntries={['/logs?viewKey=invalid-json']}>
<Switch>
<Route
path="/logs"
component={(): JSX.Element => (
<PreferenceContextProvider>
<TestConsumer />
</PreferenceContextProvider>
)}
/>
</Switch>
</MemoryRouter>,
);
expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
expect(console.error).toHaveBeenCalled();
// Restore console.error
console.error = originalConsoleError;
});
it('should throw error when usePreferenceContext is used outside provider', () => {
// Suppress the error output for this test
const originalConsoleError = console.error;
console.error = jest.fn();
expect(() => {
render(<TestConsumer />);
}).toThrow(
'usePreferenceContext must be used within PreferenceContextProvider',
);
// Restore console.error
console.error = originalConsoleError;
});
});

View File

@@ -1,402 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { TelemetryFieldKey } from 'api/v5/v5';
import { LOCALSTORAGE } from 'constants/localStorage';
import { LogViewMode } from 'container/LogsTable';
import {
defaultLogsSelectedColumns,
defaultTraceSelectedColumns,
} from 'container/OptionsMenu/constants';
import { FontSize } from 'container/OptionsMenu/types';
import { render, screen, userEvent } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { usePreferenceContext } from '../context/PreferenceContextProvider';
const ROUTE_LOGS = '/logs';
const ROUTE_TRACES = '/traces';
const TESTID_LOGS = 'logs';
const TESTID_TRACES = 'traces';
type LogsLocalOptions = {
selectColumns?: TelemetryFieldKey[];
maxLines?: number;
format?: string;
fontSize?: string;
version?: number;
};
type TracesLocalOptions = {
selectColumns?: TelemetryFieldKey[];
};
function setLocalStorageJSON(key: string, value: unknown): void {
localStorage.setItem(key, JSON.stringify(value));
}
function getLocalStorageJSON<T>(key: string): T | null {
const raw = localStorage.getItem(key);
return raw ? (JSON.parse(raw) as T) : null;
}
function Consumer({
dataSource,
testIdPrefix,
}: {
dataSource: DataSource;
testIdPrefix: string;
}): JSX.Element {
const ctx = usePreferenceContext();
const slice = dataSource === DataSource.TRACES ? ctx.traces : ctx.logs;
return (
<div>
<div data-testid={`${testIdPrefix}-loading`}>{String(slice.loading)}</div>
<div data-testid={`${testIdPrefix}-columns-len`}>
{String(slice.preferences?.columns?.length || 0)}
</div>
<button
data-testid={`${testIdPrefix}-update-columns`}
type="button"
onClick={(): void => {
const newCols: TelemetryFieldKey[] =
dataSource === DataSource.TRACES
? (defaultTraceSelectedColumns.slice(0, 1) as TelemetryFieldKey[])
: (defaultLogsSelectedColumns.slice(0, 1) as TelemetryFieldKey[]);
slice.updateColumns(newCols);
}}
>
update
</button>
</div>
);
}
describe('PreferencesProvider integration', () => {
beforeEach(() => {
localStorage.clear();
});
describe('Logs', () => {
it('loads defaults when no localStorage or url provided', () => {
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
expect(screen.getByTestId('logs-loading')).toHaveTextContent('false');
expect(
Number(screen.getByTestId('logs-columns-len').textContent),
).toBeGreaterThan(0);
});
it('respects localStorage when present', () => {
setLocalStorageJSON(LOCALSTORAGE.LOGS_LIST_OPTIONS, {
selectColumns: [{ name: 'ls.col' }],
maxLines: 5,
format: 'json',
fontSize: 'large',
version: 2,
});
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
expect(Number(screen.getByTestId('logs-columns-len').textContent)).toBe(1);
});
it('direct mode updateColumns persists to localStorage', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
await user.click(screen.getByTestId('logs-update-columns'));
const stored = getLocalStorageJSON<LogsLocalOptions>(
LOCALSTORAGE.LOGS_LIST_OPTIONS,
);
expect(stored?.selectColumns).toEqual([
defaultLogsSelectedColumns[0] as TelemetryFieldKey,
]);
});
it('saved view mode uses in-memory preferences (no localStorage write)', async () => {
const viewKey = JSON.stringify('saved-view-id-1');
const initialEntry = `/logs?viewKey=${encodeURIComponent(viewKey)}`;
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix="logs" />,
undefined,
{
initialRoute: initialEntry,
},
);
await user.click(screen.getByTestId('logs-update-columns'));
const stored = getLocalStorageJSON<LogsLocalOptions>(
LOCALSTORAGE.LOGS_LIST_OPTIONS,
);
expect(stored?.selectColumns).toBeUndefined();
});
it('url options override defaults', () => {
const options = {
selectColumns: [{ name: 'url.col' }],
maxLines: 7,
format: 'json',
fontSize: 'large',
version: 2,
};
const originalLocation = window.location;
Object.defineProperty(window, 'location', {
writable: true,
value: {
...originalLocation,
search: `?options=${encodeURIComponent(JSON.stringify(options))}`,
},
});
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{
initialRoute: ROUTE_LOGS,
},
);
// restore
Object.defineProperty(window, 'location', {
writable: true,
value: originalLocation,
});
expect(Number(screen.getByTestId('logs-columns-len').textContent)).toBe(1);
});
it('updateFormatting persists to localStorage in direct mode', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
function FormattingConsumer(): JSX.Element {
const { logs } = usePreferenceContext();
return (
<button
data-testid="logs-update-formatting"
type="button"
onClick={(): void =>
logs.updateFormatting({
maxLines: 9,
format: 'json' as LogViewMode,
fontSize: 'large' as FontSize,
version: 2,
})
}
>
fmt
</button>
);
}
render(<FormattingConsumer />, undefined, { initialRoute: '/logs' });
await user.click(screen.getByTestId('logs-update-formatting'));
const stored = getLocalStorageJSON<LogsLocalOptions>(
LOCALSTORAGE.LOGS_LIST_OPTIONS,
);
expect(stored?.maxLines).toBe(9);
expect(stored?.format).toBe('json');
expect(stored?.fontSize).toBe('large');
expect(stored?.version).toBe(2);
});
it('saved view mode updates in-memory preferences (columns-len changes)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const viewKey = JSON.stringify('saved-view-id-3');
const initialEntry = `/logs?viewKey=${encodeURIComponent(viewKey)}`;
render(
<Consumer dataSource={DataSource.LOGS} testIdPrefix={TESTID_LOGS} />,
undefined,
{ initialRoute: initialEntry },
);
const before = Number(screen.getByTestId('logs-columns-len').textContent);
await user.click(screen.getByTestId('logs-update-columns'));
const after = Number(screen.getByTestId('logs-columns-len').textContent);
expect(after).toBeGreaterThanOrEqual(1);
// Should change from default to 1 for our new selection; tolerate default already being >=1
if (before !== after) {
expect(after).toBe(1);
}
});
});
describe('Traces', () => {
it('loads defaults when no localStorage or url provided', () => {
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{
initialRoute: ROUTE_TRACES,
},
);
expect(screen.getByTestId('traces-loading')).toHaveTextContent('false');
expect(
Number(screen.getByTestId('traces-columns-len').textContent),
).toBeGreaterThan(0);
});
it('respects localStorage when present', () => {
setLocalStorageJSON(LOCALSTORAGE.TRACES_LIST_OPTIONS, {
selectColumns: [{ name: 'trace.ls.col' }],
});
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{
initialRoute: ROUTE_TRACES,
},
);
expect(Number(screen.getByTestId('traces-columns-len').textContent)).toBe(1);
});
it('direct mode updateColumns persists to localStorage', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{
initialRoute: ROUTE_TRACES,
},
);
await user.click(screen.getByTestId('traces-update-columns'));
const stored = getLocalStorageJSON<TracesLocalOptions>(
LOCALSTORAGE.TRACES_LIST_OPTIONS,
);
expect(stored?.selectColumns).toEqual([
defaultTraceSelectedColumns[0] as TelemetryFieldKey,
]);
});
it('saved view mode uses in-memory preferences (no localStorage write)', async () => {
const viewKey = JSON.stringify('saved-view-id-2');
const initialEntry = `/traces?viewKey=${encodeURIComponent(viewKey)}`;
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix="traces" />,
undefined,
{
initialRoute: initialEntry,
},
);
await user.click(screen.getByTestId('traces-update-columns'));
const stored = getLocalStorageJSON<TracesLocalOptions>(
LOCALSTORAGE.TRACES_LIST_OPTIONS,
);
expect(stored?.selectColumns).toBeUndefined();
});
it('url options override defaults', () => {
const options = {
selectColumns: [{ name: 'trace.url.col' }],
};
const originalLocation = window.location;
Object.defineProperty(window, 'location', {
writable: true,
value: {
...originalLocation,
search: `?options=${encodeURIComponent(JSON.stringify(options))}`,
},
});
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{ initialRoute: ROUTE_TRACES },
);
Object.defineProperty(window, 'location', {
writable: true,
value: originalLocation,
});
expect(Number(screen.getByTestId('traces-columns-len').textContent)).toBe(1);
});
it('updateFormatting is a no-op in direct mode (no localStorage write)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
function TracesFormattingConsumer(): JSX.Element {
const { traces } = usePreferenceContext();
return (
<button
data-testid="traces-update-formatting"
type="button"
onClick={(): void =>
traces.updateFormatting({
maxLines: 9,
format: 'json' as LogViewMode,
fontSize: 'large' as FontSize,
version: 2,
})
}
>
fmt
</button>
);
}
render(<TracesFormattingConsumer />, undefined, { initialRoute: '/traces' });
await user.click(screen.getByTestId('traces-update-formatting'));
const stored = getLocalStorageJSON<TracesLocalOptions>(
LOCALSTORAGE.TRACES_LIST_OPTIONS,
);
expect(stored).toBeNull();
});
it('saved view mode updates in-memory preferences (columns-len changes)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const viewKey = JSON.stringify('saved-view-id-4');
const initialEntry = `/traces?viewKey=${encodeURIComponent(viewKey)}`;
render(
<Consumer dataSource={DataSource.TRACES} testIdPrefix={TESTID_TRACES} />,
undefined,
{ initialRoute: initialEntry },
);
const before = Number(screen.getByTestId('traces-columns-len').textContent);
await user.click(screen.getByTestId('traces-update-columns'));
const after = Number(screen.getByTestId('traces-columns-len').textContent);
expect(after).toBeGreaterThanOrEqual(1);
if (before !== after) {
expect(after).toBe(1);
}
});
});
});

View File

@@ -11,18 +11,18 @@ jest.mock('../configs/logsLoaderConfig', () => ({
__esModule: true,
default: {
priority: ['local', 'url', 'default'],
local: jest.fn(() => ({
local: jest.fn().mockResolvedValue({
columns: [{ name: 'local-column' }],
formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
})),
url: jest.fn(() => ({
}),
url: jest.fn().mockResolvedValue({
columns: [{ name: 'url-column' }],
formatting: { maxLines: 3, format: 'table', fontSize: 'small', version: 1 },
})),
default: jest.fn(() => ({
}),
default: jest.fn().mockResolvedValue({
columns: [{ name: 'default-column' }],
formatting: { maxLines: 2, format: 'table', fontSize: 'small', version: 1 },
})),
}),
},
}));
@@ -30,15 +30,15 @@ jest.mock('../configs/tracesLoaderConfig', () => ({
__esModule: true,
default: {
priority: ['local', 'url', 'default'],
local: jest.fn(() => ({
local: jest.fn().mockResolvedValue({
columns: [{ name: 'local-trace-column' }],
})),
url: jest.fn(() => ({
}),
url: jest.fn().mockResolvedValue({
columns: [{ name: 'url-trace-column' }],
})),
default: jest.fn(() => ({
}),
default: jest.fn().mockResolvedValue({
columns: [{ name: 'default-trace-column' }],
})),
}),
},
}));
@@ -57,6 +57,11 @@ describe('usePreferenceLoader', () => {
}),
);
// Initially it should be loading
expect(result.current.loading).toBe(true);
expect(result.current.preferences).toBe(null);
expect(result.current.error).toBe(null);
// Wait for the loader to complete
await waitFor(() => {
expect(result.current.loading).toBe(false);
@@ -118,33 +123,30 @@ describe('usePreferenceLoader', () => {
});
it('should handle errors during loading', async () => {
// Make first call succeed (initial state), second call throw in reSync effect
const localSpy: jest.SpyInstance = jest.spyOn(logsLoaderConfig, 'local');
localSpy.mockImplementationOnce(() => ({
columns: [{ name: 'local-column' }],
formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
}));
localSpy.mockImplementationOnce(() => {
throw new Error('Loading failed');
});
// Mock an error in the loader using jest.spyOn
const localSpy = jest.spyOn(logsLoaderConfig, 'local');
localSpy.mockRejectedValueOnce(new Error('Loading failed'));
const setReSync = jest.fn();
const { result } = renderHook(() =>
usePreferenceLoader({
dataSource: DataSource.LOGS,
reSync: true,
reSync: false,
setReSync,
}),
);
// Wait for the loader to complete
await waitFor(() => {
expect(result.current.error).toBeInstanceOf(Error);
expect(result.current.error?.message).toBe('Loading failed');
expect(result.current.loading).toBe(false);
});
// Reset reSync should be called
expect(setReSync).toHaveBeenCalledWith(false);
// Should have set the error
expect(result.current.error).toBeInstanceOf(Error);
expect(result.current.error?.message).toBe('Loading failed');
expect(result.current.preferences).toBe(null);
// Restore original implementation
localSpy.mockRestore();
});
});
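The mock updates above follow the loaders' move to async signatures: plain return values become resolved promises and a thrown error becomes a rejection. A minimal sketch of the two jest styles with stand-in data:

// Synchronous style (old): the loader returns its value directly.
const syncLoader = jest.fn(() => ({ columns: [{ name: 'col' }] }));

// Async style (new): the loader resolves to its value...
const asyncLoader = jest.fn().mockResolvedValue({ columns: [{ name: 'col' }] });

// ...and a one-off failure is modelled as a rejection instead of a throw.
asyncLoader.mockRejectedValueOnce(new Error('Loading failed'));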

View File

@@ -10,10 +10,10 @@ import { FormattingOptions } from '../types';
// --- LOGS preferences loader config ---
const logsLoaders = {
local: (): {
local: async (): Promise<{
columns: BaseAutocompleteData[];
formatting: FormattingOptions;
} => {
}> => {
const local = getLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS);
if (local) {
try {
@@ -31,10 +31,10 @@ const logsLoaders = {
}
return { columns: [], formatting: undefined } as any;
},
url: (): {
url: async (): Promise<{
columns: BaseAutocompleteData[];
formatting: FormattingOptions;
} => {
}> => {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
@@ -50,10 +50,10 @@ const logsLoaders = {
} catch {}
return { columns: [], formatting: undefined } as any;
},
default: (): {
default: async (): Promise<{
columns: TelemetryFieldKey[];
formatting: FormattingOptions;
} => ({
}> => ({
columns: defaultLogsSelectedColumns,
formatting: {
maxLines: 2,

View File

@@ -7,9 +7,9 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
// --- TRACES preferences loader config ---
const tracesLoaders = {
local: (): {
local: async (): Promise<{
columns: BaseAutocompleteData[];
} => {
}> => {
const local = getLocalStorageKey(LOCALSTORAGE.TRACES_LIST_OPTIONS);
if (local) {
try {
@@ -21,9 +21,9 @@ const tracesLoaders = {
}
return { columns: [] };
},
url: (): {
url: async (): Promise<{
columns: BaseAutocompleteData[];
} => {
}> => {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
@@ -33,9 +33,9 @@ const tracesLoaders = {
} catch {}
return { columns: [] };
},
default: (): {
default: async (): Promise<{
columns: TelemetryFieldKey[];
} => ({
}> => ({
columns: defaultTraceSelectedColumns,
}),
priority: ['local', 'url', 'default'] as const,

View File

@@ -4,6 +4,7 @@ import {
PreferenceMode,
} from 'providers/preferences/types';
import React, { createContext, useContext, useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import { DataSource } from 'types/common/queryBuilder';
import { usePreferenceSync } from '../sync/usePreferenceSync';
@@ -17,6 +18,7 @@ export function PreferenceContextProvider({
}: {
children: React.ReactNode;
}): JSX.Element {
const location = useLocation();
const params = useUrlQuery();
let savedViewId = '';
@@ -28,25 +30,41 @@ export function PreferenceContextProvider({
console.error(e);
}
}
let dataSource: DataSource = DataSource.LOGS;
if (location.pathname.includes('traces')) dataSource = DataSource.TRACES;
const logsSlice = usePreferenceSync({
const {
preferences,
loading,
error,
updateColumns,
updateFormatting,
} = usePreferenceSync({
mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
savedViewId: savedViewId || undefined,
dataSource: DataSource.LOGS,
});
const tracesSlice = usePreferenceSync({
mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
savedViewId: savedViewId || undefined,
dataSource: DataSource.TRACES,
dataSource,
});
const value = useMemo<PreferenceContextValue>(
() => ({
logs: logsSlice,
traces: tracesSlice,
preferences,
loading,
error,
mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
savedViewId: savedViewId || undefined,
dataSource,
updateColumns,
updateFormatting,
}),
[logsSlice, tracesSlice],
[
savedViewId,
dataSource,
preferences,
loading,
error,
updateColumns,
updateFormatting,
],
);
return (

View File

@@ -17,48 +17,55 @@ const migrateColumns = (columns: any): any =>
return column;
});
// Generic preferences loader that works with any config (synchronous version)
function preferencesLoader<T>(config: {
// Generic preferences loader that works with any config
async function preferencesLoader<T>(config: {
priority: readonly string[];
[key: string]: any;
}): T {
// Try each loader in priority order synchronously
const results = config.priority.map((source: string) => ({
source,
result: config[source](),
}));
}): Promise<T> {
const findValidLoader = async (): Promise<T> => {
// Try each loader in priority order
const results = await Promise.all(
config.priority.map(async (source) => ({
source,
result: await config[source](),
})),
);
// Find valid columns and formatting independently
const validColumnsResult = results.find(
({ result }) => result.columns?.length,
);
const validFormattingResult = results.find(({ result }) => result.formatting);
// Find valid columns and formatting independently
const validColumnsResult = results.find(
({ result }) => result.columns?.length,
);
const migratedColumns = validColumnsResult?.result.columns
? migrateColumns(validColumnsResult.result.columns)
: undefined;
const validFormattingResult = results.find(({ result }) => result.formatting);
// Combine valid results or fallback to default
const finalResult = {
columns: migratedColumns || config.default().columns,
formatting:
validFormattingResult?.result.formatting || config.default().formatting,
const migratedColumns = validColumnsResult?.result.columns
? migrateColumns(validColumnsResult?.result.columns)
: undefined;
// Combine valid results or fallback to default
const finalResult = {
columns: migratedColumns || config.default().columns,
formatting:
validFormattingResult?.result.formatting || config.default().formatting,
};
return finalResult as T;
};
return finalResult as T;
return findValidLoader();
}
// Use the generic loader with specific configs
function logsPreferencesLoader(): {
async function logsPreferencesLoader(): Promise<{
columns: TelemetryFieldKey[];
formatting: FormattingOptions;
} {
}> {
return preferencesLoader(logsLoaderConfig);
}
function tracesPreferencesLoader(): {
async function tracesPreferencesLoader(): Promise<{
columns: TelemetryFieldKey[];
} {
}> {
return preferencesLoader(tracesLoaderConfig);
}
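preferencesLoader now resolves every source in the configured priority order and picks columns and formatting independently: the first source with non-empty columns wins for columns, the first with any formatting wins for formatting, and the default loader backs both. A condensed sketch of that merge, assuming the 'local' | 'url' | 'default' keys used by the configs:

type LoaderResult = { columns?: unknown[]; formatting?: unknown };
type LoaderConfig = { priority: readonly string[] } & Record<string, any>;

async function mergeByPriority(config: LoaderConfig): Promise<LoaderResult> {
  const results: LoaderResult[] = await Promise.all(
    config.priority.map((source) => config[source]()),
  );
  const fallback = await config.default();
  return {
    // First source that produced any columns wins; otherwise fall back.
    columns: results.find((r) => r.columns?.length)?.columns ?? fallback.columns,
    // Formatting is resolved independently of columns.
    formatting: results.find((r) => r.formatting)?.formatting ?? fallback.formatting,
  };
}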
@@ -75,36 +82,29 @@ export function usePreferenceLoader({
loading: boolean;
error: Error | null;
} {
const [preferences, setPreferences] = useState<Preferences | null>(() => {
if (dataSource === DataSource.LOGS) {
const { columns, formatting } = logsPreferencesLoader();
return { columns, formatting };
}
if (dataSource === DataSource.TRACES) {
const { columns } = tracesPreferencesLoader();
return { columns };
}
return null;
});
const [preferences, setPreferences] = useState<Preferences | null>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<Error | null>(null);
useEffect((): void => {
function loadPreferences(): void {
async function loadPreferences(): Promise<void> {
setLoading(true);
setError(null);
try {
if (dataSource === DataSource.LOGS) {
const { columns, formatting } = logsPreferencesLoader();
const { columns, formatting } = await logsPreferencesLoader();
setPreferences({ columns, formatting });
}
if (dataSource === DataSource.TRACES) {
const { columns } = tracesPreferencesLoader();
const { columns } = await tracesPreferencesLoader();
setPreferences({ columns });
}
} catch (e) {
setError(e as Error);
} finally {
setLoading(false);
// Reset reSync back to false after loading is complete
if (reSync) {
setReSync(false);
@@ -113,10 +113,10 @@ export function usePreferenceLoader({
}
// Only load preferences on initial mount or when reSync is true
if (reSync) {
if (loading || reSync) {
loadPreferences();
}
}, [dataSource, reSync, setReSync]);
}, [dataSource, reSync, setReSync, loading]);
return { preferences, loading: false, error };
return { preferences, loading, error };
}

View File

@@ -25,10 +25,7 @@ export function usePreferenceSync({
updateColumns: (newColumns: TelemetryFieldKey[]) => void;
updateFormatting: (newFormatting: FormattingOptions) => void;
} {
const { data: viewsData } = useGetAllViews(
dataSource,
mode === PreferenceMode.SAVED_VIEW,
);
const { data: viewsData } = useGetAllViews(dataSource);
const [
savedViewPreferences,

View File

@@ -1,25 +1,24 @@
import { TelemetryFieldKey } from 'api/v5/v5';
import { LogViewMode } from 'container/LogsTable';
import { FontSize } from 'container/OptionsMenu/types';
import { DataSource } from 'types/common/queryBuilder';
export enum PreferenceMode {
SAVED_VIEW = 'savedView',
DIRECT = 'direct',
}
export interface PreferenceSlice {
export interface PreferenceContextValue {
preferences: Preferences | null;
loading: boolean;
error: Error | null;
mode: PreferenceMode;
savedViewId?: string;
dataSource: DataSource;
updateColumns: (newColumns: TelemetryFieldKey[]) => void;
updateFormatting: (newFormatting: FormattingOptions) => void;
}
export interface PreferenceContextValue {
logs: PreferenceSlice;
traces: PreferenceSlice;
}
export interface FormattingOptions {
maxLines?: number;
format?: LogViewMode;
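With the per-source slices removed, consumers read the flattened PreferenceContextValue defined above. A minimal consumer sketch (component name hypothetical; the hook and its import path appear elsewhere in this diff):

import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';

function ColumnCount(): JSX.Element {
  // Before this change: const { logs } = usePreferenceContext(); then logs.preferences, logs.updateColumns
  const { preferences, loading, updateColumns } = usePreferenceContext();
  if (loading || !preferences) {
    return <span>loading preferences…</span>;
  }
  return (
    <button type="button" onClick={(): void => updateColumns(preferences.columns ?? [])}>
      {preferences.columns?.length ?? 0} columns selected
    </button>
  );
}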

View File

@@ -5,7 +5,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -16,14 +15,11 @@ type AuthZ interface {
Check(context.Context, *openfgav1.TupleKey) error
// CheckWithTupleCreation takes upon the responsibility for generating the tuples alongside everything Check does.
CheckWithTupleCreation(context.Context, authtypes.Claims, valuer.UUID, authtypes.Relation, authtypes.Relation, authtypes.Typeable, []authtypes.Selector) error
CheckWithTupleCreation(context.Context, authtypes.Claims, authtypes.Relation, authtypes.Typeable, []authtypes.Selector) error
// Batch Check returns error when the upstream authorization server is unavailable or for all the tuples of subject (s) doesn't have relation (r) on object (o).
BatchCheck(context.Context, []*openfgav1.TupleKey) error
// writes the tuples to upstream server
Write(context.Context, *openfgav1.WriteRequest) error
// Write accepts the insertion tuples and the deletion tuples.
Write(context.Context, []*openfgav1.TupleKey, []*openfgav1.TupleKey) error
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
// lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
}

View File

@@ -232,13 +232,13 @@ func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector) error {
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeUser, claims.UserID, authtypes.Relation{})
if err != nil {
return err
}
tuples, err := authtypes.TypeableOrganization.Tuples(subject, translation, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, orgID.StringValue())}, orgID)
tuples, err := typeable.Tuples(subject, relation, selectors)
if err != nil {
return err
}
@@ -251,21 +251,11 @@ func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims aut
return nil
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
func (provider *provider) Write(ctx context.Context, req *openfgav1.WriteRequest) error {
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: provider.storeID,
AuthorizationModelId: provider.modelID,
Writes: &openfgav1.WriteRequestWrites{
TupleKeys: additions,
},
Deletes: &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
},
Writes: req.Writes,
})
return err

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -107,7 +106,7 @@ func (middleware *AuthZ) OpenAccess(next http.HandlerFunc) http.HandlerFunc {
})
}
func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relation, translation authtypes.Relation, typeable authtypes.Typeable, cb authtypes.SelectorCallbackFn) http.HandlerFunc {
func (middleware *AuthZ) Check(next http.HandlerFunc, _ authtypes.Relation, translation authtypes.Relation, _ authtypes.Typeable, _ authtypes.Typeable, _ authtypes.SelectorCallbackFn) http.HandlerFunc {
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
@@ -115,19 +114,7 @@ func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relatio
return
}
orgId, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
selectors, err := cb(req.Context(), claims)
if err != nil {
render.Error(rw, err)
return
}
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, orgId, relation, translation, typeable, selectors)
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, translation, authtypes.TypeableOrganization, []authtypes.Selector{authtypes.MustNewSelector(authtypes.TypeOrganization, claims.OrgID)})
if err != nil {
render.Error(rw, err)
return

View File

@@ -27,7 +27,6 @@ type Module interface {
GetByMetricNames(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string][]map[string]string, error)
statsreporter.StatsCollector
role.RegisterTypeable
}

View File

@@ -225,5 +225,5 @@ func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableResourceDashboard, dashboardtypes.TypeableResourcesDashboards}
return []authtypes.Typeable{dashboardtypes.ResourceDashboard, dashboardtypes.ResourcesDashboards}
}

View File

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type module struct {
@@ -46,8 +47,6 @@ func (module *module) GetResources(_ context.Context) []*authtypes.Resource {
for _, register := range module.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, module.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
@@ -143,17 +142,24 @@ func (module *module) Patch(ctx context.Context, orgID valuer.UUID, id valuer.UU
}
func (module *module) PatchObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
additionTuples, err := roletypes.GetAdditionTuples(id, orgID, relation, additions)
additionTuples, err := roletypes.GetAdditionTuples(id, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(id, orgID, relation, deletions)
deletionTuples, err := roletypes.GetDeletionTuples(id, relation, deletions)
if err != nil {
return err
}
err = module.authz.Write(ctx, additionTuples, deletionTuples)
err = module.authz.Write(ctx, &openfgav1.WriteRequest{
Writes: &openfgav1.WriteRequestWrites{
TupleKeys: additionTuples,
},
Deletes: &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuples,
},
})
if err != nil {
return err
}
@@ -164,7 +170,3 @@ func (module *module) PatchObjects(ctx context.Context, orgID valuer.UUID, id va
func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
return module.store.Delete(ctx, orgID, id)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}

View File

@@ -33,8 +33,6 @@ type Module interface {
// Deletes the role metadata and tuples in authorization server
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type RegisterTypeable interface {
@@ -42,19 +40,27 @@ type RegisterTypeable interface {
}
type Handler interface {
// Creates the role metadata and tuples in authorization server
Create(http.ResponseWriter, *http.Request)
// Gets the role metadata
Get(http.ResponseWriter, *http.Request)
// Gets the objects for the given relation and role
GetObjects(http.ResponseWriter, *http.Request)
// Gets all the resources and the relations
GetResources(http.ResponseWriter, *http.Request)
// Lists all the roles metadata for the organization
List(http.ResponseWriter, *http.Request)
// Patches the role metadata
Patch(http.ResponseWriter, *http.Request)
// Patches the objects for the given relation and role
PatchObjects(http.ResponseWriter, *http.Request)
// Deletes the role metadata and tuples in authorization server
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -94,7 +94,7 @@ func CollisionHandledFinalExpr(
return "", nil, err
}
colName, _ = fm.FieldFor(ctx, key)
colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(key, dummyValue, colName)
stmts = append(stmts, colName)
}
}
@@ -109,7 +109,7 @@ func CollisionHandledFinalExpr(
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else {
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(field, dummyValue, colName)
}
stmts = append(stmts, colName)
@@ -194,109 +194,3 @@ func FormatFullTextSearch(input string) string {
}
return input
}
func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, value any, tblFieldName string, operator qbtypes.FilterOperator) (string, any) {
// This block of code exists to handle the data type collisions
// We don't want to fail the requests when there is a key with more than one data type
// Let's take an example of `http.status_code`, and consider user sent a string value and number value
// When they search for `http.status_code=200`, we will search across both the number columns and string columns
// and return the results from both the columns
// While we expect user not to send the mixed data types, it inevitably happens
// So we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString:
switch v := value.(type) {
case float64:
// try to convert the string value to a number
tblFieldName = castFloat(tblFieldName)
case []any:
if allFloats(v) {
tblFieldName = castFloat(tblFieldName)
} else if hasString(v) {
_, value = castString(tblFieldName), toStrings(v)
}
case bool:
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
switch v := value.(type) {
// why? ; CH returns an error for a simple check
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
// DB::Exception: Bad get: has UInt64, requested Float64.
// How is it working in v4? v4 prepares the full query with values in query string
// When we format the float it becomes attributes_number['http.status_code'] = 200.000
// Which CH gladly accepts and doesn't throw error
// However, when passed as query args, the default formatter
// https://github.com/ClickHouse/clickhouse-go/blob/757e102f6d8c6059d564ce98795b4ce2a101b1a5/bind.go#L393
// is used which prepares the
// final query as attributes_number['http.status_code'] = 200 giving this error
// This following is one way to workaround it
// if the key is a number, the value is a string, we will let clickHouse handle the conversion
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// check if it's a number inside a string
isNumber := false
if _, err := strconv.ParseFloat(v, 64); err == nil {
isNumber = true
}
if !operator.IsComparisonOperator() || !isNumber {
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
} else {
tblFieldName = castFloatHack(tblFieldName)
}
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case telemetrytypes.FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)
case []any:
if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
}
return tblFieldName, value
}
func castFloat(col string) string { return fmt.Sprintf("toFloat64OrNull(%s)", col) }
func castFloatHack(col string) string { return fmt.Sprintf("toFloat64(%s)", col) }
func castString(col string) string { return fmt.Sprintf("toString(%s)", col) }
func allFloats(in []any) bool {
for _, x := range in {
if _, ok := x.(float64); !ok {
return false
}
}
return true
}
func hasString(in []any) bool {
for _, x := range in {
if _, ok := x.(string); ok {
return true
}
}
return false
}
func toStrings(in []any) []any {
out := make([]any, len(in))
for i, x := range in {
out[i] = fmt.Sprintf("%v", x)
}
return out
}

View File

@@ -1,165 +0,0 @@
package querybuilder
import (
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
func TestDataTypeCollisionHandledFieldName(t *testing.T) {
tests := []struct {
name string
key *telemetrytypes.TelemetryFieldKey
value any
tblFieldName string
expectedFieldName string
expectedValue any
operator qbtypes.FilterOperator
}{
{
name: "http_status_code_string_field_with_numeric_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.status_code",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: float64(200),
tblFieldName: "attribute_string_http$$status_code",
expectedFieldName: "toFloat64OrNull(attribute_string_http$$status_code)",
expectedValue: float64(200),
},
{
name: "service_enabled_string_field_with_bool_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "service.enabled",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: true,
tblFieldName: "attribute_string_service$$enabled",
expectedFieldName: "attribute_string_service$$enabled",
expectedValue: "true",
},
{
name: "http_method_string_field_with_string_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.method",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: "GET",
tblFieldName: "attribute_string_http$$method",
expectedFieldName: "attribute_string_http$$method",
expectedValue: "GET",
},
{
name: "response_times_string_field_with_numeric_array",
key: &telemetrytypes.TelemetryFieldKey{
Name: "response.times",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: []any{float64(100.5), float64(200.3), float64(150.7)},
tblFieldName: "attribute_string_response$$times",
expectedFieldName: "toFloat64OrNull(attribute_string_response$$times)",
expectedValue: []any{float64(100.5), float64(200.3), float64(150.7)},
},
{
name: "error_codes_string_field_with_mixed_array",
key: &telemetrytypes.TelemetryFieldKey{
Name: "error.codes",
FieldDataType: telemetrytypes.FieldDataTypeString,
},
value: []any{float64(500), "TIMEOUT", float64(503)},
tblFieldName: "attribute_string_error$$codes",
expectedFieldName: "attribute_string_error$$codes",
expectedValue: []any{"500", "TIMEOUT", "503"},
},
// numbers
{
// we cast the key to string if the value is not a number or operator is not a comparison operator
name: "http_request_duration_float_field_with_string_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "1234.56",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toString(attribute_float64_http$$request$$duration)",
expectedValue: "1234.56",
operator: qbtypes.FilterOperatorEqual,
},
{
// we cast to float64 if it's a comparison operator and the value is a stringified number
// reason:- https://github.com/SigNoz/signoz/pull/9154#issuecomment-3369941207
name: "http_request_duration_float_field_with_string_value_comparison_operator",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "9",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toFloat64(attribute_float64_http$$request$$duration)",
expectedValue: "9",
operator: qbtypes.FilterOperatorGreaterThan,
},
{
// we cast to float64 if it's a comparison operator and the value is a stringified number
// reason:- https://github.com/SigNoz/signoz/pull/9154#issuecomment-3369941207
name: "http_request_duration_float_field_with_string_value_comparison_operator_1",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "9.11",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toFloat64(attribute_float64_http$$request$$duration)",
expectedValue: "9.11",
operator: qbtypes.FilterOperatorGreaterThan,
},
{
// we cast the key to string if the value is not a number or operator is not a comparison operator
name: "http_request_duration_float_field_with_string_value_comparison_operator_2",
key: &telemetrytypes.TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
value: "ERROR",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toString(attribute_float64_http$$request$$duration)",
expectedValue: "ERROR",
operator: qbtypes.FilterOperatorGreaterThan,
},
// bools
{
name: "feature_enabled_bool_field_with_string_value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "feature.enabled",
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
value: "true",
tblFieldName: "attribute_bool_feature$$enabled",
expectedFieldName: "toString(attribute_bool_feature$$enabled)",
expectedValue: "true",
},
{
name: "feature_flags_bool_field_with_mixed_array",
key: &telemetrytypes.TelemetryFieldKey{
Name: "feature.flags",
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
value: []any{true, "enabled", false},
tblFieldName: "attribute_bool_feature$$flags",
expectedFieldName: "toString(attribute_bool_feature$$flags)",
expectedValue: []any{"true", "enabled", "false"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resultFieldName, resultValue := DataTypeCollisionHandledFieldName(tt.key, tt.value, tt.tblFieldName, tt.operator)
assert.Equal(t, tt.expectedFieldName, resultFieldName)
assert.Equal(t, tt.expectedValue, resultValue)
})
}
}
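For reference, the operator-aware cases above hinge on one rule: a stringified number compared with an ordering operator keeps the column numeric, anything else falls back to string. A minimal standalone sketch of that decision, using a hypothetical helper name rather than the package's real internals:
package main
import (
"fmt"
"strconv"
)
// castNumericColumnForStringValue mirrors the rule exercised by the tests above:
// a float64-typed column compared against a string value stays numeric only when
// the value parses as a number and the operator is an ordering comparison.
func castNumericColumnForStringValue(column, value string, isComparison bool) string {
if _, err := strconv.ParseFloat(value, 64); err == nil && isComparison {
return fmt.Sprintf("toFloat64(%s)", column) // "9", "9.11" with >, >=, <, <=
}
return fmt.Sprintf("toString(%s)", column) // "ERROR", or equality checks
}
func main() {
col := "attribute_float64_http$$request$$duration"
fmt.Println(castNumericColumnForStringValue(col, "9.11", true))  // toFloat64(...)
fmt.Println(castNumericColumnForStringValue(col, "ERROR", true)) // toString(...)
fmt.Println(castNumericColumnForStringValue(col, "9", false))    // toString(...)
}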

View File

@@ -55,7 +55,7 @@ func (c *conditionBuilder) conditionFor(
tblFieldName, value = GetBodyJSONKey(ctx, key, operator, value)
}
tblFieldName, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, tblFieldName, operator)
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
// make use of case insensitive index for body
if tblFieldName == "body" {

View File

@@ -424,16 +424,6 @@ func TestFilterExprLogs(t *testing.T) {
expectedArgs: []any{"critical", "resolved", "open", true},
expectedErrorContains: "",
},
{
// this will result in failure from the DB side.
// the user will have to use attribute.status:string > open
category: "FREETEXT with conditions",
query: "critical NOT resolved status > open",
shouldPass: true,
expectedQuery: "WHERE (match(LOWER(body), LOWER(?)) AND NOT (match(LOWER(body), LOWER(?))) AND (toString(attributes_number['status']) > ? AND mapContains(attributes_number, 'status') = ?))",
expectedArgs: []any{"critical", "resolved", "open", true},
expectedErrorContains: "",
},
{
category: "FREETEXT with conditions",
query: "database error type=mysql",

View File

@@ -55,7 +55,7 @@ func (c *conditionBuilder) ConditionFor(
return "", nil
}
tblFieldName, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, tblFieldName, operator)
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
// key must exist to apply the main filter
expr := `if(mapContains(%s, %s), %s, true)`

View File

@@ -73,7 +73,7 @@ func (c *conditionBuilder) conditionFor(
}
}
} else {
tblFieldName, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, tblFieldName, operator)
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
}
// regular operators

View File

@@ -17,8 +17,7 @@ var (
typeRoleSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeResourceSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
// resources selectors are used to select either all or none
typeResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
typeResourcesSelectorRegex = regexp.MustCompile(`^org/[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
)
type SelectorCallbackFn func(context.Context, Claims) ([]Selector, error)
@@ -28,7 +27,7 @@ type Selector struct {
}
func NewSelector(typed Type, selector string) (Selector, error) {
err := IsValidSelector(typed, selector)
err := IsValidSelector(typed, Selector{val: selector})
if err != nil {
return Selector{}, err
}
@@ -36,26 +35,26 @@ func NewSelector(typed Type, selector string) (Selector, error) {
return Selector{val: selector}, nil
}
func IsValidSelector(typed Type, selector string) error {
func IsValidSelector(typed Type, selector Selector) error {
switch typed {
case TypeUser:
if !typeUserSelectorRegex.MatchString(selector) {
if !typeUserSelectorRegex.MatchString(selector.String()) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeUserSelectorRegex.String())
}
case TypeRole:
if !typeRoleSelectorRegex.MatchString(selector) {
if !typeRoleSelectorRegex.MatchString(selector.String()) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeRoleSelectorRegex.String())
}
case TypeOrganization:
if !typeOrganizationSelectorRegex.MatchString(selector) {
if !typeOrganizationSelectorRegex.MatchString(selector.String()) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeOrganizationSelectorRegex.String())
}
case TypeResource:
if !typeResourceSelectorRegex.MatchString(selector) {
if !typeResourceSelectorRegex.MatchString(selector.String()) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeResourceSelectorRegex.String())
}
case TypeResources:
if !typeResourcesSelectorRegex.MatchString(selector) {
if !typeResourcesSelectorRegex.MatchString(selector.String()) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelectorRegex, "selector must conform to regex %s", typeResourcesSelectorRegex.String())
}
}
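A short usage sketch for the updated API above: NewSelector now routes through IsValidSelector with a Selector value, so callers only deal with the string form. This assumes the import path follows the pkg/types/... pattern used elsewhere in this diff and that the Type constants from the switch are exported as shown:
package main
import (
"fmt"
"github.com/SigNoz/signoz/pkg/types/authtypes"
)
func main() {
// an organization selector must be a plain UUID per typeOrganizationSelectorRegex
selector, err := authtypes.NewSelector(authtypes.TypeOrganization, "0199c47d-f61b-7833-bc5f-c0730f12f046")
if err != nil {
fmt.Println("invalid selector:", err)
return
}
fmt.Println(selector.String())
}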

View File

@@ -24,7 +24,7 @@ type Transaction struct {
}
func NewObject(resource Resource, selector Selector) (*Object, error) {
err := IsValidSelector(resource.Type, selector.val)
err := IsValidSelector(resource.Type, selector)
if err != nil {
return nil, err
}

View File

@@ -30,8 +30,8 @@ var (
type Typeable interface {
Type() Type
Name() Name
Prefix(orgId valuer.UUID) string
Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error)
Prefix() string
Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error)
}
type Type struct{ valuer.String }

View File

@@ -3,7 +3,6 @@ package authtypes
import (
"strings"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -11,7 +10,7 @@ var _ Typeable = new(typeableOrganization)
type typeableOrganization struct{}
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := strings.Join([]string{typeableOrganization.Type().StringValue(), selector.String()}, ":")
@@ -29,6 +28,6 @@ func (typeableOrganization *typeableOrganization) Name() Name {
return MustNewName("organization")
}
func (typeableOrganization *typeableOrganization) Prefix(_ valuer.UUID) string {
func (typeableOrganization *typeableOrganization) Prefix() string {
return typeableOrganization.Type().StringValue()
}

View File

@@ -1,7 +1,8 @@
package authtypes
import (
"github.com/SigNoz/signoz/pkg/valuer"
"strings"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -23,10 +24,10 @@ func MustNewTypeableResource(name Name) Typeable {
return typeableesource
}
func (typeableResource *typeableResource) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableResource *typeableResource) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := typeableResource.Prefix(orgID) + "/" + selector.String()
object := typeableResource.Prefix() + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -41,7 +42,6 @@ func (typeableResource *typeableResource) Name() Name {
return typeableResource.name
}
func (typeableResource *typeableResource) Prefix(orgID valuer.UUID) string {
// example: resource:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/dashboard
return typeableResource.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableResource.Name().String()
func (typeableResource *typeableResource) Prefix() string {
return strings.Join([]string{typeableResource.Type().StringValue(), typeableResource.Name().String()}, ":")
}

View File

@@ -1,7 +1,8 @@
package authtypes
import (
"github.com/SigNoz/signoz/pkg/valuer"
"strings"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -23,10 +24,10 @@ func MustNewTypeableResources(name Name) Typeable {
return resources
}
func (typeableResources *typeableResources) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableResources *typeableResources) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := typeableResources.Prefix(orgID) + "/" + selector.String()
object := typeableResources.Prefix() + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -41,7 +42,6 @@ func (typeableResources *typeableResources) Name() Name {
return typeableResources.name
}
func (typeableResources *typeableResources) Prefix(orgID valuer.UUID) string {
// example: resources:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/dashboards
return typeableResources.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableResources.Name().String()
func (typeableResources *typeableResources) Prefix() string {
return strings.Join([]string{typeableResources.Type().StringValue(), typeableResources.Name().String()}, ":")
}
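With the change above, the org scoping moves out of Prefix() and into the selector itself (the resources selector regex now requires an org/<uuid> form). A sketch of the object string a "dashboards" resources type would produce, reusing the example UUID from the removed comments:
package main
import "fmt"
func main() {
// Prefix() after this change: "<type>:<name>"
prefix := "resources" + ":" + "dashboards"
// Tuples() appends "/" + selector, and the selector now carries the org scope
object := prefix + "/" + "org/0199c47d-f61b-7833-bc5f-c0730f12f046"
fmt.Println(object) // resources:dashboards/org/0199c47d-f61b-7833-bc5f-c0730f12f046
}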

View File

@@ -1,7 +1,8 @@
package authtypes
import (
"github.com/SigNoz/signoz/pkg/valuer"
"strings"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -9,10 +10,10 @@ var _ Typeable = new(typeableRole)
type typeableRole struct{}
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := typeableRole.Prefix(orgID) + "/" + selector.String()
object := strings.Join([]string{typeableRole.Type().StringValue(), selector.String()}, ":")
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -27,7 +28,6 @@ func (typeableRole *typeableRole) Name() Name {
return MustNewName("role")
}
func (typeableRole *typeableRole) Prefix(orgID valuer.UUID) string {
// example: role:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/role
return typeableRole.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableRole.Name().String()
func (typeableRole *typeableRole) Prefix() string {
return typeableRole.Type().StringValue()
}

View File

@@ -1,7 +1,8 @@
package authtypes
import (
"github.com/SigNoz/signoz/pkg/valuer"
"strings"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -9,10 +10,10 @@ var _ Typeable = new(typeableUser)
type typeableUser struct{}
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
object := typeableUser.Prefix(orgID) + "/" + selector.String()
object := strings.Join([]string{typeableUser.Type().StringValue(), selector.String()}, ":")
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}
@@ -27,7 +28,6 @@ func (typeableUser *typeableUser) Name() Name {
return MustNewName("user")
}
func (typeableUser *typeableUser) Prefix(orgID valuer.UUID) string {
// example: user:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/user
return typeableUser.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableUser.Name().String()
func (typeableUser *typeableUser) Prefix() string {
return typeableUser.Type().StringValue()
}

View File

@@ -13,8 +13,8 @@ import (
)
var (
TypeableResourceDashboard = authtypes.MustNewTypeableResource(authtypes.MustNewName("dashboard"))
TypeableResourcesDashboards = authtypes.MustNewTypeableResources(authtypes.MustNewName("dashboards"))
ResourceDashboard = authtypes.MustNewTypeableResource(authtypes.MustNewName("dashboard"))
ResourcesDashboards = authtypes.MustNewTypeableResources(authtypes.MustNewName("dashboards"))
)
type StorableDashboard struct {

View File

@@ -137,14 +137,6 @@ func (f FilterOperator) IsNegativeOperator() bool {
return true
}
func (f FilterOperator) IsComparisonOperator() bool {
switch f {
case FilterOperatorGreaterThan, FilterOperatorGreaterThanOrEq, FilterOperatorLessThan, FilterOperatorLessThanOrEq:
return true
}
return false
}
type OrderDirection struct {
valuer.String
}

View File

@@ -134,11 +134,6 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
return err
}
// Validate GroupBy
if err := q.validateGroupByFields(); err != nil {
return err
}
if requestType != RequestTypeRaw && requestType != RequestTypeTrace && len(q.Aggregations) > 0 {
if err := q.validateOrderByForAggregation(); err != nil {
return err
@@ -173,27 +168,6 @@ func (q *QueryBuilderQuery[T]) validateSelectFields() error {
"isRoot and isEntryPoint fields are not supported in selectFields",
)
}
// for logs the selectFields is not present.
// in traces, timestamp is added by default, so it will conflict with timestamp attribute.
if v.Name == "timestamp" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"timestamp field is not supported in selectFields, it's added by default where needed",
)
}
}
return nil
}
func (q *QueryBuilderQuery[T]) validateGroupByFields() error {
for _, v := range q.GroupBy {
if v.Name == "timestamp" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"timestamp field is not supported in groupBy, it's added by default where needed",
)
}
}
return nil
}

View File

@@ -21,10 +21,6 @@ var (
ErrCodeRoleFailedTransactionsFromString = errors.MustNewCode("role_failed_transactions_from_string")
)
var (
TypeableResourcesRoles = authtypes.MustNewTypeableResources(authtypes.MustNewName("roles"))
)
type StorableRole struct {
bun.BaseModel `bun:"table:role"`
@@ -170,7 +166,7 @@ func (role *PatchableRole) UnmarshalJSON(data []byte) error {
return nil
}
func GetAdditionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Relation, additions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
func GetAdditionTuples(id valuer.UUID, relation authtypes.Relation, additions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, object := range additions {
@@ -183,7 +179,6 @@ func GetAdditionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Rel
),
relation,
[]authtypes.Selector{object.Selector},
orgID,
)
if err != nil {
return nil, err
@@ -195,8 +190,8 @@ func GetAdditionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Rel
return tuples, nil
}
func GetDeletionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Relation, deletions []*authtypes.Object) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
func GetDeletionTuples(id valuer.UUID, relation authtypes.Relation, deletions []*authtypes.Object) ([]*openfgav1.TupleKeyWithoutCondition, error) {
tuples := make([]*openfgav1.TupleKeyWithoutCondition, 0)
for _, object := range deletions {
typeable := authtypes.MustNewTypeableFromType(object.Resource.Type, object.Resource.Name)
@@ -208,13 +203,21 @@ func GetDeletionTuples(id valuer.UUID, orgID valuer.UUID, relation authtypes.Rel
),
relation,
[]authtypes.Selector{object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, transactionTuples...)
deletionTuples := make([]*openfgav1.TupleKeyWithoutCondition, len(transactionTuples))
for idx, tuple := range transactionTuples {
deletionTuples[idx] = &openfgav1.TupleKeyWithoutCondition{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
}
}
tuples = append(tuples, deletionTuples...)
}
return tuples, nil

View File

@@ -207,7 +207,6 @@ func (r *PostableRule) processRuleDefaults() error {
q.Expression = qLabel
}
}
// added alerts v2 fields
if r.SchemaVersion == DefaultSchemaVersion {
thresholdName := CriticalThresholdName
@@ -216,20 +215,12 @@ func (r *PostableRule) processRuleDefaults() error {
thresholdName = severity
}
}
// For anomaly detection with ValueIsBelow, negate the target
targetValue := r.RuleCondition.Target
if r.RuleType == RuleTypeAnomaly && r.RuleCondition.CompareOp == ValueIsBelow && targetValue != nil {
negated := -1 * *targetValue
targetValue = &negated
}
thresholdData := RuleThresholdData{
Kind: BasicThresholdKind,
Spec: BasicRuleThresholds{{
Name: thresholdName,
TargetUnit: r.RuleCondition.TargetUnit,
TargetValue: targetValue,
TargetValue: r.RuleCondition.Target,
MatchType: r.RuleCondition.MatchType,
CompareOp: r.RuleCondition.CompareOp,
Channels: r.PreferredChannels,
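For context, the block removed above flipped the sign of the target for anomaly rules using ValueIsBelow, so the stored threshold matched the negated anomaly score. A tiny sketch of that arithmetic, reusing the 2.0 target from the deleted tests that follow:
package main
import "fmt"
func main() {
target := 2.0
negated := -1 * target // what the removed code stored as TargetValue
fmt.Println(negated)   // -2.0; a point at -2.1 is below it, so the deleted "should alert" case fires
}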

View File

@@ -718,353 +718,3 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
assert.Equal(t, 1, len(vector))
}
func TestAnomalyNegationShouldAlert(t *testing.T) {
tests := []struct {
name string
ruleJSON []byte
series v3.Series
shouldAlert bool
expectedValue float64
}{
{
name: "anomaly rule with ValueIsBelow - should alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // below & at least once, should alert
{Timestamp: 2000, Value: -2.3},
},
},
shouldAlert: true,
expectedValue: -2.1,
},
{
name: "anomaly rule with ValueIsBelow; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`), // below & at least once, no value below -2.0
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -1.9},
{Timestamp: 2000, Value: -1.8},
},
},
shouldAlert: false,
},
{
name: "anomaly rule with ValueIsAbove; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyAboveTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "1",
"selectedQuery": "A"
}
}`), // above & at least once, should alert
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 2.1}, // above 2.0, should alert
{Timestamp: 2000, Value: 2.2},
},
},
shouldAlert: true,
expectedValue: 2.1,
},
{
name: "anomaly rule with ValueIsAbove; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyAboveTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "1",
"op": "1",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 1.1},
{Timestamp: 2000, Value: 1.2},
},
},
shouldAlert: false,
},
{
name: "anomaly rule with ValueIsBelow and AllTheTimes; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowAllTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "2",
"op": "2",
"selectedQuery": "A"
}
}`), // below and all the times
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -2.1}, // all below -2
{Timestamp: 2000, Value: -2.2},
{Timestamp: 3000, Value: -2.5},
},
},
shouldAlert: true,
expectedValue: -2.1, // max value when all are below threshold
},
{
name: "anomaly rule with ValueIsBelow and AllTheTimes; should not alert",
ruleJSON: []byte(`{
"alert": "AnomalyBelowAllTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 2.0,
"matchType": "2",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -3.0},
{Timestamp: 2000, Value: -1.0}, // above -2, breaks condition
{Timestamp: 3000, Value: -2.5},
},
},
shouldAlert: false,
},
{
name: "anomaly rule with ValueOutsideBounds; should alert",
ruleJSON: []byte(`{
"alert": "AnomalyOutOfBoundsTest",
"ruleType": "anomaly_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 7.0,
"matchType": "1",
"op": "7",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: -8.0}, // abs(8) >= 7, alert
{Timestamp: 2000, Value: 5.0},
},
},
shouldAlert: true,
expectedValue: -8.0,
},
{
name: "non-anomaly threshold rule with ValueIsBelow; should alert",
ruleJSON: []byte(`{
"alert": "ThresholdTest",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 90.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 80.0}, // below 90, should alert
{Timestamp: 2000, Value: 85.0},
},
},
shouldAlert: true,
expectedValue: 80.0,
},
{
name: "non-anomaly rule with ValueIsBelow - should not alert",
ruleJSON: []byte(`{
"alert": "ThresholdTest",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 50.0,
"matchType": "1",
"op": "2",
"selectedQuery": "A"
}
}`),
series: v3.Series{
Labels: map[string]string{"host": "server1"},
Points: []v3.Point{
{Timestamp: 1000, Value: 60.0}, // not below 50, should not alert
{Timestamp: 2000, Value: 90.0},
},
},
shouldAlert: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
rule := PostableRule{}
err := json.Unmarshal(tt.ruleJSON, &rule)
if err != nil {
t.Fatalf("Failed to unmarshal rule: %v", err)
}
ruleThreshold, err := rule.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
t.Fatalf("unexpected error from GetRuleThreshold: %v", err)
}
resultVector, err := ruleThreshold.ShouldAlert(tt.series, "")
if err != nil {
t.Fatalf("unexpected error from ShouldAlert: %v", err)
}
shouldAlert := len(resultVector) > 0
if shouldAlert != tt.shouldAlert {
t.Errorf("Expected shouldAlert=%v, got %v. %s",
tt.shouldAlert, shouldAlert, tt.name)
}
if tt.shouldAlert && len(resultVector) > 0 {
sample := resultVector[0]
if sample.V != tt.expectedValue {
t.Errorf("Expected alert value=%.2f, got %.2f. %s",
tt.expectedValue, sample.V, tt.name)
}
}
})
}
}

View File

@@ -136,3 +136,97 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
func DataTypeCollisionHandledFieldName(key *TelemetryFieldKey, value any, tblFieldName string) (string, any) {
// This block of code exists to handle data type collisions.
// We don't want to fail requests when a key has more than one data type.
// Take `http.status_code` as an example, and consider that users sent both string and number values.
// When they search for `http.status_code=200`, we search across both the number and string columns
// and return the results from both.
// While we expect users not to send mixed data types, it inevitably happens,
// so we handle the collisions here.
switch key.FieldDataType {
case FieldDataTypeString:
switch v := value.(type) {
case float64:
// try to convert the string value to a number
tblFieldName = castFloat(tblFieldName)
case []any:
if allFloats(v) {
tblFieldName = castFloat(tblFieldName)
} else if hasString(v) {
_, value = castString(tblFieldName), toStrings(v)
}
case bool:
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case FieldDataTypeFloat64, FieldDataTypeInt64, FieldDataTypeNumber:
switch v := value.(type) {
// why? CH returns an error for a simple check like
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200:
// DB::Exception: Bad get: has UInt64, requested Float64.
// How does it work in v4? v4 prepares the full query with the values in the query string.
// When we format the float it becomes attributes_number['http.status_code'] = 200.000,
// which CH gladly accepts and doesn't throw an error.
// However, when the value is passed as a query arg, the default formatter
// https://github.com/ClickHouse/clickhouse-go/blob/757e102f6d8c6059d564ce98795b4ce2a101b1a5/bind.go#L393
// is used, which prepares the final query as attributes_number['http.status_code'] = 200, giving this error.
// The following is one way to work around it.
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
case FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)
case []any:
if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}
}
return tblFieldName, value
}
func castFloat(col string) string { return fmt.Sprintf("toFloat64OrNull(%s)", col) }
func castFloatHack(col string) string { return fmt.Sprintf("toFloat64(%s)", col) }
func castString(col string) string { return fmt.Sprintf("toString(%s)", col) }
func allFloats(in []any) bool {
for _, x := range in {
if _, ok := x.(float64); !ok {
return false
}
}
return true
}
func hasString(in []any) bool {
for _, x := range in {
if _, ok := x.(string); ok {
return true
}
}
return false
}
func toStrings(in []any) []any {
out := make([]any, len(in))
for i, x := range in {
out[i] = fmt.Sprintf("%v", x)
}
return out
}
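A minimal usage sketch for the three-argument variant defined above, using the same field keys and column names as the tests that follow; the expected outputs mirror those test cases:
package main
import (
"fmt"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
func main() {
// numeric literal against a string column: the column is wrapped in toFloat64OrNull
statusKey := &telemetrytypes.TelemetryFieldKey{Name: "http.status_code", FieldDataType: telemetrytypes.FieldDataTypeString}
col, val := telemetrytypes.DataTypeCollisionHandledFieldName(statusKey, float64(200), "attribute_string_http$$status_code")
fmt.Println(col, val) // toFloat64OrNull(attribute_string_http$$status_code) 200
// float literal against a number column: wrapped in toFloat64 to work around
// the clickhouse-go argument binding issue described in the comments above
durKey := &telemetrytypes.TelemetryFieldKey{Name: "http.request.duration", FieldDataType: telemetrytypes.FieldDataTypeFloat64}
col, val = telemetrytypes.DataTypeCollisionHandledFieldName(durKey, float64(1.5), "attribute_float64_http$$request$$duration")
fmt.Println(col, val) // toFloat64(attribute_float64_http$$request$$duration) 1.5
}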

View File

@@ -2,6 +2,8 @@ package telemetrytypes
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestGetFieldKeyFromKeyText(t *testing.T) {
@@ -91,3 +93,115 @@ func TestGetFieldKeyFromKeyText(t *testing.T) {
}
}
}
func TestDataTypeCollisionHandledFieldName(t *testing.T) {
tests := []struct {
name string
key *TelemetryFieldKey
value any
tblFieldName string
expectedFieldName string
expectedValue any
}{
{
name: "http_status_code_string_field_with_numeric_value",
key: &TelemetryFieldKey{
Name: "http.status_code",
FieldDataType: FieldDataTypeString,
},
value: float64(200),
tblFieldName: "attribute_string_http$$status_code",
expectedFieldName: "toFloat64OrNull(attribute_string_http$$status_code)",
expectedValue: float64(200),
},
{
name: "service_enabled_string_field_with_bool_value",
key: &TelemetryFieldKey{
Name: "service.enabled",
FieldDataType: FieldDataTypeString,
},
value: true,
tblFieldName: "attribute_string_service$$enabled",
expectedFieldName: "attribute_string_service$$enabled",
expectedValue: "true",
},
{
name: "http_method_string_field_with_string_value",
key: &TelemetryFieldKey{
Name: "http.method",
FieldDataType: FieldDataTypeString,
},
value: "GET",
tblFieldName: "attribute_string_http$$method",
expectedFieldName: "attribute_string_http$$method",
expectedValue: "GET",
},
{
name: "response_times_string_field_with_numeric_array",
key: &TelemetryFieldKey{
Name: "response.times",
FieldDataType: FieldDataTypeString,
},
value: []any{float64(100.5), float64(200.3), float64(150.7)},
tblFieldName: "attribute_string_response$$times",
expectedFieldName: "toFloat64OrNull(attribute_string_response$$times)",
expectedValue: []any{float64(100.5), float64(200.3), float64(150.7)},
},
{
name: "error_codes_string_field_with_mixed_array",
key: &TelemetryFieldKey{
Name: "error.codes",
FieldDataType: FieldDataTypeString,
},
value: []any{float64(500), "TIMEOUT", float64(503)},
tblFieldName: "attribute_string_error$$codes",
expectedFieldName: "attribute_string_error$$codes",
expectedValue: []any{"500", "TIMEOUT", "503"},
},
// numbers
{
name: "http_request_duration_float_field_with_string_value",
key: &TelemetryFieldKey{
Name: "http.request.duration",
FieldDataType: FieldDataTypeFloat64,
},
value: "1234.56",
tblFieldName: "attribute_float64_http$$request$$duration",
expectedFieldName: "toString(attribute_float64_http$$request$$duration)",
expectedValue: "1234.56",
},
// bools
{
name: "feature_enabled_bool_field_with_string_value",
key: &TelemetryFieldKey{
Name: "feature.enabled",
FieldDataType: FieldDataTypeBool,
},
value: "true",
tblFieldName: "attribute_bool_feature$$enabled",
expectedFieldName: "toString(attribute_bool_feature$$enabled)",
expectedValue: "true",
},
{
name: "feature_flags_bool_field_with_mixed_array",
key: &TelemetryFieldKey{
Name: "feature.flags",
FieldDataType: FieldDataTypeBool,
},
value: []any{true, "enabled", false},
tblFieldName: "attribute_bool_feature$$flags",
expectedFieldName: "toString(attribute_bool_feature$$flags)",
expectedValue: []any{"true", "enabled", "false"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
resultFieldName, resultValue := DataTypeCollisionHandledFieldName(tt.key, tt.value, tt.tblFieldName)
assert.Equal(t, tt.expectedFieldName, resultFieldName)
assert.Equal(t, tt.expectedValue, resultValue)
})
}
}

View File

@@ -816,429 +816,3 @@ def test_logs_time_series_count(
),
"value": 9,
} in series[1]["values"]
def test_datatype_collision(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_jwt_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert logs with data type collision scenarios to test DataTypeCollisionHandledFieldName function
Tests:
1. severity_number comparison with string value
2. http.status_code with mixed string/number values
3. response.time with string values in numeric field
4. Edge cases: empty strings
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = []
# Logs with string values in numeric fields
severity_levels = ["DEBUG", "INFO", "WARN"]
for i in range(3):
logs.append(
Logs(
timestamp=now - timedelta(microseconds=i + 1),
resources={
"deployment.environment": "production",
"service.name": "java",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "java",
"http.status_code": "200", # String value
"response.time": "123.45", # String value
},
body=f"Test log {i+1} with string values",
severity_text=severity_levels[i], # DEBUG(5-8), INFO(9-12), WARN(13-16)
)
)
# Logs with numeric values in string fields
severity_levels_2 = ["ERROR", "FATAL", "TRACE", "DEBUG"]
for i in range(4):
logs.append(
Logs(
timestamp=now - timedelta(microseconds=i + 10),
resources={
"deployment.environment": "production",
"service.name": "go",
"os.type": "linux",
"host.name": f"linux-00{i%2}",
"cloud.provider": "integration",
"cloud.account.id": f"00{i%2}",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"code.line": i + 1,
"telemetry.sdk.language": "go",
"http.status_code": 404, # Numeric value
"response.time": 456.78, # Numeric value
},
body=f"Test log {i+4} with numeric values",
severity_text=severity_levels_2[i], # ERROR(17-20), FATAL(21-24), TRACE(1-4), DEBUG(5-8)
)
)
# Edge case: empty string and zero value
logs.append(
Logs(
timestamp=now - timedelta(microseconds=20),
resources={
"deployment.environment": "production",
"service.name": "python",
"os.type": "linux",
"host.name": "linux-002",
"cloud.provider": "integration",
"cloud.account.id": "002",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/integration.py",
"code.function": "com.example.Integration.process",
"code.line": 1,
"telemetry.sdk.language": "python",
"http.status_code": "", # Empty string
"response.time": 0, # Zero value
},
body="Edge case test log",
severity_text="ERROR",
)
)
insert_logs(logs)
token = get_jwt_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)
# count() of all logs where severity_number > '7'
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "severity_number > '7'"},
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
assert count == 5
# count() of all logs where severity_number > '7.0'
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "severity_number > '7.0'"},
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
assert count == 5
# Test 2: severity_number comparison with string value
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "severity_number = '13'"}, # String comparison with numeric field
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# WARN severity maps to 13-16 range, so should find 1 log with severity_number = 13
assert count == 1
# Test 3: http.status_code with numeric value (query contains number, actual value is string "200")
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "http.status_code = 200"}, # Numeric comparison with string field
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# Should return 3 logs with http.status_code = "200" (first 3 logs have string value "200")
assert count == 3
# Test 4: http.status_code with string value (query contains string, actual value is numeric 404)
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "http.status_code = '404'"}, # String comparison with numeric field
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# Should return 4 logs with http.status_code = 404 (next 4 logs have numeric value 404)
assert count == 4
# Test 5: Edge case - empty string comparison
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(
datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
- timedelta(minutes=5)
).timestamp()
* 1000
),
"end": int(
datetime.now(tz=timezone.utc)
.replace(second=0, microsecond=0)
.timestamp()
* 1000
),
"requestType": "scalar",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"stepInterval": 60,
"disabled": False,
"filter": {"expression": "http.status_code = ''"}, # Empty string comparison
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
count = results[0]["data"][0][0]
# Should return 1 log with empty http.status_code (edge case log)
assert count == 1