Compare commits

2 Commits

Author SHA1 Message Date
Nikhil Soni
18d71bb472 fix: switch to using http_host instead of external_http_url
external_http_url stores the hostname, but the name
is confusing, so switching to http_host
2026-02-06 13:42:14 +05:30
Nikhil Soni
4d9282ad18 fix: fix inconsistent use of http attributes in ext. api
HTTP attributes like http.url and url.full, along with server.name and net.peer.name,
were used inconsistently, leading to bugs in aggregation queries. They were also
expensive to query, since these attributes are stored as JSON instead of
direct columns. Using columns like http_url optimises these queries, since
the column is populated from all relevant attributes during ingestion itself.
2026-02-06 13:42:14 +05:30
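
To make the optimisation in the commit message above concrete, here is a minimal, hypothetical Go sketch (not the actual SigNoz ingestion code; spanAttrs and deriveHTTPURL are invented names): the value for a dedicated http_url column is derived once at ingestion time from whichever of the relevant attributes is present, so aggregation queries read a plain column instead of extracting attributes from JSON.

package main

import "fmt"

// spanAttrs models a span's attribute map as seen at ingestion time
// (hypothetical shape; real attribute values are not necessarily strings).
type spanAttrs map[string]string

// deriveHTTPURL resolves a single value for the dedicated http_url column
// from whichever relevant attribute is set, checked in order of preference.
func deriveHTTPURL(attrs spanAttrs) string {
    // Attributes named in the commit message: url.full, http.url, server.name, net.peer.name.
    for _, key := range []string{"url.full", "http.url", "server.name", "net.peer.name"} {
        if v, ok := attrs[key]; ok && v != "" {
            return v
        }
    }
    return ""
}

func main() {
    attrs := spanAttrs{"net.peer.name": "checkout-svc", "http.url": "http://checkout-svc/cart"}
    fmt.Println(deriveHTTPURL(attrs)) // -> http://checkout-svc/cart
}

Because the column is filled during ingestion, the JSON extraction happens once per span rather than on every aggregation query.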
132 changed files with 2755 additions and 5675 deletions

.github/CODEOWNERS

@@ -133,8 +133,5 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
## Dashboard Libs + Components
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend
## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend


@@ -93,13 +93,3 @@ jobs:
run: |
go run cmd/enterprise/*.go generate openapi
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
- name: node-install
uses: actions/setup-node@v5
with:
node-version: "22"
- name: install-frontend
run: cd frontend && yarn install
- name: generate-api-clients
run: |
cd frontend && yarn generate:api
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)

.gitignore

@@ -1,8 +1,11 @@
node_modules
# editor
.vscode
!.vscode/settings.json
.zed
.idea
deploy/docker/environment_tiny/common_test
frontend/node_modules
@@ -31,8 +34,6 @@ frontend/yarn-debug.log*
frontend/yarn-error.log*
frontend/src/constants/env.ts
.idea
**/build
**/storage
**/locust-scripts/__pycache__/


@@ -18,6 +18,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -76,15 +78,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)


@@ -14,6 +14,7 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -28,6 +29,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -115,15 +118,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)
if err != nil {


@@ -607,178 +607,6 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/fields/keys:
get:
deprecated: false
description: This endpoint returns field keys
operationId: GetFieldsKeys
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field keys
tags:
- fields
/api/v1/fields/values:
get:
deprecated: false
description: This endpoint returns field values
operationId: GetFieldsValues
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
- in: query
name: name
schema:
type: string
- in: query
name: existingQuery
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field values
tags:
- fields
/api/v1/getResetPasswordToken/{id}:
get:
deprecated: false
@@ -2398,12 +2226,6 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"422":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unprocessable Entity
"500":
content:
application/json:
@@ -4100,9 +3922,19 @@ components:
isMonotonic:
type: boolean
temporality:
$ref: '#/components/schemas/MetrictypesTemporality'
enum:
- delta
- cumulative
- unspecified
type: string
type:
$ref: '#/components/schemas/MetrictypesType'
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
unit:
type: string
required:
@@ -4125,7 +3957,13 @@ components:
minimum: 0
type: integer
type:
$ref: '#/components/schemas/MetrictypesType'
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
unit:
type: string
required:
@@ -4186,11 +4024,6 @@ components:
- percentage
- totalValue
type: object
MetricsexplorertypesTreemapMode:
enum:
- timeseries
- samples
type: string
MetricsexplorertypesTreemapRequest:
properties:
end:
@@ -4201,7 +4034,10 @@ components:
limit:
type: integer
mode:
$ref: '#/components/schemas/MetricsexplorertypesTreemapMode'
enum:
- timeseries
- samples
type: string
start:
format: int64
type: integer
@@ -4236,9 +4072,19 @@ components:
metricName:
type: string
temporality:
$ref: '#/components/schemas/MetrictypesTemporality'
enum:
- delta
- cumulative
- unspecified
type: string
type:
$ref: '#/components/schemas/MetrictypesType'
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
unit:
type: string
required:
@@ -4249,20 +4095,6 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesTemporality:
enum:
- delta
- cumulative
- unspecified
type: string
MetrictypesType:
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
PreferencetypesPreference:
properties:
allowedScopes:
@@ -4355,8 +4187,6 @@ components:
type: string
unit:
type: string
required:
- name
type: object
Querybuildertypesv5QueryData:
properties:
@@ -4418,68 +4248,6 @@ components:
format: date-time
type: string
type: object
TelemetrytypesGettableFieldKeys:
properties:
complete:
type: boolean
keys:
additionalProperties:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
nullable: true
type: object
required:
- keys
- complete
type: object
TelemetrytypesGettableFieldValues:
properties:
complete:
type: boolean
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
required:
- values
- complete
type: object
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
fieldDataType:
type: string
name:
type: string
signal:
type: string
unit:
type: string
required:
- name
type: object
TelemetrytypesTelemetryFieldValues:
properties:
boolValues:
items:
type: boolean
type: array
numberValues:
items:
format: double
type: number
type: array
relatedValues:
items:
type: string
type: array
stringValues:
items:
type: string
type: array
type: object
TypesChangePasswordRequest:
properties:
newPassword:
@@ -4608,9 +4376,6 @@ components:
type: string
orgId:
type: string
required:
- orgId
- email
type: object
TypesPostableInvite:
properties:


@@ -2,18 +2,12 @@ package openfgaauthz
import (
"context"
"slices"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -21,224 +15,50 @@ import (
type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store roletypes.Store
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
openfgaServer: openfgaServer,
licensing: licensing,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
registry: registry,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.openfgaServer.Start(ctx)
return provider.pkgAuthzService.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.openfgaServer.Stop(ctx)
return provider.pkgAuthzService.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.openfgaServer.Check(ctx, tuple)
return provider.pkgAuthzService.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
tuples := make([]*openfgav1.TupleKey, 0)
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
if err != nil {
return err
}
tuples = append(tuples, grantTuples...)
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
if err != nil {
return err
}
tuples = append(tuples, managedRoleTuples...)
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range provider.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, provider.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range provider.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.Write(ctx, additionTuples, deletionTuples)
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
@@ -246,95 +66,33 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
return nil
}
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
return provider.store.Delete(ctx, orgID, id)
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := []*openfgav1.TupleKey{}
// Grant the admin role to the user
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
adminTuple, err := authtypes.TypeableRole.Tuples(
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return nil, err
return err
}
tuples = append(tuples, adminTuple...)
// Grant the admin role to the anonymous user
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
anonymousTuple, err := authtypes.TypeableRole.Tuples(
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, anonymousTuple...)
return tuples, nil
return nil
}
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
transactionsByRole := make(map[string][]*authtypes.Transaction)
for _, register := range provider.registry {
for roleName, txns := range register.MustGetManagedRoleTransactions() {
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
}
}
tuples := make([]*openfgav1.TupleKey, 0)
for roleName, transactions := range transactionsByRole {
for _, txn := range transactions {
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
txnTuples, err := typeable.Tuples(
authtypes.MustNewSubject(
authtypes.TypeableRole,
roleName,
orgID,
&authtypes.RelationAssignee,
),
txn.Relation,
[]authtypes.Selector{txn.Object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, txnTuples...)
}
}
return tuples, nil
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
}


@@ -1,83 +0,0 @@
package openfgaserver
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type Server struct {
pkgAuthzService authz.AuthZ
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return server.pkgAuthzService.Write(ctx, additions, deletions)
}


@@ -11,6 +11,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/types"
@@ -25,11 +26,13 @@ type module struct {
pkgDashboardModule dashboard.Module
store dashboardtypes.Store
settings factory.ScopedProviderSettings
roleSetter role.Setter
granter role.Granter
querier querier.Querier
licensing licensing.Licensing
}
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
@@ -37,6 +40,8 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
pkgDashboardModule: pkgDashboardModule,
store: store,
settings: scopedProviderSettings,
roleSetter: roleSetter,
granter: granter,
querier: querier,
licensing: licensing,
}
@@ -56,6 +61,29 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
if err != nil {
return err
}
additionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
if err != nil {
return err
}
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
if err != nil {
return err
@@ -100,7 +128,6 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
return []authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
}, storableDashboard.OrgID, nil
}
@@ -163,6 +190,29 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
@@ -200,6 +250,10 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
return module.pkgDashboardModule.List(ctx, orgID)
}
@@ -212,27 +266,34 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{
Type: authtypes.TypeMetaResource,
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
),
},
},
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
if err != nil {
return err
}
}
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
return module.store.DeletePublic(ctx, dashboardID.StringValue())
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,165 @@
package implrole
import (
"context"
"slices"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
licensing licensing.Licensing
registry []role.RegisterTypeable
}
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return &setter{
store: store,
authz: authz,
licensing: licensing,
registry: registry,
}
}
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range setter.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, setter.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range setter.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := setter.
authz.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return setter.store.Delete(ctx, orgID, id)
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}


@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
@@ -55,6 +56,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),


@@ -211,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
r.Use(otelmux.Middleware(
"apiserver",
@@ -237,6 +237,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)


@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,8 +63,6 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}
var _ baserules.Rule = (*AnomalyRule)(nil)
func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -492,7 +490,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -555,7 +553,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: r.EvalWindow(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),


@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
evalTime := baseTime.Add(5 * time.Minute)
target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,


@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,7 +213,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}
// newTask returns an appropriate group for the rule type
// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)


@@ -1,222 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
QueryClient,
QueryFunction,
QueryKey,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetFieldsKeys200,
GetFieldsKeysParams,
GetFieldsValues200,
GetFieldsValuesParams,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint returns field keys
* @summary Get field keys
*/
export const getFieldsKeys = (
params?: GetFieldsKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetFieldsKeys200>({
url: `/api/v1/fields/keys`,
method: 'GET',
params,
signal,
});
};
export const getGetFieldsKeysQueryKey = (params?: GetFieldsKeysParams) => {
return ['getFieldsKeys', ...(params ? [params] : [])] as const;
};
export const getGetFieldsKeysQueryOptions = <
TData = Awaited<ReturnType<typeof getFieldsKeys>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsKeys>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetFieldsKeysQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getFieldsKeys>>> = ({
signal,
}) => getFieldsKeys(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getFieldsKeys>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetFieldsKeysQueryResult = NonNullable<
Awaited<ReturnType<typeof getFieldsKeys>>
>;
export type GetFieldsKeysQueryError = RenderErrorResponseDTO;
/**
* @summary Get field keys
*/
export function useGetFieldsKeys<
TData = Awaited<ReturnType<typeof getFieldsKeys>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsKeys>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetFieldsKeysQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get field keys
*/
export const invalidateGetFieldsKeys = async (
queryClient: QueryClient,
params?: GetFieldsKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetFieldsKeysQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint returns field values
* @summary Get field values
*/
export const getFieldsValues = (
params?: GetFieldsValuesParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetFieldsValues200>({
url: `/api/v1/fields/values`,
method: 'GET',
params,
signal,
});
};
export const getGetFieldsValuesQueryKey = (params?: GetFieldsValuesParams) => {
return ['getFieldsValues', ...(params ? [params] : [])] as const;
};
export const getGetFieldsValuesQueryOptions = <
TData = Awaited<ReturnType<typeof getFieldsValues>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsValuesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsValues>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetFieldsValuesQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getFieldsValues>>> = ({
signal,
}) => getFieldsValues(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getFieldsValues>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetFieldsValuesQueryResult = NonNullable<
Awaited<ReturnType<typeof getFieldsValues>>
>;
export type GetFieldsValuesQueryError = RenderErrorResponseDTO;
/**
* @summary Get field values
*/
export function useGetFieldsValues<
TData = Awaited<ReturnType<typeof getFieldsValues>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsValuesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsValues>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetFieldsValuesQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get field values
*/
export const invalidateGetFieldsValues = async (
queryClient: QueryClient,
params?: GetFieldsValuesParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetFieldsValuesQueryKey(params) },
options,
);
return queryClient;
};


@@ -762,6 +762,18 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
totalTimeSeries: number;
}
export enum MetricsexplorertypesMetricMetadataDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesMetricMetadataDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesMetricMetadataDTO {
/**
* @type string
@@ -771,14 +783,29 @@ export interface MetricsexplorertypesMetricMetadataDTO {
* @type boolean
*/
isMonotonic: boolean;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesMetricMetadataDTOType;
/**
* @type string
*/
unit: string;
}
export enum MetricsexplorertypesStatDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesStatDTO {
/**
* @type string
@@ -798,7 +825,11 @@ export interface MetricsexplorertypesStatDTO {
* @minimum 0
*/
timeseries: number;
type: MetrictypesTypeDTO;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesStatDTOType;
/**
* @type string
*/
@@ -858,7 +889,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
totalValue: number;
}
export enum MetricsexplorertypesTreemapModeDTO {
export enum MetricsexplorertypesTreemapRequestDTOMode {
timeseries = 'timeseries',
samples = 'samples',
}
@@ -873,7 +904,11 @@ export interface MetricsexplorertypesTreemapRequestDTO {
* @type integer
*/
limit: number;
mode: MetricsexplorertypesTreemapModeDTO;
/**
* @enum timeseries,samples
* @type string
*/
mode: MetricsexplorertypesTreemapRequestDTOMode;
/**
* @type integer
* @format int64
@@ -894,6 +929,18 @@ export interface MetricsexplorertypesTreemapResponseDTO {
timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
/**
* @type string
@@ -907,26 +954,22 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
* @type string
*/
metricName: string;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
/**
* @type string
*/
unit: string;
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetrictypesTypeDTO {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface PreferencetypesPreferenceDTO {
/**
* @type array
@@ -1049,7 +1092,7 @@ export interface Querybuildertypesv5OrderByKeyDTO {
/**
* @type string
*/
name: string;
name?: string;
/**
* @type string
*/
@@ -1141,79 +1184,6 @@ export interface RoletypesRoleDTO {
updatedAt?: Date;
}
/**
* @nullable
*/
export type TelemetrytypesGettableFieldKeysDTOKeys = {
[key: string]: TelemetrytypesTelemetryFieldKeyDTO[];
} | null;
export interface TelemetrytypesGettableFieldKeysDTO {
/**
* @type boolean
*/
complete: boolean;
/**
* @type object
* @nullable true
*/
keys: TelemetrytypesGettableFieldKeysDTOKeys;
}
export interface TelemetrytypesGettableFieldValuesDTO {
/**
* @type boolean
*/
complete: boolean;
values: TelemetrytypesTelemetryFieldValuesDTO;
}
export interface TelemetrytypesTelemetryFieldKeyDTO {
/**
* @type string
*/
description?: string;
/**
* @type string
*/
fieldContext?: string;
/**
* @type string
*/
fieldDataType?: string;
/**
* @type string
*/
name: string;
/**
* @type string
*/
signal?: string;
/**
* @type string
*/
unit?: string;
}
export interface TelemetrytypesTelemetryFieldValuesDTO {
/**
* @type array
*/
boolValues?: boolean[];
/**
* @type array
*/
numberValues?: number[];
/**
* @type array
*/
relatedValues?: string[];
/**
* @type array
*/
stringValues?: string[];
}
export interface TypesChangePasswordRequestDTO {
/**
* @type string
@@ -1418,7 +1388,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
email: string;
email?: string;
/**
* @type string
*/
@@ -1426,7 +1396,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
orgId: string;
orgId?: string;
}
export interface TypesPostableInviteDTO {
@@ -1661,132 +1631,6 @@ export type DeleteAuthDomainPathParameters = {
export type UpdateAuthDomainPathParameters = {
id: string;
};
export type GetFieldsKeysParams = {
/**
* @type string
* @description undefined
*/
signal?: string;
/**
* @type string
* @description undefined
*/
source?: string;
/**
* @type integer
* @description undefined
*/
limit?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
startUnixMilli?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
endUnixMilli?: number;
/**
* @type string
* @description undefined
*/
fieldContext?: string;
/**
* @type string
* @description undefined
*/
fieldDataType?: string;
/**
* @type string
* @description undefined
*/
metricName?: string;
/**
* @type string
* @description undefined
*/
searchText?: string;
};
export type GetFieldsKeys200 = {
data?: TelemetrytypesGettableFieldKeysDTO;
/**
* @type string
*/
status?: string;
};
export type GetFieldsValuesParams = {
/**
* @type string
* @description undefined
*/
signal?: string;
/**
* @type string
* @description undefined
*/
source?: string;
/**
* @type integer
* @description undefined
*/
limit?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
startUnixMilli?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
endUnixMilli?: number;
/**
* @type string
* @description undefined
*/
fieldContext?: string;
/**
* @type string
* @description undefined
*/
fieldDataType?: string;
/**
* @type string
* @description undefined
*/
metricName?: string;
/**
* @type string
* @description undefined
*/
searchText?: string;
/**
* @type string
* @description undefined
*/
name?: string;
/**
* @type string
* @description undefined
*/
existingQuery?: string;
};
export type GetFieldsValues200 = {
data?: TelemetrytypesGettableFieldValuesDTO;
/**
* @type string
*/
status?: string;
};
export type GetResetPasswordTokenPathParameters = {
id: string;
};
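The regenerated types above replace the shared MetrictypesTemporalityDTO / MetrictypesTypeDTO enums with per-DTO enums. A hedged sketch of a payload built with the new enums — only the fields visible in this diff are shown, the metric name is hypothetical, and the real interface may require additional properties:

// Hedged sketch: per-DTO enums scoped to the update-metadata request.
const updateRequest: Partial<MetricsexplorertypesUpdateMetricMetadataRequestDTO> = {
	metricName: 'http_server_duration', // hypothetical metric name
	temporality:
		MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.delta,
	type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType.histogram,
	unit: 'ms',
};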

View File

@@ -34,230 +34,159 @@ const themeColors = {
cyan: '#00FFFF',
},
chartcolors: {
// Blues (3)
dodgerBlue: '#2F80ED',
royalBlue: '#3366E6',
steelBlue: '#4682B4',
// Teals / Cyans (3)
turquoise: '#00CEC9',
lagoon: '#1ABC9C',
cyanBright: '#22A6F2',
// Greens (3)
emeraldGreen: '#27AE60',
mediumSeaGreen: '#3CB371',
limeGreen: '#A3E635',
// Yellows / Golds (3)
festivalYellow: '#F2C94C',
sunflower: '#FFD93D',
warmAmber: '#FFCA28',
// Oranges (3)
festivalOrange: '#F2994A',
coralOrange: '#E17055',
pumpkin: '#FF7F50',
// Reds (3)
radicalRed: '#FF1A66',
crimsonRed: '#EB5757',
fireRed: '#E10600',
// Pinks (3)
hotPink: '#E84393',
rosePink: '#FD79A8',
blush: '#FF7EB6',
// Purples / Violets (3)
mediumPurple: '#BB6BD9',
royalPurple: '#9B51E0',
orchid: '#DA77F2',
// Accent / Neon / Unique Colors (3)
neonViolet: '#C77DFF',
electricPurple: '#6C5CE7',
arcticBlue: '#48DBFB',
// Extended palette — systematic variations to reach 100 colors
blue1: '#1F63E0',
blue2: '#3A7AED',
blue3: '#5A9DF5',
cyan1: '#00B0AA',
cyan2: '#33D6C2',
cyan3: '#66E9DA',
green1: '#1E8449',
green2: '#2ECC71',
green3: '#58D68D',
yellow1: '#F1C40F',
yellow2: '#F7DC6F',
yellow3: '#F9E79F',
orange1: '#D35400',
orange2: '#E67E22',
orange3: '#F5B041',
red1: '#C0392B',
red2: '#E74C3C',
red3: '#EC7063',
pink1: '#D81B60',
pink2: '#E91E63',
pink3: '#F06292',
purple1: '#8E44AD',
purple2: '#9B59B6',
purple3: '#BB8FCE',
teal1: '#009688',
teal2: '#1ABC9C',
teal3: '#48C9B0',
lime1: '#A3E635',
lime2: '#B9F18D',
lime3: '#D4FFB0',
gold1: '#F39C12',
gold2: '#F1C40F',
gold3: '#F7DC6F',
coral1: '#E67E22',
coral2: '#F39C12',
coral3: '#F5B041',
crimson1: '#C0392B',
crimson2: '#E74C3C',
crimson3: '#EC7063',
violet1: '#8E44AD',
violet2: '#9B59B6',
violet3: '#BB8FCE',
aqua1: '#00BFFF',
aqua2: '#1E90FF',
aqua3: '#63B8FF',
forest1: '#27AE60',
forest2: '#2ECC71',
forest3: '#58D68D',
blush1: '#FF6F91',
blush2: '#FF85A2',
blush3: '#FFA0B3',
lavender1: '#9B59B6',
lavender2: '#AF7AC5',
lavender3: '#C39BD3',
tomato1: '#E74C3C',
tomato2: '#EC7063',
tomato3: '#F1948A',
salmon1: '#FF6B6B',
salmon2: '#FF8787',
salmon3: '#FFA1A1',
mustard1: '#F1C40F',
mustard2: '#F7DC6F',
mustard3: '#F9E79F',
teal4: '#1ABC9C',
teal5: '#48C9B0',
teal6: '#76D7C4',
magenta1: '#D6336C',
magenta2: '#E84393',
magenta3: '#F06292',
violet4: '#7D3C98',
violet5: '#8E44AD',
violet6: '#9B59B6',
green4: '#229954',
green5: '#27AE60',
green6: '#52BE80',
blue4: '#2874A6',
blue5: '#2E86C1',
blue6: '#3498DB',
red4: '#C0392B',
red5: '#E74C3C',
red6: '#EC7063',
orange4: '#D35400',
orange5: '#E67E22',
orange6: '#EB984E',
pink4: '#C2185B',
pink5: '#D81B60',
pink6: '#E91E63',
gold4: '#B7950B',
gold5: '#F1C40F',
gold6: '#F4D03F',
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
seaGreen: '#219653',
turquoiseBlue: '#56CCF2',
festivalOrange: '#F2C94C',
silver: '#BDBDBD',
outrageousOrange: '#FF6633',
roseBud: '#FFB399',
canary: '#FFFF99',
deepSkyBlue: '#00B3E6',
goldTips: '#E6B333',
royalBlue: '#3366E6',
avocado: '#999966',
mintGreen: '#99FF99',
chestnut: '#B34D4D',
lima: '#80B300',
olive: '#809900',
beautyBush: '#E6B3B3',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRose: '#FF99E6',
electricLime: '#CCFF1A',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
turquoise: '#33FFCC',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
yellow: '#FFFF00',
purple: '#800080',
cyan: '#00FFFF',
magenta: '#FF00FF',
orange: '#FFA500',
pink: '#FFC0CB',
brown: '#A52A2A',
teal: '#008080',
lime: '#00FF00',
maroon: '#800000',
navy: '#000080',
aquamarine: '#7FFFD4',
darkSeaGreen: '#8FBC8F',
gray: '#808080',
skyBlue: '#87CEEB',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peru: '#CD853F',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrown: '#BC8F8F',
darkSlateGray: '#2F4F4F',
mediumAquamarine: '#66CDAA',
lavender: '#E6E6FA',
thistle: '#D8BFD8',
salmon: '#FA8072',
darkSalmon: '#E9967A',
paleVioletRed: '#DB7093',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
lawnGreen: '#7CFC00',
mediumSeaGreen: '#3CB371',
lightCoral: '#F08080',
gold: '#FFD700',
sandyBrown: '#F4A460',
darkKhaki: '#BDB76B',
cornflowerBlue: '#6495ED',
mediumVioletRed: '#C71585',
paleGreen: '#98FB98',
},
lightModeColor: {
radicalRed: '#D81B60',
dodgerBlueDark: '#1E5BD9',
steelgrey: '#344B6B',
steelpurple: '#5E548E',
steelindigo: '#8E4A7C',
steelpink: '#B63A6F',
steelcoral: '#E14B5A',
steelorange: '#E76F2F',
steelgold: '#E09B00',
steelrust: '#C93A50',
steelgreen: '#2F7D69',
mediumOrchidDark: '#8E24AA',
seaBuckthornDark: '#C75A00',
seaGreen: '#1E7F5A',
turquoiseBlueDark: '#007EA7',
silverDark: '#5F5F5F',
outrageousOrangeDark: '#E64A19',
roseBudDark: '#D84315',
deepSkyBlueDark: '#0277BD',
royalBlue: '#2A4FDB',
avocadoDark: '#6B6B1E',
mintGreenDark: '#2E9E55',
chestnut: '#8B3A3A',
limaDark: '#5C7F00',
olive: '#6E7F00',
beautyBushDark: '#C93C3C',
danube: '#4F6FB3',
oliveDrab: '#4F7F1A',
lavenderRoseDark: '#B0178F',
electricLimeDark: '#6B8F00',
robin: '#2F4FCC',
harleyOrange: '#CC2E12',
gladeGreen: '#4F7F46',
hemlock: '#5C5C45',
vidaLoca: '#3D6B00',
rust: '#993300',
red: '#C62828',
blue: '#1A237E',
green: '#1B7F3A',
purple: '#6A1B9A',
magentaDark: '#B000B5',
pinkDark: '#C2185B',
brown: '#7A3A1E',
teal: '#006D6F',
limeDark: '#4C8C2B',
maroon: '#6D1B1B',
navy: '#0D1B5E',
gray: '#616161',
skyBlueDark: '#0288D1',
indigo: '#303F9F',
slateGray: '#556B7C',
chocolate: '#9C4A1A',
tomato: '#E53935',
steelBlue: '#3A6EA5',
peruDark: '#B35E00',
darkOliveGreen: '#445B1F',
indianRed: '#B04040',
mediumSlateBlue: '#5C6BC0',
rosyBrownDark: '#A94444',
darkSlateGray: '#2E4A4A',
fuchsia: '#C511C5',
salmonDark: '#E64A3C',
darkSalmonDark: '#C85A3A',
paleVioletRedDark: '#C2186A',
mediumPurple: '#7E57C2',
darkOrchid: '#7B1FA2',
mediumSeaGreenDark: '#2E8B57',
lightCoralDark: '#E57373',
gold: '#D4AF37',
sandyBrownDark: '#C76A15',
darkKhakiDark: '#8A7F00',
cornflowerBlueDark: '#355FCC',
mediumVioletRed: '#AD1457',
paleGreenDark: '#2E7D32',
radicalRed: '#FF1A66',
dodgerBlueDark: '#0C6EED',
steelgrey: '#2f4b7c',
steelpurple: '#665191',
steelindigo: '#a05195',
steelpink: '#d45087',
steelcoral: '#f95d6a',
steelorange: '#ff7c43',
steelgold: '#ffa600',
steelrust: '#de425b',
steelgreen: '#41967e',
mediumOrchidDark: '#C326FD',
seaBuckthornDark: '#E66E05',
seaGreen: '#219653',
turquoiseBlueDark: '#0099CC',
silverDark: '#757575',
outrageousOrangeDark: '#F9521A',
roseBudDark: '#EB6437',
deepSkyBlueDark: '#0595BD',
royalBlue: '#3366E6',
avocadoDark: '#8E8E29',
mintGreenDark: '#00C700',
chestnut: '#B34D4D',
limaDark: '#6E9900',
olive: '#809900',
beautyBushDark: '#E25555',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRoseDark: '#F024BD',
electricLimeDark: '#84A800',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
purple: '#800080',
magentaDark: '#EB00EB',
pinkDark: '#FF3D5E',
brown: '#A52A2A',
teal: '#008080',
limeDark: '#07A207',
maroon: '#800000',
navy: '#000080',
gray: '#808080',
skyBlueDark: '#0CA7E4',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peruDark: '#D16E0A',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrownDark: '#CB4848',
darkSlateGray: '#2F4F4F',
fuchsia: '#FF0AFF',
salmonDark: '#FF432E',
darkSalmonDark: '#D26541',
paleVioletRedDark: '#E83089',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
mediumSeaGreenDark: '#109E50',
lightCoralDark: '#F85959',
gold: '#FFD700',
sandyBrownDark: '#D97117',
darkKhakiDark: '#99900A',
cornflowerBlueDark: '#3371E6',
mediumVioletRed: '#C71585',
paleGreenDark: '#0D910D',
},
errorColor: '#d32f2f',
royalGrey: '#888888',

View File

@@ -14,6 +14,6 @@ export const VIEW_TYPES = {
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'net.peer.name',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
} as const;
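For reference, the payload builders in the next file construct filter items against this constant. A minimal sketch of that filter shape (the DataTypes import is omitted, and the id and domain value are hypothetical):

const domainName = 'api.example.com'; // hypothetical domain value
const serverNameFilter = {
	id: 'example-filter-id', // hypothetical id
	key: {
		key: SPAN_ATTRIBUTES.SERVER_NAME, // now 'http_host'
		dataType: DataTypes.String,
		type: '',
	},
	op: '=',
	value: domainName,
};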

View File

@@ -638,7 +638,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -685,7 +685,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -733,7 +733,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -780,7 +780,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -1302,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'e8a043b7',
key: {
key: 'net.peer.name',
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: '',
},
@@ -2198,7 +2198,7 @@ export const getEndPointZeroStateQueryPayload = (
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -2793,7 +2793,7 @@ export const getStatusCodeBarChartWidgetData = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,

View File

@@ -18,8 +18,8 @@ import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynam
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { isEmpty, map } from 'lodash-es';
import {
ArrowLeft,

View File

@@ -1,53 +0,0 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';
type CustomVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate'
>;
function CustomVariableInput({
variableData,
onValueUpdate,
}: CustomVariableInputProps): JSX.Element {
const optionsData: (string | number | boolean)[] = useMemo(() => {
return sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as (string | number | boolean)[];
}, [variableData.customValue, variableData.sort]);
const {
value,
defaultValue,
enableSelectAll,
onChange,
onDropdownVisibleChange,
handleClear,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
});
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
/>
);
}
export default memo(CustomVariableInput);

View File

@@ -25,6 +25,12 @@
}
}
&.focused {
.variable-value {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-value {
display: flex;
min-width: 120px;
@@ -42,11 +48,6 @@
font-style: normal;
font-weight: 400;
line-height: 16px; /* 133.333% */
&:hover,
&:focus-within {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-select {
@@ -98,6 +99,12 @@
.lightMode {
.variable-item {
&.focused {
.variable-value {
border: 1px solid var(--bg-robin-400);
}
}
.variable-name {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
@@ -108,11 +115,6 @@
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
color: var(--bg-ink-400);
&:hover,
&:focus-within {
outline: 1px solid var(--bg-robin-400);
}
}
}
}
@@ -122,9 +124,3 @@
padding: 4px 12px;
font-size: 12px;
}
.dashboard-variables-selection-container {
display: flex;
flex-wrap: wrap;
gap: 12px;
}

View File

@@ -1,4 +1,4 @@
import { memo, useCallback, useEffect, useMemo } from 'react';
import { memo, useEffect } from 'react';
import { useSelector } from 'react-redux';
import { Row } from 'antd';
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
@@ -7,6 +7,7 @@ import {
useDashboardVariablesSelector,
} from 'hooks/dashboard/useDashboardVariables';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import { AppState } from 'store/reducers';
@@ -21,6 +22,7 @@ import './DashboardVariableSelection.styles.scss';
function DashboardVariableSelection(): JSX.Element | null {
const {
selectedDashboard,
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
@@ -50,36 +52,34 @@ function DashboardVariableSelection(): JSX.Element | null {
);
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
// Memoize the order key to avoid unnecessary triggers
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
// Trigger refetch when dependency order changes or global time changes
useEffect(() => {
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
// also trigger when the global time changes
useEffect(
() => {
if (!isEmpty(dependencyData?.order)) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
[JSON.stringify(dependencyData?.order), minTime, maxTime],
);
// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.
// This makes localStorage much lighter and more efficient.
const onValueUpdate = useCallback(
(
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): void => {
const onValueUpdate = (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): void => {
if (id) {
// For dynamic variables, only store in localStorage when NOT allSelected
// This makes localStorage much lighter by avoiding storing all individual values
const variable = dashboardVariables[id] || dashboardVariables[name];
const isDynamic = variable.type === 'DYNAMIC';
const variable = dashboardVariables?.[id] || dashboardVariables?.[name];
const isDynamic = variable?.type === 'DYNAMIC';
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);
if (allSelected) {
@@ -88,39 +88,41 @@ function DashboardVariableSelection(): JSX.Element | null {
updateUrlVariable(name || id, value);
}
setSelectedDashboard((prev) => {
if (prev) {
const oldVariables = { ...prev?.data.variables };
// this is added to handle case where we have two different
// schemas for variable response
if (oldVariables?.[id]) {
oldVariables[id] = {
...oldVariables[id],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
if (oldVariables?.[name]) {
oldVariables[name] = {
...oldVariables[name],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return {
...prev,
data: {
...prev?.data,
variables: {
...oldVariables,
if (selectedDashboard) {
setSelectedDashboard((prev) => {
if (prev) {
const oldVariables = prev?.data.variables;
// this is added to handle case where we have two different
// schemas for variable response
if (oldVariables?.[id]) {
oldVariables[id] = {
...oldVariables[id],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
if (oldVariables?.[name]) {
oldVariables[name] = {
...oldVariables[name],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return {
...prev,
data: {
...prev?.data,
variables: {
...oldVariables,
},
},
},
};
}
return prev;
});
};
}
return prev;
});
}
if (dependencyData) {
const updatedVariables: string[] = [];
@@ -136,20 +138,11 @@ function DashboardVariableSelection(): JSX.Element | null {
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
}
};
return (
<Row className="dashboard-variables-selection-container">
<Row style={{ display: 'flex', gap: '12px' }}>
{sortedVariablesArray.map((variable) => {
const key = `${variable.name}${variable.id}${variable.order}`;
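The comments in this diff note that dynamic variables with allSelected are not persisted value-by-value. A minimal sketch of that storage pattern — this is not the actual updateLocalStorageDashboardVariables implementation, and the storage key is hypothetical:

// Hedged sketch only: illustrates skipping per-value persistence for
// dynamic variables when everything is selected, since the concrete
// values can be re-derived from the available options.
function persistVariableSelection(
	name: string,
	value: unknown,
	allSelected: boolean,
	isDynamic: boolean,
): void {
	const storageKey = 'dashboardVariables'; // hypothetical storage key
	const stored = JSON.parse(localStorage.getItem(storageKey) || '{}');

	if (isDynamic && allSelected) {
		// Store only the flag, not the individual values.
		stored[name] = { allSelected: true };
	} else {
		stored[name] = { allSelected, selectedValue: value };
	}

	localStorage.setItem(storageKey, JSON.stringify(stored));
}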

View File

@@ -23,9 +23,9 @@ import { SelectItemStyle } from './styles';
import {
areArraysEqual,
getOptionsForDynamicVariable,
getSelectValue,
uniqueValues,
} from './util';
import { getSelectValue } from './VariableItem';
import './DashboardVariableSelection.styles.scss';

View File

@@ -1,229 +0,0 @@
import { memo, useCallback, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
interface QueryVariableInputProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const {
tempSelection,
setTempSelection,
value,
defaultValue,
enableSelectAll,
onChange,
onDropdownVisibleChange,
handleClear,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
});
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
valueNotInList = true;
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
};
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
],
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
error: string;
};
}) => {
const { details } = error;
if (details.error) {
let message = details.error;
if ((details.error ?? '').toString().includes('Syntax error:')) {
message =
'Please make sure query is valid and dependent variables are selected';
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
const handleRetry = useCallback((): void => {
setErrorMessage(null);
refetch();
}, [refetch]);
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
// query variable specific, API related props
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
/>
);
}
export default memo(QueryVariableInput);

View File

@@ -1,134 +0,0 @@
import { memo, useMemo } from 'react';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Popover, Tooltip, Typography } from 'antd';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
import { SelectItemStyle } from './styles';
const errorIconStyle = { margin: '0 0.5rem' };
interface SelectVariableInputProps {
variableId: string;
options: (string | number | boolean)[];
value: string | string[] | undefined;
enableSelectAll: boolean;
isMultiSelect: boolean;
onChange: (value: string | string[]) => void;
onClear: () => void;
defaultValue?: string | string[];
onDropdownVisibleChange?: (visible: boolean) => void;
loading?: boolean;
errorMessage?: string | null;
onRetry?: () => void;
}
const MAX_TAG_DISPLAY_VALUES = 10;
function maxTagPlaceholder(
omittedValues: { label?: React.ReactNode; value?: string | number }[],
): JSX.Element {
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
const tooltipText =
valuesToShow.map(({ value: v }) => v ?? '').join(', ') +
(hasMore ? ` + ${omittedValues.length - MAX_TAG_DISPLAY_VALUES} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}
function SelectVariableInput({
variableId,
options,
value,
onChange,
onDropdownVisibleChange,
onClear,
loading,
errorMessage,
onRetry,
enableSelectAll,
isMultiSelect,
defaultValue,
}: SelectVariableInputProps): JSX.Element {
const selectOptions = useMemo(
() =>
options.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[options],
);
const commonProps = useMemo(
() => ({
// main props
key: variableId,
value,
defaultValue,
// setup props
placeholder: 'Select value',
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
// dynamic props
'data-testid': 'variable-select',
onChange,
loading,
options: selectOptions,
errorMessage,
onRetry,
}),
[
variableId,
defaultValue,
onChange,
loading,
selectOptions,
value,
errorMessage,
onRetry,
],
);
return (
<>
{isMultiSelect ? (
<CustomMultiSelect
{...commonProps}
placement="bottomLeft"
maxTagCount={2}
onDropdownVisibleChange={onDropdownVisibleChange}
maxTagPlaceholder={maxTagPlaceholder}
onClear={onClear}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
/>
) : (
<CustomSelect {...commonProps} />
)}
{errorMessage && (
<span style={errorIconStyle}>
<Popover placement="top" content={<Typography>{errorMessage}</Typography>}>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</>
);
}
export default memo(SelectVariableInput);

View File

@@ -1,85 +0,0 @@
import { memo, useCallback, useRef, useState } from 'react';
import { Input, InputRef } from 'antd';
import { VariableItemProps } from './VariableItem';
type TextboxVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate'
>;
function TextboxVariableInput({
variableData,
onValueUpdate,
}: TextboxVariableInputProps): JSX.Element {
const handleChange = useCallback(
(inputValue: string | string[]): void => {
if (inputValue === variableData.selectedValue) {
return;
}
if (variableData.name) {
onValueUpdate(variableData.name, variableData.id, inputValue, false);
}
},
[
onValueUpdate,
variableData.id,
variableData.name,
variableData.selectedValue,
],
);
const textboxInputRef = useRef<InputRef>(null);
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const handleInputOnChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
setTextboxInputValue(event.target.value);
},
[setTextboxInputValue],
);
const handleInputOnBlur = useCallback(
(event: React.FocusEvent<HTMLInputElement>): void => {
const value = event.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
handleChange(variableData.defaultValue.toString());
} else {
handleChange(value);
}
},
[handleChange, variableData.defaultValue],
);
const handleInputOnKeyDown = useCallback(
(event: React.KeyboardEvent<HTMLInputElement>): void => {
if (event.key === 'Enter') {
textboxInputRef.current?.blur();
}
},
[],
);
return (
<Input
key={variableData.id}
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
value={textboxInputValue}
title={textboxInputValue}
onChange={handleInputOnChange}
onBlur={handleInputOnBlur}
onKeyDown={handleInputOnKeyDown}
/>
);
}
export default memo(TextboxVariableInput);

View File

@@ -1,16 +1,35 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable jsx-a11y/click-events-have-key-events */
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react/jsx-props-no-spreading */
/* eslint-disable no-nested-ternary */
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { orange } from '@ant-design/colors';
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { popupContainer } from 'utils/selectPopupContainer';
import CustomVariableInput from './CustomVariableInput';
import QueryVariableInput from './QueryVariableInput';
import TextboxVariableInput from './TextboxVariableInput';
import { ALL_SELECT_VALUE, variablePropsToPayloadVariables } from '../utils';
import { SelectItemStyle } from './styles';
import { areArraysEqual, checkAPIInvocation } from './util';
import './DashboardVariableSelection.styles.scss';
export interface VariableItemProps {
interface VariableItemProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
@@ -24,49 +43,488 @@ export interface VariableItemProps {
dependencyData: IDependencyData | null;
}
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] | undefined => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString();
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString();
};
// eslint-disable-next-line sonarjs/cognitive-complexity
function VariableItem({
variableData,
onValueUpdate,
existingVariables,
onValueUpdate,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const { name, description, type: variableType } = variableData;
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
// Local state for textbox input to ensure smooth editing experience
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
const textboxInputRef = useRef<InputRef>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
// variableData.name is present as the top element or next in the queue - variablesToGetUpdated
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
const [errorMessage, setErrorMessage] = useState<null | string>(null);
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
if (variablesRes && variableData.type === 'QUERY') {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
/* eslint-disable no-useless-escape */
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
const isUsed = newOptionsData.includes(val);
if (!isUsed) {
valueNotInList = true;
}
});
} else if (isString(variableData.selectedValue)) {
const isUsed = newOptionsData.includes(variableData.selectedValue);
if (!isUsed) {
valueNotInList = true;
}
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.type === 'QUERY' &&
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData && variableData?.name && variableData?.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
} else if (variableData.type === 'CUSTOM') {
const optionsData = sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as never;
setOptionsData(optionsData);
}
};
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
],
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
error: string;
};
}) => {
const { details } = error;
if (details.error) {
let message = details.error;
if ((details.error ?? '').toString().includes('Syntax error:')) {
message =
'Please make sure query is valid and dependent variables are selected';
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass null to avoid storing values
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
onValueUpdate,
optionsData,
variableData.showALLOption,
],
);
// Add a handler for tracking temporary selection changes
const handleTempChange = (inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
};
// Handle dropdown visibility changes
const handleDropdownVisibleChange = (visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
};
// do not debounce the above function as we do not need debounce in select variables
const debouncedHandleChange = debounce(handleChange, 500);
const { selectedValue } = variableData;
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
// Apply default value on first render if no selection exists
// eslint-disable-next-line sonarjs/cognitive-complexity
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
useEffect(() => {
// Fetch options for CUSTOM Type
if (variableData.type === 'CUSTOM') {
getOptions(null);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableData.type, variableData.customValue]);
return (
<div className="variable-item">
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
<Typography.Text className="variable-name" ellipsis>
${name}
{description && (
<Tooltip title={description}>
${variableData.name}
{variableData.description && (
<Tooltip title={variableData.description}>
<InfoCircleOutlined className="info-icon" />
</Tooltip>
)}
</Typography.Text>
<div className="variable-value">
{variableType === 'TEXTBOX' && (
<TextboxVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
{variableData.type === 'TEXTBOX' ? (
<Input
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
value={textboxInputValue}
title={textboxInputValue}
onChange={(e): void => {
setTextboxInputValue(e.target.value);
}}
onFocus={(): void => {
setIsTextboxFocused(true);
}}
onBlur={(e): void => {
setIsTextboxFocused(false);
const value = e.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
}}
onKeyDown={(e): void => {
if (e.key === 'Enter') {
const value = textboxInputValue.trim();
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
textboxInputRef.current?.blur();
}
}}
/>
) : (
optionsData &&
(variableData.multiSelect ? (
<CustomMultiSelect
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
options={optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
}))}
defaultValue={variableData.defaultValue || selectValue}
onChange={handleTempChange}
bordered={false}
placeholder="Select value"
placement="bottomLeft"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
maxTagCount={2}
getPopupContainer={popupContainer}
value={tempSelection || selectValue}
onDropdownVisibleChange={handleDropdownVisibleChange}
errorMessage={errorMessage}
// eslint-disable-next-line react/no-unstable-nested-components
maxTagPlaceholder={(omittedValues): JSX.Element => {
const maxDisplayValues = 10;
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
const hasMore = omittedValues.length > maxDisplayValues;
const tooltipText =
valuesToShow.map(({ value }) => value).join(', ') +
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}}
onClear={(): void => {
handleChange([]);
}}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={selectValue !== ALL_SELECT_VALUE && selectValue !== 'ALL'}
onRetry={(): void => {
setErrorMessage(null);
refetch();
}}
/>
) : (
<CustomSelect
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
defaultValue={variableData.defaultValue || selectValue}
onChange={handleChange}
bordered={false}
placeholder="Select value"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
getPopupContainer={popupContainer}
options={optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
}))}
value={selectValue}
errorMessage={errorMessage}
onRetry={(): void => {
setErrorMessage(null);
refetch();
}}
/>
))
)}
{variableType === 'CUSTOM' && (
<CustomVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
/>
)}
{variableType === 'QUERY' && (
<QueryVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
{variableData.type !== 'TEXTBOX' && errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</div>
</div>

View File

@@ -1,201 +0,0 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { isEmpty } from 'lodash-es';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { areArraysEqual, getSelectValue } from './util';
interface UseDashboardVariableSelectHelperParams {
variableData: IDashboardVariable;
optionsData: (string | number | boolean)[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
}
interface UseDashboardVariableSelectHelperReturn {
// State
tempSelection: string | string[] | undefined;
setTempSelection: React.Dispatch<
React.SetStateAction<string | string[] | undefined>
>;
value: string | string[] | undefined;
defaultValue: string | string[] | undefined;
// Derived values
enableSelectAll: boolean;
// Handlers
onChange: (value: string | string[]) => void;
onDropdownVisibleChange: (visible: boolean) => void;
handleClear: () => void;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
export function useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
const { selectedValue } = variableData;
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass optionsData as the value and set allSelected to true
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
variableData.showALLOption,
onValueUpdate,
optionsData,
],
);
const handleTempChange = useCallback(
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
},
[variableData.multiSelect],
);
// Apply default value on first render if no selection exists
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
// Apply default values when needed
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
// Handle dropdown visibility changes
const onDropdownVisibleChange = useCallback(
(visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
},
[variableData, tempSelection, handleChange],
);
const handleClear = useCallback((): void => {
handleChange([]);
}, [handleChange]);
const value = variableData.multiSelect
? tempSelection || selectValue
: selectValue;
const defaultValue = variableData.defaultValue || selectValue;
const onChange = useMemo(() => {
return variableData.multiSelect ? handleTempChange : handleChange;
}, [variableData.multiSelect, handleTempChange, handleChange]);
return {
tempSelection,
setTempSelection,
enableSelectAll,
onDropdownVisibleChange,
handleClear,
value,
defaultValue,
onChange,
};
}

View File

@@ -381,16 +381,3 @@ export const uniqueValues = (values: string[] | string): string[] | string => {
return values;
};
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] | undefined => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString();
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString();
};

View File

@@ -1,21 +0,0 @@
.chart-manager-container {
width: 100%;
max-height: calc(40% - 40px);
display: flex;
flex-direction: column;
gap: 16px;
.chart-manager-header {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;
.chart-manager-actions-container {
display: flex;
justify-content: flex-end;
align-items: center;
gap: 8px;
}
}
}

View File

@@ -1,147 +0,0 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { Button, Input } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
import { useNotifications } from 'hooks/useNotifications';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import './ChartManager.styles.scss';
interface ChartManagerProps {
config: UPlotConfigBuilder;
alignedData: uPlot.AlignedData;
yAxisUnit?: string;
onCancel?: () => void;
}
/**
* ChartManager provides a tabular view to manage the visibility of
* individual series on a uPlot chart.
*
* It syncs with the legend state coming from the plot context and
* allows users to:
* - filter series by label
* - toggle individual series on/off
* - persist the visibility configuration to local storage.
*
* @param config - `UPlotConfigBuilder` instance used to derive chart options.
* @param alignedData - uPlot aligned data used to build the initial table dataset.
* @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
* @param onCancel - Optional callback invoked when the user cancels the dialog.
*/
export default function ChartManager({
config,
alignedData,
yAxisUnit,
onCancel,
}: ChartManagerProps): JSX.Element {
const { notifications } = useNotifications();
const { legendItemsMap } = useLegendsSync({
config,
subscribeToFocusChange: false,
});
const {
onToggleSeriesOnOff,
onToggleSeriesVisibility,
syncSeriesVisibilityToLocalStorage,
} = usePlotContext();
const { isDashboardLocked } = useDashboard();
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
const graphVisibilityState = useMemo(
() =>
Object.entries(legendItemsMap).reduce<boolean[]>((acc, [key, item]) => {
acc[Number(key)] = item.show;
return acc;
}, []),
[legendItemsMap],
);
useEffect(() => {
setTableDataSet(
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
}, [alignedData, config]);
const filterHandler = useCallback(
(event: React.ChangeEvent<HTMLInputElement>): void => {
const value = event.target.value.toString().toLowerCase();
const updatedDataSet = tableDataSet.map((item) => {
if (item.label?.toLocaleLowerCase().includes(value)) {
return { ...item, show: true };
}
return { ...item, show: false };
});
setTableDataSet(updatedDataSet);
},
[tableDataSet],
);
const dataSource = useMemo(
() =>
tableDataSet.filter(
(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
),
[tableDataSet],
);
const columns = useMemo(
() =>
getGraphManagerTableColumns({
tableDataSet,
checkBoxOnChangeHandler: (_e, index) => {
onToggleSeriesOnOff(index);
},
graphVisibilityState,
labelClickedHandler: onToggleSeriesVisibility,
yAxisUnit,
isGraphDisabled: isDashboardLocked,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
);
const handleSave = useCallback((): void => {
syncSeriesVisibilityToLocalStorage();
notifications.success({
message: 'The updated graphs & legends are saved',
});
if (onCancel) {
onCancel();
}
}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);
return (
<div className="chart-manager-container">
<div className="chart-manager-header">
<Input onChange={filterHandler} placeholder="Filter Series" />
<div className="chart-manager-actions-container">
<Button type="default" onClick={onCancel}>
Cancel
</Button>
<Button type="primary" onClick={handleSave}>
Save
</Button>
</div>
</div>
<div className="chart-manager-table-container">
<ResizeTable
columns={columns}
dataSource={dataSource}
virtual
rowKey="index"
scroll={{ y: 200 }}
pagination={false}
/>
</div>
</div>
);
}
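
Editor's note: a minimal usage sketch for the component above, assuming the caller already has the `UPlotConfigBuilder` and aligned data produced by the panel pipeline. The wrapper component and its prop names below are hypothetical and not part of this change; only `ChartManager` and its documented props come from the file above.

// Hypothetical full-view wrapper; only ChartManager and its props are real.
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import uPlot from 'uplot';

interface FullViewModalBodyProps {
	config: UPlotConfigBuilder;
	alignedData: uPlot.AlignedData;
	onClose: () => void;
}

function FullViewModalBody({
	config,
	alignedData,
	onClose,
}: FullViewModalBodyProps): JSX.Element {
	return (
		<ChartManager
			config={config}
			alignedData={alignedData}
			yAxisUnit="ms" // unit label shown next to values in the table
			onCancel={onClose} // dismisses the dialog after Save/Cancel
		/>
	);
}

export default FullViewModalBody;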

View File

@@ -1,120 +0,0 @@
import { useCallback } from 'react';
import { UseQueryResult } from 'react-query';
import {
getTimeRangeFromStepInterval,
isApmMetric,
} from 'container/PanelWrapper/utils';
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
import {
PopoverPosition,
useCoordinates,
} from 'periscope/components/ContextMenu';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
interface UseTimeSeriesContextMenuParams {
widget: Widgets;
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
}
export const usePanelContextMenu = ({
widget,
queryResponse,
}: UseTimeSeriesContextMenuParams): {
coordinates: { x: number; y: number } | null;
popoverPosition: PopoverPosition | null;
onClose: () => void;
menuItemsConfig: {
header?: string | React.ReactNode;
items?: React.ReactNode;
};
clickHandlerWithContextMenu: (...args: any[]) => void;
} => {
const {
coordinates,
popoverPosition,
clickedData,
onClose,
subMenu,
onClick,
setSubMenu,
} = useCoordinates();
const { menuItemsConfig } = useGraphContextMenu({
widgetId: widget.id || '',
query: widget.query,
graphData: clickedData,
onClose,
coordinates,
subMenu,
setSubMenu,
contextLinks: widget.contextLinks,
panelType: widget.panelTypes,
queryRange: queryResponse,
});
const clickHandlerWithContextMenu = useCallback(
(...args: any[]) => {
const [
xValue,
_yvalue,
_mouseX,
_mouseY,
metric,
queryData,
absoluteMouseX,
absoluteMouseY,
axesData,
focusedSeries,
] = args;
const data = getUplotClickData({
metric,
queryData,
absoluteMouseX,
absoluteMouseY,
focusedSeries,
});
let timeRange;
if (axesData && queryData?.queryName) {
const compositeQuery = (queryResponse?.data?.params as any)?.compositeQuery;
if (compositeQuery?.queries) {
const specificQuery = compositeQuery.queries.find(
(query: any) => query.spec?.name === queryData.queryName,
);
const stepInterval = specificQuery?.spec?.stepInterval || 60;
timeRange = getTimeRangeFromStepInterval(
stepInterval,
metric?.clickedTimestamp || xValue,
specificQuery?.spec?.signal === DataSource.METRICS &&
isApmMetric(specificQuery?.spec?.aggregations[0]?.metricName),
);
}
}
if (data && data?.record?.queryName) {
onClick(data.coord, { ...data.record, label: data.label, timeRange });
}
},
[onClick, queryResponse],
);
return {
coordinates,
popoverPosition,
onClose,
menuItemsConfig,
clickHandlerWithContextMenu,
};
};

View File

@@ -1,4 +0,0 @@
.panel-container {
height: 100%;
width: 100%;
}

View File

@@ -1,176 +0,0 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { LineInterpolation } from 'lib/uPlotV2/config/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { getTimeRange } from 'utils/getTimeRange';
import { prepareChartData, prepareUPlotConfig } from '../TimeSeriesPanel/utils';
import '../Panel.styles.scss';
function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
const {
panelMode,
queryResponse,
widget,
onDragSelect,
isFullViewMode,
onToggleModelHandler,
} = props;
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const containerDimensions = useResizeObserver(graphRef);
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [queryResponse]);
const {
coordinates,
popoverPosition,
onClose,
menuItemsConfig,
clickHandlerWithContextMenu,
} = usePanelContextMenu({
widget,
queryResponse,
});
const chartData = useMemo(() => {
if (!queryResponse?.data?.payload) {
return [];
}
return prepareChartData(queryResponse?.data?.payload);
}, [queryResponse?.data?.payload]);
const config = useMemo(() => {
const tzDate = (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
return prepareUPlotConfig({
widgetId: widget.id || '',
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
tzDate,
minTimeScale: minTimeScale,
maxTimeScale: maxTimeScale,
isLogScale: widget?.isLogScale ?? false,
thresholds: {
scaleKey: 'y',
thresholds: (widget.thresholds || []).map((threshold) => ({
thresholdValue: threshold.thresholdValue ?? 0,
thresholdColor: threshold.thresholdColor,
thresholdUnit: threshold.thresholdUnit,
thresholdLabel: threshold.thresholdLabel,
})),
yAxisUnit: widget.yAxisUnit,
},
yAxisUnit: widget.yAxisUnit || '',
softMin: widget.softMin === undefined ? null : widget.softMin,
softMax: widget.softMax === undefined ? null : widget.softMax,
spanGaps: false,
colorMapping: widget.customLegendColors ?? {},
lineInterpolation: LineInterpolation.Spline,
isDarkMode,
onClick: clickHandlerWithContextMenu,
onDragSelect,
currentQuery: widget.query,
panelMode,
});
}, [
widget.id,
maxTimeScale,
minTimeScale,
timezone.value,
widget.customLegendColors,
widget.isLogScale,
widget.softMax,
widget.softMin,
isDarkMode,
queryResponse?.data?.payload,
widget.query,
widget.thresholds,
widget.yAxisUnit,
panelMode,
clickHandlerWithContextMenu,
onDragSelect,
]);
const layoutChildren = useMemo(() => {
if (!isFullViewMode) {
return null;
}
return (
<ChartManager
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
onCancel={onToggleModelHandler}
/>
);
}, [
isFullViewMode,
config,
chartData,
widget.yAxisUnit,
onToggleModelHandler,
]);
return (
<div className="panel-container" ref={graphRef}>
{containerDimensions.width > 0 && containerDimensions.height > 0 && (
<TimeSeries
config={config}
legendConfig={{
position: widget?.legendPosition ?? LegendPosition.BOTTOM,
}}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}
layoutChildren={layoutChildren}
>
<ContextMenu
coordinates={coordinates}
popoverPosition={popoverPosition}
title={menuItemsConfig.header as string}
items={menuItemsConfig.items}
onClose={onClose}
/>
</TimeSeries>
)}
</div>
);
}
export default TimeSeriesPanel;

View File

@@ -1,170 +0,0 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
fillMissingXAxisTimestamps,
getXAxisTimestamps,
} from 'container/DashboardContainer/visualization/panels/utils';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import onClickPlugin, {
OnClickPluginOpts,
} from 'lib/uPlotLib/plugins/onClickPlugin';
import {
DistributionType,
DrawStyle,
LineInterpolation,
LineStyle,
SelectionPreferencesSource,
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { PanelMode } from '../types';
export const prepareChartData = (
apiResponse: MetricRangePayloadProps,
): uPlot.AlignedData => {
const seriesList = apiResponse?.data?.result || [];
const timestampArr = getXAxisTimestamps(seriesList);
const yAxisValuesArr = fillMissingXAxisTimestamps(timestampArr, seriesList);
return [timestampArr, ...yAxisValuesArr];
};
export const prepareUPlotConfig = ({
widgetId,
apiResponse,
tzDate,
minTimeScale,
maxTimeScale,
isLogScale,
thresholds,
softMin,
softMax,
spanGaps,
colorMapping,
lineInterpolation,
isDarkMode,
currentQuery,
onDragSelect,
onClick,
yAxisUnit,
panelMode,
}: {
widgetId: string;
apiResponse: MetricRangePayloadProps;
tzDate: uPlot.LocalDateFromUnix;
minTimeScale: number | undefined;
maxTimeScale: number | undefined;
isLogScale: boolean;
softMin: number | null;
softMax: number | null;
spanGaps: boolean;
colorMapping: Record<string, string>;
lineInterpolation: LineInterpolation;
isDarkMode: boolean;
thresholds: ThresholdsDrawHookOptions;
currentQuery: Query;
yAxisUnit: string;
onDragSelect: (startTime: number, endTime: number) => void;
onClick?: OnClickPluginOpts['onClick'];
panelMode: PanelMode;
}): UPlotConfigBuilder => {
const builder = new UPlotConfigBuilder({
onDragSelect,
widgetId,
tzDate,
shouldSaveSelectionPreference: panelMode === PanelMode.DASHBOARD_VIEW,
selectionPreferencesSource: [
PanelMode.DASHBOARD_VIEW,
PanelMode.STANDALONE_VIEW,
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
});
// X scale time axis
builder.addScale({
scaleKey: 'x',
time: true,
min: minTimeScale,
max: maxTimeScale,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
// Y scale value axis, driven primarily by softMin/softMax and data
builder.addScale({
scaleKey: 'y',
time: false,
min: undefined,
max: undefined,
softMin: softMin ?? undefined,
softMax: softMax ?? undefined,
thresholds,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
builder.addThresholds(thresholds);
if (typeof onClick === 'function') {
builder.addPlugin(
onClickPlugin({
onClick,
apiResponse,
}),
);
}
builder.addAxis({
scaleKey: 'x',
show: true,
side: 2,
isDarkMode,
isLogScale: false,
panelType: PANEL_TYPES.TIME_SERIES,
});
builder.addAxis({
scaleKey: 'y',
show: true,
side: 3,
isDarkMode,
isLogScale: false,
yAxisUnit,
panelType: PANEL_TYPES.TIME_SERIES,
});
apiResponse.data?.result?.forEach((series) => {
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
series.legend || '',
);
const label = currentQuery
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Line,
label: label,
colorMapping,
spanGaps,
lineStyle: LineStyle.Solid,
lineInterpolation,
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
});
});
return builder;
};

View File

@@ -1,54 +0,0 @@
import { normalizePlotValue } from 'lib/uPlotV2/utils/dataUtils';
import { QueryData } from 'types/api/widgets/getQuery';
export function getXAxisTimestamps(seriesList: QueryData[]): number[] {
const timestamps = new Set<number>();
seriesList.forEach((series: { values?: [number, string][] }) => {
if (series?.values) {
series.values.forEach((value) => {
timestamps.add(value[0]);
});
}
});
const timestampsArr = Array.from(timestamps);
timestampsArr.sort((a, b) => a - b);
return timestampsArr;
}
export function fillMissingXAxisTimestamps(
timestampArr: number[],
data: Array<{ values?: [number, string][] }>,
): (number | null)[][] {
// Ensure we work with a sorted, deduplicated list of x-axis timestamps
const canonicalTimestamps = Array.from(new Set(timestampArr)).sort(
(a, b) => a - b,
);
return data.map(({ values }) =>
buildSeriesYValues(canonicalTimestamps, values),
);
}
function buildSeriesYValues(
timestamps: number[],
values?: [number, string][],
): (number | null)[] {
if (!values?.length) {
return [];
}
const valueByTimestamp = new Map<number, number | null>();
for (let i = 0; i < values.length; i++) {
const [timestamp, rawValue] = values[i];
valueByTimestamp.set(timestamp, normalizePlotValue(rawValue));
}
return timestamps.map((timestamp) => {
const value = valueByTimestamp.get(timestamp);
return value !== undefined ? value : null;
});
}
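
Editor's note: a small worked example of the gap-filling above. The series values are made up for illustration, and the result assumes `normalizePlotValue` parses numeric strings; each missing sample becomes `null` so the series stay aligned to one shared x-axis.

import uPlot from 'uplot';
import { QueryData } from 'types/api/widgets/getQuery';
import {
	fillMissingXAxisTimestamps,
	getXAxisTimestamps,
} from 'container/DashboardContainer/visualization/panels/utils';

// Series A has points at t=10 and t=20, series B only at t=20 and t=30.
const seriesList = ([
	{ values: [[10, '1.5'], [20, '2.5']] },
	{ values: [[20, '4.0'], [30, '5.0']] },
] as unknown) as QueryData[];

const timestamps = getXAxisTimestamps(seriesList); // [10, 20, 30]
const yValues = fillMissingXAxisTimestamps(timestamps, seriesList);
// Assuming normalizePlotValue parses numeric strings, this yields:
// [
//   [1.5, 2.5, null], // series A has no sample at t=30
//   [null, 4, 5],     // series B has no sample at t=10
// ]
const alignedData = [timestamps, ...yValues] as uPlot.AlignedData;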

View File

@@ -33,8 +33,8 @@ import { useChartMutable } from 'hooks/useChartMutable';
import useComponentPermission from 'hooks/useComponentPermission';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import GetMinMax from 'lib/getMinMax';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';

View File

@@ -8,8 +8,8 @@ import { populateMultipleResults } from 'container/NewWidget/LeftContainer/Widge
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';

View File

@@ -6,7 +6,7 @@ import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { AppState } from 'store/reducers';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

View File

@@ -27,8 +27,8 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';

View File

@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { ServicesList } from 'types/api/metrics/getService';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';

View File

@@ -13,7 +13,7 @@ import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useNotifications } from 'hooks/useNotifications';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';

View File

@@ -3,8 +3,8 @@ import { useSelector } from 'react-redux';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

View File

@@ -14,7 +14,7 @@ describe('Get Series Data', () => {
expect(seriesData.length).toBe(5);
expect(seriesData[1].label).toBe('firstLegend');
expect(seriesData[1].show).toBe(true);
expect(seriesData[1].fill).toBe('#FF6F91');
expect(seriesData[1].fill).toBe('#C71585');
expect(seriesData[1].width).toBe(2);
});

View File

@@ -24,14 +24,6 @@
opacity: 1;
}
.legend-empty-state {
font-size: 12px;
color: var(--bg-vanilla-400);
text-align: center;
padding: 12px;
padding: 2rem 0;
}
.legend-virtuoso-container {
height: 100%;
width: 100%;

View File

@@ -81,13 +81,6 @@ export default function Legend({
[focusedSeriesIndex, position],
);
const isEmptyState = useMemo(() => {
if (position !== LegendPosition.RIGHT || !legendSearchQuery.trim()) {
return false;
}
return visibleLegendItems.length === 0;
}, [position, legendSearchQuery, visibleLegendItems]);
return (
<div
ref={legendContainerRef}
@@ -110,21 +103,15 @@ export default function Legend({
/>
</div>
)}
{isEmptyState ? (
<div className="legend-empty-state">
No series found matching &quot;{legendSearchQuery}&quot;
</div>
) : (
<VirtuosoGrid
className={cx(
'legend-virtuoso-container',
`legend-virtuoso-container-${position.toLowerCase()}`,
{ 'legend-virtuoso-container-single-row': isSingleRow },
)}
data={visibleLegendItems}
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
/>
)}
<VirtuosoGrid
className={cx(
'legend-virtuoso-container',
`legend-virtuoso-container-${position.toLowerCase()}`,
{ 'legend-virtuoso-container-single-row': isSingleRow },
)}
data={visibleLegendItems}
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
/>
</div>
);
}

View File

@@ -1,7 +1,7 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { commaValuesParser } from '../../lib/dashboardVariables/customCommaValuesParser';
import { commaValuesParser } from '../../lib/dashbaordVariables/customCommaValuesParser';
interface UrlVariables {
[key: string]: any;

View File

@@ -4,7 +4,6 @@ import {
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -63,30 +62,9 @@ export function buildDependencyData(
};
}
/**
* Initialize the variable fetch store with the computed dependency data
*/
function initializeFetchStore(
sortedVariablesArray: IDashboardVariable[],
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
@@ -97,22 +75,15 @@ export function computeDerivedValues(
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
}
/**
* Update derived values in the store state (for use with immer)
* Also initializes the variable fetch store with the new dependency data
*/
export function updateDerivedValues(
draft: IDashboardVariablesStoreState,
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
}

View File

@@ -1,57 +0,0 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import createStore from './store';
// Fetch state for each variable
export type VariableFetchState =
| 'idle' // stable state - initial or complete
| 'loading' // actively fetching data (first time)
| 'revalidating' // refetching existing data
| 'waiting' // blocked on parent dependencies
| 'error';
export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
lastUpdated: Record<string, number>;
}
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
initialState,
);
// ============== Actions ==============
/**
* Initialize the store with dependency graphs and set initial states
*/
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
variableFetchStore.update((draft) => {
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving existing ready states
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
});
}
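
Editor's note: a sketch of how this store was wired up from the dependency data computed in `computeDerivedValues` before its removal in this change. The variable names and graphs are illustrative only, and it is assumed (per the field comments above) that `VariableGraph` maps a variable name to the names that depend on it.

import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { initializeVariableFetchStore } from './variableFetchStore';

// Illustrative dependency graphs: `service` feeds `endpoint`,
// which in turn feeds `status_code`.
const dependencyGraph: VariableGraph = {
	service: ['endpoint'],
	endpoint: ['status_code'],
	status_code: [],
};
const parentGraph: VariableGraph = {
	service: [],
	endpoint: ['service'],
	status_code: ['endpoint'],
};

// After this call every listed variable is in the 'idle' fetch state
// unless it already had a state recorded in the store.
initializeVariableFetchStore(
	['service', 'endpoint', 'status_code'],
	dependencyGraph,
	parentGraph,
);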

127
pkg/apis/fields/api.go Normal file
View File

@@ -0,0 +1,127 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if fetching related values fails for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}
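
Editor's note: for reference, a TypeScript sketch of the JSON shapes the two handlers above render. The top-level field names follow the struct tags in the Go code (`keys`, `values`, `complete`); the nested field names and the meaning of `complete` are assumptions, not generated client types.

// Sketch of the handler responses, derived from fieldKeysResponse /
// fieldValuesResponse above. TelemetryFieldKey details are assumed.
interface TelemetryFieldKeyLike {
	name: string;
	fieldContext?: string;
	fieldDataType?: string;
}

interface FieldKeysResponse {
	keys: Record<string, TelemetryFieldKeyLike[]>; // presumably keyed by field name
	complete: boolean; // presumably false when results were truncated by the limit
}

interface FieldValuesResponse {
	values: {
		stringValues?: string[];
		numberValues?: number[];
		relatedValues?: string[]; // empty when the related-values lookup fails
	};
	complete: boolean; // allComplete && relatedComplete in the handler above
}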

162
pkg/apis/fields/parse.go Normal file
View File

@@ -0,0 +1,162 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}
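
Editor's note: a hedged client-side sketch of the query parameters this parser accepts. Parameter names come straight from the parsing code above; the base path is an assumption (the route registration is not part of this file, and the old `/api/v1/fields/*` routes are removed elsewhere in this compare).

// Builds a field-keys request URL. Only the parameter names are taken from
// parseFieldKeyRequest; the mount path and host are assumptions.
function buildFieldKeysUrl(baseUrl: string): string {
	const params = new URLSearchParams({
		signal: 'logs',
		searchText: 'resource.service.name', // a context prefix may be inferred server-side
		limit: '100', // defaults to 1000 when omitted
		// startUnixMilli is rounded down server-side to the nearest 6 hours
		startUnixMilli: String(Date.now() - 6 * 60 * 60 * 1000),
		endUnixMilli: String(Date.now()),
	});
	return `${baseUrl}/api/v1/fields/keys?${params.toString()}`;
}

// Example (auth headers omitted):
// fetch(buildFieldKeysUrl('https://signoz.example.com')).then((res) => res.json());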

View File

@@ -1,50 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -13,11 +13,11 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -42,8 +42,8 @@ type provider struct {
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
roleGetter role.Getter
roleHandler role.Handler
}
func NewFactory(
@@ -61,31 +61,11 @@ func NewFactory(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
fieldsHandler,
authzHandler,
)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
})
}
@@ -107,8 +87,8 @@ func newProvider(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -129,11 +109,11 @@ func newProvider(
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
if err := provider.AddToRouter(router); err != nil {
return nil, err
@@ -195,10 +175,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -10,7 +10,7 @@ import (
)
func (provider *provider) addRoleRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
ID: "CreateRole",
Tags: []string{"role"},
Summary: "Create role",
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
ID: "ListRoles",
Tags: []string{"role"},
Summary: "List roles",
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
Summary: "Get role",
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
ID: "DeleteRole",
Tags: []string{"role"},
Summary: "Delete role",

View File

@@ -325,7 +325,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnprocessableEntity},
ErrorStatusCodes: []int{http.StatusBadRequest},
Deprecated: false,
SecuritySchemes: []handler.OpenAPISecurityScheme{},
})).Methods(http.MethodPost).GetError(); err != nil {

View File

@@ -2,11 +2,9 @@ package authz
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -31,76 +29,4 @@ type AuthZ interface {
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
// Bootstrap managed roles transactions and user assignments
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"context"

View File

@@ -2,24 +2,35 @@ package openfgaauthz
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type provider struct {
server *openfgaserver.Server
store roletypes.Store
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
@@ -29,194 +40,301 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// set up the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &provider{
server: server,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.server.Start(ctx)
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := provider.getOrCreateModel(ctx, storeId)
if err != nil {
return err
}
provider.mtx.Lock()
provider.modelID = modelID
provider.storeID = storeId
provider.mtx.Unlock()
<-provider.stopChan
return nil
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.server.Stop(ctx)
provider.openfgaServer.Close()
close(provider.stopChan)
return nil
}
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
return provider.server.Check(ctx, tupleReq)
storeID, modelID := provider.getStoreIDandModelID()
checkResponse, err := provider.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
return provider.server.BatchCheck(ctx, tupleReq)
storeID, modelID := provider.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is a map keyed by the correlation ID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := provider.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.server.Write(ctx, additions, deletions)
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := provider.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.server.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, nil, tuples)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
storeID, modelID := provider.getStoreIDandModelID()
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return err
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return nil
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := provider.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
// the openfga language package's model type doesn't expose an equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to build a fresh model for comparison since the model extracted from the schema doesn't have an ID
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) getStoreIDandModelID() (string, string) {
provider.mtx.RLock()
defer provider.mtx.RUnlock()
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
storeID := provider.storeID
modelID := provider.modelID
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return nil
return storeID, modelID
}
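The isModelEqual method above leans on protojson because the openfga language package ships no equality helper and the stored model carries a server-assigned id: both models are copied without the id and their JSON encodings are compared, which relies on protojson rendering identical messages identically within a single process. A self-contained sketch of the same technique follows; the literal models in main are illustrative only, and proto.Equal on the normalized copies would be an equivalent alternative.
package main

import (
    "fmt"

    openfgav1 "github.com/openfga/api/proto/openfga/v1"
    "google.golang.org/protobuf/encoding/protojson"
)

// modelsEqual mirrors isModelEqual: drop the server-assigned id by copying
// only the comparable fields, then compare the protojson encodings.
func modelsEqual(expected, actual *openfgav1.AuthorizationModel) (bool, error) {
    normalize := func(m *openfgav1.AuthorizationModel) ([]byte, error) {
        return protojson.Marshal(&openfgav1.AuthorizationModel{
            SchemaVersion:   m.SchemaVersion,
            TypeDefinitions: m.TypeDefinitions,
            Conditions:      m.Conditions,
        })
    }
    expectedBytes, err := normalize(expected)
    if err != nil {
        return false, err
    }
    actualBytes, err := normalize(actual)
    if err != nil {
        return false, err
    }
    return string(expectedBytes) == string(actualBytes), nil
}

func main() {
    a := &openfgav1.AuthorizationModel{SchemaVersion: "1.1"}
    b := &openfgav1.AuthorizationModel{Id: "01HXXXXXXXXXXXXXXXXXXXXXXX", SchemaVersion: "1.1"}
    equal, err := modelsEqual(a, b)
    fmt.Println(equal, err) // expected: true <nil>, since the id is ignored
}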

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {
expectedModel := `module base
type user`
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)
storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -1,334 +0,0 @@
package openfgaserver
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type Server struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &Server{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (server *Server) Start(ctx context.Context) error {
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := server.getOrCreateModel(ctx, storeID)
if err != nil {
return err
}
server.mtx.Lock()
server.modelID = modelID
server.storeID = storeID
server.mtx.Unlock()
<-server.stopChan
return nil
}
func (server *Server) Stop(ctx context.Context) error {
server.openfgaServer.Close()
close(server.stopChan)
return nil
}
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is a map keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := server.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := server.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := server.getStoreIDandModelID()
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := server.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (server *Server) getStoreIDandModelID() (string, string) {
server.mtx.RLock()
defer server.mtx.RUnlock()
storeID := server.storeID
modelID := server.modelID
return storeID, modelID
}
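Since Start blocks on stopChan until Stop closes it, this (now removed) server was meant to be driven like any other lifecycle-managed service: Start in a goroutine, Stop on shutdown. A minimal sketch of that wiring follows; server, ctx, and the log import are assumed to be in scope, and the real service registry is not part of this diff.
// hypothetical driver for the Server lifecycle
go func() {
    if err := server.Start(ctx); err != nil {
        // Start returns only after Stop closes stopChan, or if the
        // store/model bootstrap fails
        log.Printf("authz server exited: %v", err)
    }
}()

// ... on shutdown, Stop closes the embedded openfga server and unblocks Start
if err := server.Stop(ctx); err != nil {
    log.Printf("failed to stop authz server: %v", err)
}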

View File

@@ -8,6 +8,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -23,14 +24,15 @@ type AuthZ struct {
logger *slog.Logger
orgGetter organization.Getter
authzService authz.AuthZ
roleGetter role.Getter
}
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {

View File

@@ -4,7 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {
statsreporter.StatsCollector
authz.RegisterTypeable
role.RegisterTypeable
}
type Handler interface {

View File

@@ -206,10 +206,6 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return nil
}
// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")

View File

@@ -1,11 +0,0 @@
package fields
import "net/http"
type Handler interface {
// Gets the fields keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the fields values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -1,79 +0,0 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}

View File

@@ -0,0 +1,63 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type getter struct {
store roletypes.Store
}
func NewGetter(store roletypes.Store) role.Getter {
return &getter{store: store}
}
func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := getter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}

View File

@@ -0,0 +1,83 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type granter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
return &granter{store: store, authz: authz}
}
func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, tuples, nil)
}
func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := granter.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = granter.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, nil, tuples)
}
func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
return nil
}
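A short usage sketch of the Granter, mirroring the user module's CreateUser and DeleteUser paths further down in this diff; user, ctx, and granter are assumed to be in scope.
// Grant is idempotent, so retries are safe
subject := authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil)
if err := granter.Grant(ctx, user.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), subject); err != nil {
    return err
}

// on deletion, revoke the same grant; Revoke is idempotent as well
if err := granter.Revoke(ctx, user.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), subject); err != nil {
    return err
}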

View File

@@ -1,12 +1,12 @@
package signozauthzapi
package implrole
import (
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,11 +14,12 @@ import (
)
type handler struct {
authz authz.AuthZ
setter role.Setter
getter role.Getter
}
func NewHandler(authz authz.AuthZ) authz.Handler {
return &handler{authz: authz}
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
return &handler{setter: setter, getter: getter}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -35,7 +36,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
@@ -63,7 +64,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
if err != nil {
render.Error(rw, err)
return
@@ -102,7 +103,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
return
}
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
if err != nil {
render.Error(rw, err)
return
@@ -113,7 +114,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
resources := handler.authz.GetResources(ctx)
resources := handler.setter.GetResources(ctx)
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
@@ -133,7 +134,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
return
}
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
if err != nil {
render.Error(rw, err)
return
@@ -162,7 +163,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -174,7 +175,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
if err != nil {
render.Error(rw, err)
return
@@ -209,7 +210,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -221,7 +222,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
if err != nil {
render.Error(rw, err)
return
@@ -244,7 +245,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return

View File

@@ -0,0 +1,53 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
return &setter{store: store, authz: authz}
}
func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,4 +1,4 @@
package sqlauthzstore
package implrole
import (
"context"
@@ -14,7 +14,7 @@ type store struct {
sqlstore sqlstore.SQLStore
}
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
return &store{sqlstore: sqlstore}
}

pkg/modules/role/role.go Normal file
View File

@@ -0,0 +1,85 @@
package role
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Setter interface {
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type Getter interface {
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}
type Granter interface {
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}
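For orientation, a minimal sketch of how these interfaces are expected to be wired together using the constructors introduced elsewhere in this diff (NewStore, NewGetter, NewSetter, NewGranter, NewHandler); sqlStore and authzService stand in for the real dependencies.
store := implrole.NewStore(sqlStore)        // roletypes.Store backed by sqlstore
getter := implrole.NewGetter(store)         // read-only role lookups
setter := implrole.NewSetter(store, authzService)
granter := implrole.NewGranter(store, authzService)
handler := implrole.NewHandler(setter, getter) // backs the role HTTP routes
// the granter is handed to the user module so user create/update/delete
// can grant, modify, and revoke the managed roles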

View File

@@ -12,60 +12,30 @@ import (
)
const (
urlPathKeyLegacy = "http.url"
serverAddressKeyLegacy = "net.peer.name"
urlPathKey = "url.full"
serverAddressKey = "server.address"
derivedKeyHTTPURL = "http_url" // https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url
derivedKeyHTTPHost = "http_host"
)
var defaultStepInterval = 60 * time.Second
type SemconvFieldMapping struct {
LegacyField string
CurrentField string
FieldType telemetrytypes.FieldDataType
Context telemetrytypes.FieldContext
}
var dualSemconvGroupByKeys = map[string][]qbtypes.GroupByKey{
"server": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
},
"url": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
},
}
var (
groupByKeyHTTPHost = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPHost,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
},
}
groupByKeyHTTPURL = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPURL,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
},
}
)
func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
@@ -114,103 +84,6 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
return result
}
func MergeSemconvColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
return result
}
for _, res := range result.Data.Results {
scalarData, ok := res.(*qbtypes.ScalarData)
if !ok {
continue
}
serverAddressKeyIdx := -1
serverAddressKeyLegacyIdx := -1
for i, col := range scalarData.Columns {
if col.Name == serverAddressKey {
serverAddressKeyIdx = i
} else if col.Name == serverAddressKeyLegacy {
serverAddressKeyLegacyIdx = i
}
}
if serverAddressKeyIdx == -1 || serverAddressKeyLegacyIdx == -1 {
continue
}
var newRows [][]any
for _, row := range scalarData.Data {
if len(row) <= serverAddressKeyIdx || len(row) <= serverAddressKeyLegacyIdx {
continue
}
var serverName any
if isValidValue(row[serverAddressKeyIdx]) {
serverName = row[serverAddressKeyIdx]
} else if isValidValue(row[serverAddressKeyLegacyIdx]) {
serverName = row[serverAddressKeyLegacyIdx]
}
if serverName != nil {
newRow := make([]any, len(row)-1)
newRow[0] = serverName
targetIdx := 1
for i, val := range row {
if i != serverAddressKeyLegacyIdx && i != serverAddressKeyIdx {
if targetIdx < len(newRow) {
newRow[targetIdx] = val
targetIdx++
}
}
}
newRows = append(newRows, newRow)
}
}
newColumns := make([]*qbtypes.ColumnDescriptor, len(scalarData.Columns)-1)
targetIdx := 0
for i, col := range scalarData.Columns {
if i == serverAddressKeyIdx {
newCol := &qbtypes.ColumnDescriptor{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: col.FieldDataType,
FieldContext: col.FieldContext,
Signal: col.Signal,
},
QueryName: col.QueryName,
AggregationIndex: col.AggregationIndex,
Meta: col.Meta,
Type: col.Type,
}
newColumns[targetIdx] = newCol
targetIdx++
} else if i != serverAddressKeyLegacyIdx {
newColumns[targetIdx] = col
targetIdx++
}
}
scalarData.Columns = newColumns
scalarData.Data = newRows
}
return result
}
func isValidValue(val any) bool {
if val == nil {
return false
}
if str, ok := val.(string); ok {
return str != "" && str != "n/a"
}
return true
}
func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRangeResponse {
filteredResults := make([]*qbtypes.QueryRangeResponse, 0, len(results))
@@ -261,7 +134,7 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
for _, label := range series.Labels {
if label.Key.Name == serverAddressKeyLegacy || label.Key.Name == serverAddressKey {
if label.Key.Name == derivedKeyHTTPHost {
if strVal, ok := label.Value.(string); ok {
if net.ParseIP(strVal) != nil {
return false
@@ -274,12 +147,10 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
func shouldIncludeRow(row *qbtypes.RawRow) bool {
if row.Data != nil {
for _, key := range []string{serverAddressKeyLegacy, serverAddressKey} {
if domainVal, ok := row.Data[key]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
}
}
@@ -287,8 +158,8 @@ func shouldIncludeRow(row *qbtypes.RawRow) bool {
return true
}
func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append(base, additional...)
func mergeGroupBy(base qbtypes.GroupByKey, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append([]qbtypes.GroupByKey{base}, additional...)
}
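With the new signature, the derived http_host column always leads the grouping and the caller's keys follow, for example (req is assumed to be the incoming third-party API request):
// groupBy == [http_host, <caller-supplied keys...>]
groupBy := mergeGroupBy(groupByKeyHTTPHost, req.GroupBy)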
func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
@@ -354,10 +225,10 @@ func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "count_distinct(http.url)"},
{Expression: "count_distinct(http_url)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -373,7 +244,7 @@ func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Qu
{Expression: "max(timestamp)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -389,7 +260,7 @@ func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "rate()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -407,7 +278,7 @@ func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Query
{Expression: "count()"},
},
Filter: filter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -423,7 +294,7 @@ func buildTotalSpanQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
{Expression: "count()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -439,7 +310,7 @@ func buildP99Query(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "p99(duration_nano)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -462,10 +333,10 @@ func buildEndpointsInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtyp
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "rate(http.url)"},
{Expression: fmt.Sprintf("rate(%s)", derivedKeyHTTPURL)},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["url"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPURL, req.GroupBy),
},
}
}
@@ -519,8 +390,7 @@ func buildLastSeenInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtype
}
func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
baseExpression := fmt.Sprintf("(%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
urlPathKeyLegacy, urlPathKey)
baseExpression := fmt.Sprintf("%s EXISTS AND kind_string = 'Client'", derivedKeyHTTPURL)
if additionalFilter != nil && additionalFilter.Expression != "" {
// even if it contains kind_string we add with an AND so it doesn't matter if the user is overriding it.
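For comparison, the base expressions the two versions of buildBaseFilter produce; how the additional filter is appended lies outside this hunk and is assumed to be a plain AND.
// old: (http.url EXISTS OR url.full EXISTS) AND kind_string = 'Client'
// new, against the derived column populated at ingestion:
base := fmt.Sprintf("%s EXISTS AND kind_string = 'Client'", derivedKeyHTTPURL)
// base == "http_url EXISTS AND kind_string = 'Client'"; a user filter such as
// http_host = 'api.github.com' is then ANDed onto it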

View File

@@ -1,9 +1,10 @@
package thirdpartyapi
import (
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
"testing"
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
@@ -28,7 +29,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "192.168.1.1",
},
},
@@ -36,7 +37,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "example.com",
},
},
@@ -60,7 +61,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "example.com",
},
},
@@ -84,12 +85,12 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
"net.peer.name": "192.168.1.1",
derivedKeyHTTPHost: "192.168.1.1",
},
},
{
Data: map[string]any{
"net.peer.name": "example.com",
derivedKeyHTTPHost: "example.com",
},
},
},
@@ -106,7 +107,7 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
"net.peer.name": "example.com",
derivedKeyHTTPHost: "example.com",
},
},
},

View File

@@ -8,11 +8,11 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -32,13 +32,13 @@ type Module struct {
emailing emailing.Emailing
settings factory.ScopedProviderSettings
orgSetter organization.Setter
authz authz.AuthZ
granter role.Granter
analytics analytics.Analytics
config user.Config
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -47,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
settings: settings,
orgSetter: orgSetter,
analytics: analytics,
authz: authz,
granter: granter,
config: config,
}
}
@@ -172,7 +172,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
@@ -238,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
if user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
err = m.granter.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
@@ -301,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
}
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -369,7 +369,7 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID
func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error {
if !module.config.Password.Reset.AllowSelf {
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "users are not allowed to reset their password themselves, please contact an admin to reset your password")
}
user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)
@@ -504,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
}
managedRoles := roletypes.NewManagedRoles(organization.ID)
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
if err != nil {
return nil, err
}
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
if err != nil {
return err
}

View File

@@ -25,6 +25,7 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -68,7 +69,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
"go.uber.org/zap"
@@ -103,11 +104,10 @@ type APIHandler struct {
querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
// temporalityMap is a map of metric name to temporality to avoid fetching
// temporality for the same metric multiple times.
//
// Querying the v4 table on a low cardinal temporality column should be
// fast, but we can still avoid the query if we have the data in memory.
// temporalityMap is a map of metric name to temporality
// to avoid fetching temporality for the same metric multiple times
// querying the v4 table on low cardinal temporality column
// should be fast but we can still avoid the query if we have the data in memory
temporalityMap map[string]map[v3.Temporality]bool
temporalityMux sync.Mutex
@@ -145,6 +145,8 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -175,6 +177,8 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -239,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -394,6 +399,13 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
@@ -1011,7 +1023,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
// the query range is calculated based on the rule's evalWindow and evalDelay
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
if rule.AlertType == ruletypes.AlertTypeLogs {
if rule.Version != "v5" {
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
@@ -1218,12 +1230,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
dashboard := new(dashboardtypes.Dashboard)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
return
}
dashboard = cloudIntegrationDashboard
dashboard = cloudintegrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1552,13 +1564,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
responseData := &model.QueryData{
response_data := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.Respond(w, responseData)
aH.Respond(w, response_data)
}
@@ -2640,12 +2652,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2693,12 +2705,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2747,12 +2759,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2801,12 +2813,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2858,12 +2870,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
@@ -2969,12 +2981,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3023,12 +3035,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3077,12 +3089,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3137,12 +3149,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
@@ -4126,11 +4138,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
aH.Respond(w, payload)
}
// listLogsPipelines lists logs pipelines for latest version
// listLogsPipelines lists logs piplines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
*logparsingpipeline.PipelinesResponse, error,
) {
// get latest agent config
// get lateset agent config
latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
@@ -4427,7 +4439,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4444,7 +4456,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, errQueriesByName)
RespondError(w, apiErr, errQuriesByName)
return
}
// get the fields if any logs query is present
@@ -4455,7 +4467,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4500,11 +4512,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQueriesByName {
for name, err := range errQuriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
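The per-query error handling in these handlers follows one pattern: QueryRange returns both a combined error and a map of per-query errors, and the map is flattened into string messages keyed with a `Query-` prefix before the API error is built. A minimal, self-contained sketch of that flattening step (the map literal here is illustrative; the real map comes from the querier):

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	// per-query errors as returned by querier.QueryRange in the hunks above
	errQueriesByName := map[string]error{
		"A": errors.New("clickhouse timeout"),
		"B": errors.New("bad aggregation"),
	}

	// flatten into string messages keyed with the "Query-" prefix
	queryErrors := map[string]string{}
	for name, qErr := range errQueriesByName {
		queryErrors[fmt.Sprintf("Query-%s", name)] = qErr.Error()
	}
	fmt.Println(queryErrors)
}
```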
@@ -4780,7 +4792,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4810,7 +4822,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4833,11 +4845,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQueriesByName {
for name, err := range errQuriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4854,7 +4866,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")
@@ -4996,7 +5008,6 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false
@@ -5053,7 +5064,6 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false

View File

@@ -17,6 +17,7 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -207,12 +209,13 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
api.RegisterRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -5,10 +5,10 @@ import (
"os"
"regexp"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
func GetEvalDelay() valuer.TextDuration {
func GetEvalDelay() time.Duration {
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
if err != nil {
return valuer.TextDuration{}
return 0
}
return evalDelayDuration
}
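Both sides of this hunk read RULES_EVAL_DELAY from the environment, default to "2m", and return a zero value when parsing fails; only the return type differs. A minimal sketch of the time.Duration variant with the GetOrDefaultEnv helper inlined (the real constant and helper live in this constants package):

```go
package main

import (
	"fmt"
	"os"
	"time"
)

// evalDelayFromEnv mirrors GetEvalDelay above: read RULES_EVAL_DELAY,
// fall back to "2m", and return the zero value on a malformed duration.
func evalDelayFromEnv() time.Duration {
	raw := os.Getenv("RULES_EVAL_DELAY")
	if raw == "" {
		raw = "2m"
	}
	d, err := time.ParseDuration(raw)
	if err != nil {
		return 0
	}
	return d
}

func main() {
	fmt.Println(evalDelayFromEnv()) // 2m0s unless RULES_EVAL_DELAY is set
}
```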

View File

@@ -71,18 +71,7 @@ func Parse(filters *v3.FilterSet) (string, error) {
// accustom log filters like `body.log.message EXISTS` into EXPR language
// where User is attempting to check for keys present in JSON log body
if strings.HasPrefix(v.Key.Key, "body.") {
// if body is a string and is a valid JSON, then check if the key exists in the JSON
filter = fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s %s %s)`, exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
// if body is a map, then check if the key exists in the map
operator := v3.FilterOperatorNotEqual
if v.Operator == v3.FilterOperatorNotExists {
operator = v3.FilterOperatorEqual
}
nilCheckFilter := fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])
// join the two filters with OR
filter = fmt.Sprintf(`(%s or (type(body) == "map" && (%s)))`, filter, nilCheckFilter)
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
} else if typ := getTypeName(v.Key.Type); typ != "" {
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
} else {
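Both variants in this hunk rewrite a `body.`-prefixed EXISTS filter into an EXPR membership check on the parsed body; the longer form additionally guards on the body's type and adds a map fallback. A self-contained sketch that prints both expression shapes for `body.log.message EXISTS` (plain Go string quoting stands in for exprFormattedValue):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	key := strings.TrimPrefix("body.log.message", "body.") // "log.message"
	quoted := fmt.Sprintf("%q", key)

	// simple form: feed the body to fromJSON directly
	simple := fmt.Sprintf(`%s in %s`, quoted, "fromJSON(body)")

	// guarded form: only parse string bodies that are valid JSON, and fall
	// back to a nil check when the body is already a map
	jsonCheck := fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s in fromJSON(body))`, quoted)
	guarded := fmt.Sprintf(`(%s or (type(body) == "map" && (body.log.message != nil)))`, jsonCheck)

	fmt.Println(simple)
	fmt.Println(guarded)
}
```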

View File

@@ -3,538 +3,203 @@ package queryBuilderToExpr
import (
"testing"
signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/expr-lang/expr/vm"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
"github.com/stretchr/testify/assert"
. "github.com/smartystreets/goconvey/convey"
)
func TestParseExpression(t *testing.T) {
var testCases = []struct {
Name string
Query *v3.FilterSet
Expr string
ExpectError bool
}{
{
Name: "equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
}},
Expr: `attributes["key"] == "checkbody"`,
},
{
Name: "not equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
}},
Expr: `attributes["key"] != "checkbody"`,
},
{
Name: "less than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
}},
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
},
{
Name: "greater than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
}},
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
},
{
Name: "less than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
},
{
Name: "greater than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
}},
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
},
// case sensitive
{
Name: "body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
Expr: `body != nil && lower(body) contains lower("checkbody")`,
},
{
Name: "body.log.message exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))`,
},
{
Name: "body.log.message not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" not in fromJSON(body)) or (type(body) == "map" && (body.log.message == nil)))`,
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
}},
Expr: `body != nil && body matches "[0-1]+regex$"`,
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
}},
Expr: `body != nil && body not matches "[0-1]+regex$"`,
},
{
Name: "regex with escape characters",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
}},
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
},
{
Name: "invalid regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
}},
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
ExpectError: true,
},
{
Name: "in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{1, 2, 3, 4}, Operator: "in"},
}},
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
},
{
Name: "not in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"1", "2"}, Operator: "nin"},
}},
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
},
{
Name: "exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
Expr: `"key" in attributes`,
},
{
Name: "not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `"key" not in attributes`,
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `trace_id == nil`,
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `trace_id != nil`,
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `span_id == nil`,
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `span_id != nil`,
},
{
Name: "Multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
},
{
Name: "incorrect multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
ExpectError: true,
},
}
var testCases = []struct {
Name string
Query *v3.FilterSet
Expr string
ExpectError bool
}{
{
Name: "equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
}},
Expr: `attributes["key"] == "checkbody"`,
},
{
Name: "not equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
}},
Expr: `attributes["key"] != "checkbody"`,
},
{
Name: "less than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
}},
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
},
{
Name: "greater than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
}},
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
},
{
Name: "less than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
},
{
Name: "greater than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
}},
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
},
// case sensitive
{
Name: "body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
Expr: `body != nil && lower(body) contains lower("checkbody")`,
},
{
Name: "body.log.message exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `"log.message" in fromJSON(body)`,
},
{
Name: "body.log.message not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `"log.message" not in fromJSON(body)`,
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
}},
Expr: `body != nil && body matches "[0-1]+regex$"`,
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
}},
Expr: `body != nil && body not matches "[0-1]+regex$"`,
},
{
Name: "regex with escape characters",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
}},
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
},
{
Name: "invalid regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
}},
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
ExpectError: true,
},
{
Name: "in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{1, 2, 3, 4}, Operator: "in"},
}},
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
},
{
Name: "not in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"1", "2"}, Operator: "nin"},
}},
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
},
{
Name: "exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
Expr: `"key" in attributes`,
},
{
Name: "not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `"key" not in attributes`,
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `trace_id == nil`,
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `trace_id != nil`,
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `span_id == nil`,
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `span_id != nil`,
},
{
Name: "Multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
},
{
Name: "incorrect multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
ExpectError: true,
},
}
func TestParse(t *testing.T) {
for _, tt := range testCases {
t.Run(tt.Name, func(t *testing.T) {
Convey(tt.Name, t, func() {
x, err := Parse(tt.Query)
if tt.ExpectError {
assert.Error(t, err)
So(err, ShouldNotBeNil)
} else {
assert.NoError(t, err)
assert.Equal(t, tt.Expr, x)
So(err, ShouldBeNil)
So(x, ShouldEqual, tt.Expr)
}
})
}
}
type EntryComposite struct {
ID int
*entry.Entry
}
// makeEntry creates an EntryComposite for tests. Pass nil for traceID/spanID to mean "not set".
func makeEntry(id int, body any, attributes, resource map[string]any, traceID, spanID []byte) EntryComposite {
e := entry.New()
e.Body = body
if attributes != nil {
e.Attributes = attributes
} else {
e.Attributes = make(map[string]any)
}
if resource != nil {
e.Resource = resource
} else {
e.Resource = make(map[string]any)
}
if traceID != nil {
e.TraceID = traceID
}
if spanID != nil {
e.SpanID = spanID
}
return EntryComposite{ID: id, Entry: e}
}
func TestExpressionVSEntry(t *testing.T) {
// Dataset: entries with varied body (JSON and plain text), attributes, trace_id, span_id for filter testing.
// IDs 0..12: JSON bodies (body.msg / body.log etc. work). IDs 13..17: simple text log bodies.
dataset := []EntryComposite{
// JSON body entries (0-12)
makeEntry(0, `{"msg":"hello world"}`, map[string]any{"level": "info"}, map[string]any{"env": "prod", "host": "node-0"}, nil, nil),
makeEntry(1, `{"msg":"error occurred", "missing": "value"}`, map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-1"}, []byte("trace1"), []byte("span1")),
makeEntry(2, `{"msg":"checkbody substring"}`, map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-2"}, []byte("trace2"), nil),
makeEntry(3, `{"msg":"no match here"}`, map[string]any{"level": "debug"}, map[string]any{"env": "staging", "host": "node-3"}, nil, []byte("span3")),
makeEntry(4, `{"msg":"101regex suffix"}`, map[string]any{"code": "200", "count": int64(5)}, map[string]any{"env": "prod", "host": "node-4"}, nil, nil),
makeEntry(5, `{"msg":"plain text only"}`, map[string]any{"code": "404", "count": int64(10)}, map[string]any{"env": "prod", "host": "node-5"}, []byte("trace5"), []byte("span5")),
makeEntry(6, `{"log":{"message":"user login"}}`, map[string]any{"service": "auth"}, map[string]any{"env": "dev", "host": "node-6"}, nil, nil),
makeEntry(7, `{"log":{"message":"user logout"}}`, map[string]any{"service": "auth", "user_id": "u1"}, map[string]any{"env": "dev", "host": "node-7"}, []byte("trace7"), nil),
makeEntry(8, `{"event":"click"}`, map[string]any{"service": "api"}, map[string]any{"env": "dev", "host": "node-8"}, nil, nil),
makeEntry(9, `{"msg":"checkbody"}`, map[string]any{"tag": "exact", "num": int64(9)}, map[string]any{"env": "prod", "host": "node-9"}, nil, nil),
makeEntry(10, `{"msg":"CHECKBODY case"}`, map[string]any{"tag": "case", "num": int64(10)}, map[string]any{"env": "staging", "host": "node-10"}, nil, nil),
makeEntry(11, `{"msg":"foo"}`, map[string]any{"status": "active", "score": int64(100)}, map[string]any{"env": "prod", "host": "node-11"}, nil, nil),
makeEntry(12, `{"msg":"bar"}`, map[string]any{"status": "inactive", "score": int64(50)}, map[string]any{"env": "staging", "host": "node-12"}, []byte("trace12"), []byte("span12")),
// Plain text log body entries (13-17)
makeEntry(13, "Server started on port 8080", map[string]any{"component": "server"}, map[string]any{"env": "prod", "host": "node-13"}, nil, nil),
makeEntry(14, "Connection refused to 10.0.0.1:5432", map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-14"}, nil, nil),
makeEntry(15, "User login failed for admin", map[string]any{"service": "auth", "level": "warn"}, map[string]any{"env": "dev", "host": "node-15"}, []byte("trace15"), nil),
makeEntry(16, "checkbody in text log", map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-16"}, nil, nil),
makeEntry(17, "WARN: disk full on /var", map[string]any{"level": "warn"}, map[string]any{"env": "prod", "host": "node-17"}, nil, []byte("span17")),
// Body as map (not string) entries (18-20)
makeEntry(18, map[string]any{"msg": "checkbody substring", "level": "info"}, map[string]any{"source": "map"}, map[string]any{"env": "prod", "host": "node-18"}, nil, nil),
makeEntry(19, map[string]any{"log": map[string]any{"message": "nested value in map body"}, "missing": true}, map[string]any{"source": "map"}, map[string]any{"env": "staging", "host": "node-19"}, []byte("trace19"), nil),
makeEntry(20, map[string]any{"event": "deploy", "version": "1.2.0"}, map[string]any{"source": "map", "level": "info"}, map[string]any{"env": "dev", "host": "node-20"}, nil, []byte("span20")),
}
var testCases = []struct {
Name string
Query *v3.FilterSet
ExpectedMatches []int
}{
{
Name: "resource equal (env)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{0, 1, 4, 5, 9, 11, 13, 14, 17, 18},
},
{
Name: "resource not equal (env)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "!="},
}},
ExpectedMatches: []int{2, 3, 6, 7, 8, 10, 12, 15, 16, 19, 20},
},
{
Name: "attribute less than (numeric)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: "<"},
}},
ExpectedMatches: []int{4},
},
{
Name: "attribute greater than (numeric)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: ">"},
}},
ExpectedMatches: []int{5},
},
{
Name: "body contains (case insensitive)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2, 9, 10, 16},
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
ExpectedMatches: []int{0, 1, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 17},
},
{
Name: "body.msg (case insensitive)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: false}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2, 9, 10, 18},
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
}},
ExpectedMatches: []int{4},
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "nregex"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
},
// body.log.message exists/nexists: expr checks "log.message" in fromJSON(body); nested key
// semantics depend on signoz stanza helper. Omitted here to avoid coupling to env shape.
{
Name: "body top-level key exists (body.msg)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 18},
},
{
Name: "body top-level key not exists (body.missing)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.missing", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 18, 20},
},
{
Name: "attribute exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
ExpectedMatches: []int{6, 7, 8, 15},
},
{
Name: "attribute not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20},
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{1, 2, 5, 7, 12, 15, 19},
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 3, 4, 6, 8, 9, 10, 11, 13, 14, 16, 17, 18, 20},
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{1, 3, 5, 12, 17, 20},
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 2, 4, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19},
},
{
Name: "in (attribute in list)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"info", "error"}, Operator: "in"},
}},
ExpectedMatches: []int{0, 1, 2, 14, 16, 20},
},
{
Name: "not in (attribute not in list)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"error", "warn"}, Operator: "nin"},
}},
ExpectedMatches: []int{0, 2, 3, 16, 20},
},
{
Name: "multi filter AND",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
}},
ExpectedMatches: []int{2, 16},
},
{
Name: "multi filter AND (two attributes)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
ExpectedMatches: []int{6, 7},
},
// Multi-filter variations: body + attribute, three conditions, trace/span + attribute
{
Name: "multi filter AND body contains + attribute",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "Connection", Operator: "contains"},
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{14},
},
{
Name: "multi filter AND body contains + service",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "login", Operator: "contains"},
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
}},
ExpectedMatches: []int{6, 15},
},
{
Name: "multi filter AND env + level (prod error)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "error", Operator: "="},
}},
ExpectedMatches: []int{1, 14},
},
{
Name: "multi filter AND three conditions (staging + checkbody + info)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
}},
ExpectedMatches: []int{2, 16},
},
{
Name: "multi filter AND trace_id exists + body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2},
},
{
Name: "multi filter AND span_id nexists + service auth",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
}},
ExpectedMatches: []int{6, 7, 15},
},
{
Name: "multi filter AND body regex + attribute",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: "="},
}},
ExpectedMatches: []int{4},
},
{
Name: "multi filter AND no trace_id + no span_id + env prod",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{0, 4, 9, 11, 13, 14, 18},
},
{
Name: "multi filter AND level warn + body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "warn", Operator: "="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "disk", Operator: "contains"},
}},
ExpectedMatches: []int{17},
},
{
Name: "no matches (attribute value not present)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "never", Operator: "="},
}},
ExpectedMatches: []int{},
},
{
Name: "attribute equal and trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "404", Operator: "="},
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{5},
},
}
for _, tt := range testCases {
t.Run(tt.Name, func(t *testing.T) {
expression, err := Parse(tt.Query)
assert.NoError(t, err)
compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
assert.NoError(t, err)
matchedIDs := []int{}
for _, d := range dataset {
env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
matches, err := vm.Run(compiled, env)
signozstanzahelper.PutExprEnv(env)
if err != nil {
// Eval error (e.g. fromJSON on non-JSON body) => treat as no match
continue
}
if matches != nil && matches.(bool) {
matchedIDs = append(matchedIDs, d.ID)
}
}
assert.Equal(t, tt.ExpectedMatches, matchedIDs, "query %q", tt.Name)
})
}
}
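TestExpressionVSEntry parses each filter into an EXPR string, compiles it with ExprCompileBool, and runs it against every entry's environment, treating evaluation errors as non-matches. A stripped-down sketch of that evaluate-per-entry loop using expr-lang directly (the stanza helper additionally provides the entry-backed environment via GetExprEnv/PutExprEnv and body field handling; the env maps here are illustrative):

```go
package main

import (
	"fmt"

	"github.com/expr-lang/expr"
)

func main() {
	// compile once, forcing a boolean result, like ExprCompileBool above
	program, err := expr.Compile(`attributes["level"] == "error"`, expr.AsBool())
	if err != nil {
		panic(err)
	}

	entries := []map[string]any{
		{"attributes": map[string]any{"level": "info"}},
		{"attributes": map[string]any{"level": "error"}},
	}
	for i, env := range entries {
		out, err := expr.Run(program, env)
		if err != nil {
			continue // evaluation error => treat as no match, as in the test
		}
		if out.(bool) {
			fmt.Println("matched entry", i)
		}
	}
}
```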

View File

@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we lookback from the current time, we look at data for the last
// evalWindow duration
evalWindow valuer.TextDuration
evalWindow time.Duration
// holdDuration is the duration for which the alert waits before firing
holdDuration valuer.TextDuration
holdDuration time.Duration
// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
evalDelay valuer.TextDuration
evalDelay time.Duration
// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation
// newGroupEvalDelay is the grace period for new alert groups
newGroupEvalDelay valuer.TextDuration
newGroupEvalDelay *time.Duration
queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}
func WithEvalDelay(dur valuer.TextDuration) RuleOption {
func WithEvalDelay(dur time.Duration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
evalWindow: p.EvalWindow,
evalWindow: time.Duration(p.EvalWindow),
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,12 +176,13 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}
// Store newGroupEvalDelay and groupBy keys from NotificationSettings
if p.NotificationSettings != nil {
baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
baseRule.newGroupEvalDelay = &newGroupEvalDelay
}
if baseRule.evalWindow.IsZero() {
baseRule.evalWindow = valuer.MustParseTextDuration("5m")
if baseRule.evalWindow == 0 {
baseRule.evalWindow = 5 * time.Minute
}
for _, opt := range opts {
@@ -244,15 +245,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}
func (r *BaseRule) EvalDelay() valuer.TextDuration {
func (r *BaseRule) EvalDelay() time.Duration {
return r.evalDelay
}
func (r *BaseRule) EvalWindow() valuer.TextDuration {
func (r *BaseRule) EvalWindow() time.Duration {
return r.evalWindow
}
func (r *BaseRule) HoldDuration() valuer.TextDuration {
func (r *BaseRule) HoldDuration() time.Duration {
return r.holdDuration
}
@@ -280,7 +281,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()
if r.evalDelay.IsPositive() {
if r.evalDelay > 0 {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
@@ -551,7 +552,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q
// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
return r.newGroupEvalDelay.IsPositive()
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
}
// isFilterNewSeriesSupported checks if the query is supported for new series filtering
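This file swaps the rule's timing fields between two representations: a duration value type with predicate helpers (IsZero, IsPositive, Duration, Milliseconds, all visible above) and plain time.Duration, with newGroupEvalDelay becoming an optional pointer. A small runnable sketch of how the "is the delay configured?" check differs between the two shapes (the struct below is a stand-in, not the real valuer.TextDuration):

```go
package main

import (
	"fmt"
	"time"
)

// textDuration is a stand-in for valuer.TextDuration, only to illustrate the
// value-type style of the check.
type textDuration struct{ d time.Duration }

func (t textDuration) IsPositive() bool { return t.d > 0 }

func main() {
	// pointer style: nil means "not configured"
	var delayPtr *time.Duration
	fmt.Println(delayPtr != nil && *delayPtr > 0) // false

	d := 2 * time.Minute
	delayPtr = &d
	fmt.Println(delayPtr != nil && *delayPtr > 0) // true

	// value style: the zero value means "not configured"
	fmt.Println(textDuration{}.IsPositive())                   // false
	fmt.Println(textDuration{d: 2 * time.Minute}.IsPositive()) // true
}
```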

View File

@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
newGroupEvalDelay *time.Duration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,8 +159,7 @@ type filterNewSeriesTestCase struct {
func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultDelay := 2 * time.Minute
defaultGroupByFields := []string{"service_name", "env"}
logger := instrumentationtest.New().Logger()
@@ -203,7 +202,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -235,7 +234,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -262,7 +261,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -296,7 +295,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -326,7 +325,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -362,7 +361,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -391,7 +390,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -421,7 +420,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -455,7 +454,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -489,7 +488,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -533,7 +532,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -573,7 +572,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -605,7 +604,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -640,7 +639,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -698,14 +697,20 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
time.Second,
time.Duration(time.Second),
nil,
readerCache,
options,
)
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: tt.newGroupEvalDelay,
// Set newGroupEvalDelay in NotificationSettings if provided
if tt.newGroupEvalDelay != nil {
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: func() *ruletypes.Duration {
d := ruletypes.Duration(*tt.newGroupEvalDelay)
return &d
}(),
}
}
// Create BaseRule using NewBaseRule
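Because NewGroupEvalDelay is a pointer in NotificationSettings, the test converts the plain time.Duration and takes its address before wiring it in. A small sketch of that conversion step (Duration below models ruletypes.Duration as a time.Duration wrapper, which is an assumption about its underlying type):

```go
package main

import (
	"fmt"
	"time"
)

// Duration models ruletypes.Duration for illustration only.
type Duration time.Duration

// durationPtr converts and takes the address in one step, replacing the
// inline func() *ruletypes.Duration { ... }() used in the test above.
func durationPtr(d time.Duration) *Duration {
	v := Duration(d)
	return &v
}

func main() {
	p := durationPtr(2 * time.Minute)
	fmt.Println(time.Duration(*p)) // 2m0s
}
```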

View File

@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}
const taskNameSuffix = "webAppEditor"
const taskNamesuffix = "webAppEditor"
func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache
EvalDelay valuer.TextDuration
EvalDelay time.Duration
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create ch rule task for evalution
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create promql rule task for evalution
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}
// Stop the rule manager's rule evaluation cycles.
func (m *Manager) Stop(_ context.Context) {
func (m *Manager) Stop(ctx context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -336,7 +336,7 @@ func (m *Manager) Stop(_ context.Context) {
zap.L().Info("Rule manager stopped")
}
// EditRule writes the rule definition to the
// EditRuleDefinition writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}
// If there is another task with the same identifier, raise an error
// If there is an another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,8 +678,7 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}
// RuleTasksWithoutLock returns the list of manager's rule tasks without
// acquiring a lock on the manager.
// RuleTasks returns the list of manager's rule tasks.
func (m *Manager) RuleTasksWithoutLock() []Task {
rgs := make([]Task, 0, len(m.tasks))
@@ -890,7 +889,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has no task, start one
// rule has not task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}
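
Note on the duration changes in the hunks above: one side of this diff stores delays and frequencies as plain time.Duration (or ruletypes.Duration) values, the other as valuer.TextDuration values built with valuer.MustParseTextDuration and converted back with .Duration() (for example evaluation.GetFrequency().Duration()). The sketch below shows what a minimal text-backed duration of that shape could look like; it is an illustration inferred from the call sites in the diff, not the actual valuer package.

package main

import (
	"fmt"
	"time"
)

// TextDuration is a sketch of a duration value that round-trips through its
// text form ("5m", "1h"). It is modelled on the valuer.TextDuration calls in
// this diff; the real SigNoz type may differ.
type TextDuration struct {
	d time.Duration
}

// MustParseTextDuration parses a Go duration string and panics on error,
// mirroring the MustParseTextDuration calls used in the fixtures.
func MustParseTextDuration(s string) TextDuration {
	d, err := time.ParseDuration(s)
	if err != nil {
		panic(err)
	}
	return TextDuration{d: d}
}

// Duration exposes the underlying time.Duration, which is what the scheduler
// ultimately needs (compare evaluation.GetFrequency().Duration() above).
func (t TextDuration) Duration() time.Duration { return t.d }

func main() {
	freq := MustParseTextDuration("1m")
	fmt.Println(freq.Duration()) // 1m0s
}

A wrapper of this shape typically exists so that durations can be written as "5m" in configuration or JSON while the evaluation code still works in time.Duration.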

View File

@@ -9,7 +9,6 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -41,8 +40,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -100,8 +99,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
Labels: map[string]string{
"service.name": "frontend",

View File

@@ -28,8 +28,6 @@ type PromRule struct {
prometheus prometheus.Prometheus
}
var _ Rule = (*PromRule)(nil)
func NewPromRule(
id string,
orgID valuer.UUID,
@@ -334,7 +332,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -398,7 +396,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
EvalWindow: ruletypes.Duration(r.evalWindow),
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
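
The Eval hunk above promotes a pending alert to firing once it has been active for at least the hold duration (ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() on one side, the bare time.Duration comparison on the other). Below is a minimal, self-contained version of that transition; Alert and State are stand-ins rather than the real query-service model types.

package main

import (
	"fmt"
	"time"
)

// State and Alert are stand-ins for the model types used in the hunk above.
type State int

const (
	StatePending State = iota
	StateFiring
)

type Alert struct {
	State    State
	ActiveAt time.Time
	FiredAt  time.Time
}

// promote flips a pending alert to firing once it has been active for at
// least holdDuration, mirroring ts.Sub(a.ActiveAt) >= r.holdDuration.Duration().
func promote(a *Alert, ts time.Time, holdDuration time.Duration) {
	if a.State == StatePending && ts.Sub(a.ActiveAt) >= holdDuration {
		a.State = StateFiring
		a.FiredAt = ts
	}
}

func main() {
	now := time.Now()
	a := &Alert{State: StatePending, ActiveAt: now.Add(-2 * time.Minute)}
	promote(a, now, time.Minute)
	fmt.Println(a.State == StateFiring) // true
}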

View File

@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}
// NewPromRuleTask holds rules that have promql condition
// and evaluates the rule at a given frequency
// newPromRuleTask holds rules that have promql condition
// and evalutes the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))
if frequency == 0 {
if time.Now() == time.Now().Add(frequency) {
frequency = DefaultFrequency
}
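
Both sides of the hunk above fall back to a default frequency when none is supplied, but they detect the zero value differently: frequency == 0 is explicit, whereas time.Now() == time.Now().Add(frequency) relies on two consecutive time.Now() calls comparing equal, which the clock does not guarantee. A small sketch of the explicit check, using the DefaultFrequency constant that appears later in this diff:

package main

import (
	"fmt"
	"time"
)

// DefaultFrequency mirrors the constant shown further down in this diff
// (const DefaultFrequency = 1 * time.Minute).
const DefaultFrequency = 1 * time.Minute

// withDefaultFrequency uses the explicit zero-check: comparing frequency
// against 0 directly instead of comparing two separate time.Now() calls.
func withDefaultFrequency(frequency time.Duration) time.Duration {
	if frequency == 0 {
		return DefaultFrequency
	}
	return frequency
}

func main() {
	fmt.Println(withDefaultFrequency(0))                // 1m0s
	fmt.Println(withDefaultFrequency(30 * time.Second)) // 30s
}

The same fallback pattern appears again in NewRuleTask further down in this diff.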

View File

@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
lookBackDelta := time.Minute
postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -20,9 +19,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
EvalDelay() valuer.TextDuration
EvalWindow() valuer.TextDuration
HoldDuration() valuer.TextDuration
EvalDelay() time.Duration
EvalWindow() time.Duration
HoldDuration() time.Duration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint

View File

@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {
if frequency == 0 {
if time.Now() == time.Now().Add(frequency) {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,7 +78,6 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }
// Interval returns the group's interval.
// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }
func (g *RuleTask) Pause(b bool) {

View File

@@ -61,8 +61,6 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -748,7 +746,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -814,7 +812,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
EvalWindow: ruletypes.Duration(r.evalWindow),
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()
for idx, c := range cases {
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}
logger := instrumentationtest.New().Logger()
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
require.NoError(t, err)
now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
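
Throughout these tests the rule is built as NewThresholdRule(..., WithEvalDelay(...)), that is, with a functional option. The sketch below shows the general shape of such an option; the ThresholdRule struct here is a stand-in exposing only the field the option touches, not the real rule type.

package main

import (
	"fmt"
	"time"
)

// ThresholdRule is a stand-in carrying only the field the option touches;
// the real rule type holds much more state.
type ThresholdRule struct {
	evalDelay time.Duration
}

type RuleOption func(*ThresholdRule)

// WithEvalDelay returns an option that shifts evaluation back by d, in the
// spirit of the WithEvalDelay calls in the tests above.
func WithEvalDelay(d time.Duration) RuleOption {
	return func(r *ThresholdRule) { r.evalDelay = d }
}

func NewThresholdRule(opts ...RuleOption) *ThresholdRule {
	r := &ThresholdRule{}
	for _, opt := range opts {
		opt(r)
	}
	return r
}

func main() {
	r := NewThresholdRule(WithEvalDelay(2 * time.Minute))
	fmt.Println(r.evalDelay) // 2m0s
}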

View File

@@ -1,8 +1,6 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
@@ -13,14 +11,14 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -30,7 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -46,21 +43,10 @@ type Handlers struct {
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Fields fields.Handler
AuthzHandler authz.Handler
Role role.Handler
}
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -74,7 +60,6 @@ func NewHandlers(
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
}
}

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,9 +41,13 @@ func TestNewHandlers(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)

View File

@@ -25,6 +25,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -66,6 +67,9 @@ type Modules struct {
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
RoleSetter role.Setter
RoleGetter role.Getter
Granter role.Granter
}
func NewModules(
@@ -85,10 +89,13 @@ func NewModules(
queryParser queryparser.QueryParser,
config Config,
dashboard dashboard.Module,
roleSetter role.Setter,
roleGetter role.Getter,
granter role.Granter,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, granter, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -110,5 +117,8 @@ func NewModules(
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
RoleSetter: roleSetter,
RoleGetter: roleGetter,
Granter: granter,
}
}
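
The hunk above threads three separate role dependencies (role.Setter, role.Getter, role.Granter) into NewModules and hands only the Granter to the user module, where the other side passed a broader authz dependency. The sketch below illustrates that narrower-dependency wiring with stand-in interfaces; the method sets are assumptions, not the real module contract.

package main

import "fmt"

// Setter, Getter and Granter are stand-ins for the pkg/modules/role
// interfaces; their method sets here are illustrative only.
type Setter interface{ Set(name string) error }
type Getter interface{ Get(name string) (string, error) }
type Granter interface{ Grant(user, role string) error }

// UserModule depends only on Granter, mirroring how the hunk above passes
// granter (rather than a broader authz service) into the user module.
type UserModule struct {
	granter Granter
}

func NewUserModule(g Granter) *UserModule { return &UserModule{granter: g} }

type memGranter struct{}

func (memGranter) Grant(user, role string) error {
	fmt.Printf("granted %s to %s\n", role, user)
	return nil
}

func main() {
	u := NewUserModule(memGranter{})
	_ = u.granter.Grant("alice", "ADMIN")
}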

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,7 +41,10 @@ func TestNewModules(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {

Some files were not shown because too many files have changed in this diff.