Compare commits

..

13 Commits

Author SHA1 Message Date
Yunus M
33b235e6c5 fix: test title to use correct attribute 2026-02-09 16:58:51 +05:30
Yunus M
6bf23dce20 fix: remove endpoint in StatusCodeBarCharts as it's fetched from filters 2026-02-09 16:51:37 +05:30
Yunus M
78dbc53196 fix: remove redundant url handling in column 2026-02-09 16:51:37 +05:30
Yunus M
d25b84fe0a fix: replace http_url with constant 2026-02-09 16:51:37 +05:30
Yunus M
39801e44f7 fix: update SpanDetailsDrawer.test.tsx to use http_url 2026-02-09 16:51:37 +05:30
Yunus M
51b67de174 fix: remove unnecessary handling of http.url and update the test cases 2026-02-09 16:51:37 +05:30
Yunus M
d5231fa3aa fix: update test cases to use derived attributes 2026-02-09 16:51:37 +05:30
Yunus M
00d2ecb914 fix: use derived values for url and host attributes 2026-02-09 16:51:37 +05:30
Ashwin Bhatkal
128497f27a chore: folder name change + CODEOWNER update (#10246)
* chore: folder name change + CODEOWNER update

* chore: revert multi select file change
2026-02-09 09:40:26 +00:00
Jatinderjit Singh
9e466b56b2 chore: preserve the original duration format (#10149) 2026-02-09 09:24:58 +00:00
Vikrant Gupta
4ad0baa2a2 feat(authz): add support for wildcard selector (#10208)
* feat(authz): remove unnecessary dependency injection for role setter

* feat(authz): deprecate role module

* feat(authz): deprecate role module

* feat(authz): split between server and sql actions

* feat(authz): add bootstrap for managed role transactions

* feat(authz): update and add integration tests

* feat(authz): match names for factory and migration

* feat(authz): fix integration tests

* feat(authz): reduce calls on organisation creation
2026-02-09 14:37:44 +05:30
Srikanth Chekuri
24b588bfba chore: move fields api to openapi spec (#10219) 2026-02-09 13:43:36 +05:30
Abhi kumar
e5867cc2ad chore: updated chart theme colors (#10233)
* chore: updated chart theme colors

* fix: fixed failing tests
2026-02-09 12:25:36 +05:30
125 changed files with 2904 additions and 3780 deletions

7
.github/CODEOWNERS vendored
View File

@@ -133,5 +133,8 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
## Dashboard Libs + Components
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend

View File

@@ -18,8 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -78,18 +76,15 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -14,7 +14,6 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -29,8 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -118,18 +115,15 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)
if err != nil {

View File

@@ -607,6 +607,178 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/fields/keys:
get:
deprecated: false
description: This endpoint returns field keys
operationId: GetFieldsKeys
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field keys
tags:
- fields
/api/v1/fields/values:
get:
deprecated: false
description: This endpoint returns field values
operationId: GetFieldsValues
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
- in: query
name: name
schema:
type: string
- in: query
name: existingQuery
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field values
tags:
- fields
/api/v1/getResetPasswordToken/{id}:
get:
deprecated: false
@@ -4244,6 +4416,60 @@ components:
format: date-time
type: string
type: object
TelemetrytypesGettableFieldKeys:
properties:
complete:
type: boolean
keys:
additionalProperties:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
nullable: true
type: object
type: object
TelemetrytypesGettableFieldValues:
properties:
complete:
type: boolean
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
type: object
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
fieldDataType:
type: string
name:
type: string
signal:
type: string
unit:
type: string
type: object
TelemetrytypesTelemetryFieldValues:
properties:
boolValues:
items:
type: boolean
type: array
numberValues:
items:
format: double
type: number
type: array
relatedValues:
items:
type: string
type: array
stringValues:
items:
type: string
type: array
type: object
TypesChangePasswordRequest:
properties:
newPassword:

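The spec hunk above adds two read-only endpoints, /api/v1/fields/keys and /api/v1/fields/values. As a minimal sketch (not part of this change set), a client call against the keys endpoint could look like the following; the base URL and the SIGNOZ-API-KEY header name are assumptions, only the path, query parameters, and response shape come from the spec.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Assumed base URL; the spec above only defines the path, query
	// parameters, and response schema.
	base := "http://localhost:8080/api/v1/fields/keys"

	q := url.Values{}
	q.Set("signal", "traces")
	q.Set("searchText", "http")
	q.Set("limit", "50")

	req, err := http.NewRequest(http.MethodGet, base+"?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	// Assumed header name for the api_key security scheme.
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// Expected shape per the spec:
	// {"status": "...", "data": {"complete": bool, "keys": {...}}}
	fmt.Println(resp.Status, string(body))
}
```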
View File

@@ -2,12 +2,18 @@ package openfgaauthz
import (
"context"
"slices"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -15,84 +21,320 @@ import (
type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store roletypes.Store
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
openfgaServer: openfgaServer,
licensing: licensing,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
registry: registry,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.pkgAuthzService.Start(ctx)
return provider.openfgaServer.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.pkgAuthzService.Stop(ctx)
return provider.openfgaServer.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.pkgAuthzService.Check(ctx, tuple)
return provider.openfgaServer.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
tuples := make([]*openfgav1.TupleKey, 0)
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
if err != nil {
return err
}
tuples = append(tuples, grantTuples...)
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
if err != nil {
return err
}
tuples = append(tuples, managedRoleTuples...)
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range provider.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, provider.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range provider.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = provider.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return provider.store.Delete(ctx, orgID, id)
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := []*openfgav1.TupleKey{}
// Grant the admin role to the user
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
adminTuple, err := authtypes.TypeableRole.Tuples(
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, adminTuple...)
// Grant the admin role to the anonymous user
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
anonymousTuple, err := authtypes.TypeableRole.Tuples(
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, anonymousTuple...)
return tuples, nil
}
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
transactionsByRole := make(map[string][]*authtypes.Transaction)
for _, register := range provider.registry {
for roleName, txns := range register.MustGetManagedRoleTransactions() {
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
}
}
tuples := make([]*openfgav1.TupleKey, 0)
for roleName, transactions := range transactionsByRole {
for _, txn := range transactions {
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
txnTuples, err := typeable.Tuples(
authtypes.MustNewSubject(
authtypes.TypeableRole,
roleName,
orgID,
&authtypes.RelationAssignee,
),
txn.Relation,
[]authtypes.Selector{txn.Object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, txnTuples...)
}
}
return tuples, nil
}

View File

@@ -0,0 +1,83 @@
package openfgaserver
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type Server struct {
pkgAuthzService authz.AuthZ
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return server.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/types"
@@ -26,13 +25,11 @@ type module struct {
pkgDashboardModule dashboard.Module
store dashboardtypes.Store
settings factory.ScopedProviderSettings
roleSetter role.Setter
granter role.Granter
querier querier.Querier
licensing licensing.Licensing
}
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
@@ -40,8 +37,6 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
pkgDashboardModule: pkgDashboardModule,
store: store,
settings: scopedProviderSettings,
roleSetter: roleSetter,
granter: granter,
querier: querier,
licensing: licensing,
}
@@ -61,29 +56,6 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
if err != nil {
return err
}
additionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
if err != nil {
return err
}
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
if err != nil {
return err
@@ -128,6 +100,7 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
return []authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
}, storableDashboard.OrgID, nil
}
@@ -190,29 +163,6 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
@@ -250,10 +200,6 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
return module.pkgDashboardModule.List(ctx, orgID)
}
@@ -266,34 +212,27 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
}
return nil
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{
Type: authtypes.TypeMetaResource,
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
),
},
},
}
}
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
return module.store.DeletePublic(ctx, dashboardID.StringValue())
}
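The hunks above replace the per-dashboard role grants with a managed-role transaction that uses the wildcard selector ("*") on the public-dashboard resource, and GetPublicDashboardSelectorsAndOrg now returns both the exact selector and the wildcard one. A sketch of how a caller could pass that pair to the authz check, using the signatures visible in this diff; the dashboardtypes import path and the helper function itself are assumptions, not part of the change set.

```go
package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes" // assumed import path
	"github.com/SigNoz/signoz/pkg/valuer"
)

// checkPublicDashboardRead is illustrative only: authorize anonymous read
// access when either the exact public-dashboard selector or the wildcard
// selector matches a granted role.
func checkPublicDashboardRead(ctx context.Context, az authz.AuthZ, orgID, dashboardID valuer.UUID) error {
	selectors := []authtypes.Selector{
		// Exact match on this public dashboard...
		authtypes.MustNewSelector(authtypes.TypeMetaResource, dashboardID.StringValue()),
		// ...or a role granted the wildcard selector over the resource.
		authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
	}
	return az.CheckWithTupleCreationWithoutClaims(
		ctx,
		orgID,
		authtypes.RelationRead,
		dashboardtypes.TypeableMetaResourcePublicDashboard,
		selectors,
		nil, // roleSelectors
	)
}
```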

View File

@@ -1,165 +0,0 @@
package implrole
import (
"context"
"slices"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
licensing licensing.Licensing
registry []role.RegisterTypeable
}
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return &setter{
store: store,
authz: authz,
licensing: licensing,
registry: registry,
}
}
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range setter.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, setter.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range setter.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := setter.
authz.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return setter.store.Delete(ctx, orgID, id)
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
@@ -56,7 +55,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

View File

@@ -211,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
r.Use(otelmux.Middleware(
"apiserver",
@@ -237,7 +237,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,6 +63,8 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}
var _ baserules.Rule = (*AnomalyRule)(nil)
func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -490,7 +492,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -553,7 +555,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
EvalWindow: r.EvalWindow(),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

View File

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
evalTime := baseTime.Add(5 * time.Minute)
target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,8 +213,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}
// newTask returns an appropriate group for
// rule type
// newTask returns an appropriate group for the rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
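The duration-related hunks above keep EvalWindow and Frequency in their original text form (e.g. "5m") and convert with .Duration() only where a time.Duration is needed, such as the task frequency passed to newTask. A minimal sketch of that conversion pattern, assuming the valuer text-duration type exposes Duration() as used in these hunks:

```go
package example

import (
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// frequencyFor keeps the original text form and converts to time.Duration
// only at the call site; MustParseTextDuration panics on invalid input,
// per the Must prefix.
func frequencyFor(text string) time.Duration {
	d := valuer.MustParseTextDuration(text)
	return d.Duration() // assumed accessor, mirroring GetFrequency().Duration() above
}

var _ = frequencyFor("1m") // e.g. the evaluation frequency for a rule task
```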

View File

@@ -34,159 +34,230 @@ const themeColors = {
cyan: '#00FFFF',
},
chartcolors: {
radicalRed: '#FF1A66',
// Blues (3)
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
seaGreen: '#219653',
turquoiseBlue: '#56CCF2',
festivalOrange: '#F2C94C',
silver: '#BDBDBD',
outrageousOrange: '#FF6633',
roseBud: '#FFB399',
canary: '#FFFF99',
deepSkyBlue: '#00B3E6',
goldTips: '#E6B333',
royalBlue: '#3366E6',
avocado: '#999966',
mintGreen: '#99FF99',
chestnut: '#B34D4D',
lima: '#80B300',
olive: '#809900',
beautyBush: '#E6B3B3',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRose: '#FF99E6',
electricLime: '#CCFF1A',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
turquoise: '#33FFCC',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
yellow: '#FFFF00',
purple: '#800080',
cyan: '#00FFFF',
magenta: '#FF00FF',
orange: '#FFA500',
pink: '#FFC0CB',
brown: '#A52A2A',
teal: '#008080',
lime: '#00FF00',
maroon: '#800000',
navy: '#000080',
aquamarine: '#7FFFD4',
darkSeaGreen: '#8FBC8F',
gray: '#808080',
skyBlue: '#87CEEB',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peru: '#CD853F',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrown: '#BC8F8F',
darkSlateGray: '#2F4F4F',
mediumAquamarine: '#66CDAA',
lavender: '#E6E6FA',
thistle: '#D8BFD8',
salmon: '#FA8072',
darkSalmon: '#E9967A',
paleVioletRed: '#DB7093',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
lawnGreen: '#7CFC00',
// Teals / Cyans (3)
turquoise: '#00CEC9',
lagoon: '#1ABC9C',
cyanBright: '#22A6F2',
// Greens (3)
emeraldGreen: '#27AE60',
mediumSeaGreen: '#3CB371',
lightCoral: '#F08080',
gold: '#FFD700',
sandyBrown: '#F4A460',
darkKhaki: '#BDB76B',
cornflowerBlue: '#6495ED',
mediumVioletRed: '#C71585',
paleGreen: '#98FB98',
limeGreen: '#A3E635',
// Yellows / Golds (3)
festivalYellow: '#F2C94C',
sunflower: '#FFD93D',
warmAmber: '#FFCA28',
// Oranges (3)
festivalOrange: '#F2994A',
coralOrange: '#E17055',
pumpkin: '#FF7F50',
// Reds (3)
radicalRed: '#FF1A66',
crimsonRed: '#EB5757',
fireRed: '#E10600',
// Pinks (3)
hotPink: '#E84393',
rosePink: '#FD79A8',
blush: '#FF7EB6',
// Purples / Violets (3)
mediumPurple: '#BB6BD9',
royalPurple: '#9B51E0',
orchid: '#DA77F2',
// Accent / Neon / Unique Colors (3)
neonViolet: '#C77DFF',
electricPurple: '#6C5CE7',
arcticBlue: '#48DBFB',
// Extended palette — systematic variations to reach 100 colors
blue1: '#1F63E0',
blue2: '#3A7AED',
blue3: '#5A9DF5',
cyan1: '#00B0AA',
cyan2: '#33D6C2',
cyan3: '#66E9DA',
green1: '#1E8449',
green2: '#2ECC71',
green3: '#58D68D',
yellow1: '#F1C40F',
yellow2: '#F7DC6F',
yellow3: '#F9E79F',
orange1: '#D35400',
orange2: '#E67E22',
orange3: '#F5B041',
red1: '#C0392B',
red2: '#E74C3C',
red3: '#EC7063',
pink1: '#D81B60',
pink2: '#E91E63',
pink3: '#F06292',
purple1: '#8E44AD',
purple2: '#9B59B6',
purple3: '#BB8FCE',
teal1: '#009688',
teal2: '#1ABC9C',
teal3: '#48C9B0',
lime1: '#A3E635',
lime2: '#B9F18D',
lime3: '#D4FFB0',
gold1: '#F39C12',
gold2: '#F1C40F',
gold3: '#F7DC6F',
coral1: '#E67E22',
coral2: '#F39C12',
coral3: '#F5B041',
crimson1: '#C0392B',
crimson2: '#E74C3C',
crimson3: '#EC7063',
violet1: '#8E44AD',
violet2: '#9B59B6',
violet3: '#BB8FCE',
aqua1: '#00BFFF',
aqua2: '#1E90FF',
aqua3: '#63B8FF',
forest1: '#27AE60',
forest2: '#2ECC71',
forest3: '#58D68D',
blush1: '#FF6F91',
blush2: '#FF85A2',
blush3: '#FFA0B3',
lavender1: '#9B59B6',
lavender2: '#AF7AC5',
lavender3: '#C39BD3',
tomato1: '#E74C3C',
tomato2: '#EC7063',
tomato3: '#F1948A',
salmon1: '#FF6B6B',
salmon2: '#FF8787',
salmon3: '#FFA1A1',
mustard1: '#F1C40F',
mustard2: '#F7DC6F',
mustard3: '#F9E79F',
teal4: '#1ABC9C',
teal5: '#48C9B0',
teal6: '#76D7C4',
magenta1: '#D6336C',
magenta2: '#E84393',
magenta3: '#F06292',
violet4: '#7D3C98',
violet5: '#8E44AD',
violet6: '#9B59B6',
green4: '#229954',
green5: '#27AE60',
green6: '#52BE80',
blue4: '#2874A6',
blue5: '#2E86C1',
blue6: '#3498DB',
red4: '#C0392B',
red5: '#E74C3C',
red6: '#EC7063',
orange4: '#D35400',
orange5: '#E67E22',
orange6: '#EB984E',
pink4: '#C2185B',
pink5: '#D81B60',
pink6: '#E91E63',
gold4: '#B7950B',
gold5: '#F1C40F',
gold6: '#F4D03F',
},
lightModeColor: {
radicalRed: '#FF1A66',
dodgerBlueDark: '#0C6EED',
steelgrey: '#2f4b7c',
steelpurple: '#665191',
steelindigo: '#a05195',
steelpink: '#d45087',
steelcoral: '#f95d6a',
steelorange: '#ff7c43',
steelgold: '#ffa600',
steelrust: '#de425b',
steelgreen: '#41967e',
mediumOrchidDark: '#C326FD',
seaBuckthornDark: '#E66E05',
seaGreen: '#219653',
turquoiseBlueDark: '#0099CC',
silverDark: '#757575',
outrageousOrangeDark: '#F9521A',
roseBudDark: '#EB6437',
deepSkyBlueDark: '#0595BD',
royalBlue: '#3366E6',
avocadoDark: '#8E8E29',
mintGreenDark: '#00C700',
chestnut: '#B34D4D',
limaDark: '#6E9900',
olive: '#809900',
beautyBushDark: '#E25555',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRoseDark: '#F024BD',
electricLimeDark: '#84A800',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
purple: '#800080',
magentaDark: '#EB00EB',
pinkDark: '#FF3D5E',
brown: '#A52A2A',
teal: '#008080',
limeDark: '#07A207',
maroon: '#800000',
navy: '#000080',
gray: '#808080',
skyBlueDark: '#0CA7E4',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peruDark: '#D16E0A',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrownDark: '#CB4848',
darkSlateGray: '#2F4F4F',
fuchsia: '#FF0AFF',
salmonDark: '#FF432E',
darkSalmonDark: '#D26541',
paleVioletRedDark: '#E83089',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
mediumSeaGreenDark: '#109E50',
lightCoralDark: '#F85959',
gold: '#FFD700',
sandyBrownDark: '#D97117',
darkKhakiDark: '#99900A',
cornflowerBlueDark: '#3371E6',
mediumVioletRed: '#C71585',
paleGreenDark: '#0D910D',
radicalRed: '#D81B60',
dodgerBlueDark: '#1E5BD9',
steelgrey: '#344B6B',
steelpurple: '#5E548E',
steelindigo: '#8E4A7C',
steelpink: '#B63A6F',
steelcoral: '#E14B5A',
steelorange: '#E76F2F',
steelgold: '#E09B00',
steelrust: '#C93A50',
steelgreen: '#2F7D69',
mediumOrchidDark: '#8E24AA',
seaBuckthornDark: '#C75A00',
seaGreen: '#1E7F5A',
turquoiseBlueDark: '#007EA7',
silverDark: '#5F5F5F',
outrageousOrangeDark: '#E64A19',
roseBudDark: '#D84315',
deepSkyBlueDark: '#0277BD',
royalBlue: '#2A4FDB',
avocadoDark: '#6B6B1E',
mintGreenDark: '#2E9E55',
chestnut: '#8B3A3A',
limaDark: '#5C7F00',
olive: '#6E7F00',
beautyBushDark: '#C93C3C',
danube: '#4F6FB3',
oliveDrab: '#4F7F1A',
lavenderRoseDark: '#B0178F',
electricLimeDark: '#6B8F00',
robin: '#2F4FCC',
harleyOrange: '#CC2E12',
gladeGreen: '#4F7F46',
hemlock: '#5C5C45',
vidaLoca: '#3D6B00',
rust: '#993300',
red: '#C62828',
blue: '#1A237E',
green: '#1B7F3A',
purple: '#6A1B9A',
magentaDark: '#B000B5',
pinkDark: '#C2185B',
brown: '#7A3A1E',
teal: '#006D6F',
limeDark: '#4C8C2B',
maroon: '#6D1B1B',
navy: '#0D1B5E',
gray: '#616161',
skyBlueDark: '#0288D1',
indigo: '#303F9F',
slateGray: '#556B7C',
chocolate: '#9C4A1A',
tomato: '#E53935',
steelBlue: '#3A6EA5',
peruDark: '#B35E00',
darkOliveGreen: '#445B1F',
indianRed: '#B04040',
mediumSlateBlue: '#5C6BC0',
rosyBrownDark: '#A94444',
darkSlateGray: '#2E4A4A',
fuchsia: '#C511C5',
salmonDark: '#E64A3C',
darkSalmonDark: '#C85A3A',
paleVioletRedDark: '#C2186A',
mediumPurple: '#7E57C2',
darkOrchid: '#7B1FA2',
mediumSeaGreenDark: '#2E8B57',
lightCoralDark: '#E57373',
gold: '#D4AF37',
sandyBrownDark: '#C76A15',
darkKhakiDark: '#8A7F00',
cornflowerBlueDark: '#355FCC',
mediumVioletRed: '#AD1457',
paleGreenDark: '#2E7D32',
},
errorColor: '#d32f2f',
royalGrey: '#888888',

View File

@@ -202,7 +202,7 @@ function AllEndPoints({
const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
op: 'AND',
});
setParams({
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
selectedView: VIEWS.ENDPOINT_STATS,
endPointDetailsLocalFilters: {
items: initialItems,

View File

@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
const httpUrlKey = {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'tag',
};
@@ -93,7 +93,7 @@ function EndPointDetails({
return currentFilters; // No change needed, prevents loop
}
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
const otherFilters = currentFilters?.items?.filter(
(item) => item.key?.key !== httpUrlKey.key,
);
@@ -125,7 +125,7 @@ function EndPointDetails({
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
// Filter out http.url filter before saving to params
// Filter out http_url filter before saving to params
const filteredNewFilters = {
op: 'AND',
items:
@@ -299,7 +299,6 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
filters={filters}
timeRange={timeRange}
onDragSelect={onDragSelect}

View File

@@ -56,15 +56,15 @@ function TopErrors({
{
items: endPointName
? [
// Remove any existing http.url filters from initialFilters to avoid duplicates
// Remove any existing http_url filters from initialFilters to avoid duplicates
...(initialFilters?.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
) || []),
{
id: '92b8a1c1',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'tag',
},
op: '=',


@@ -9,6 +9,7 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../constants';
import DomainMetrics from './DomainMetrics';
// Mock the API call
@@ -126,11 +127,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'count()',
);
// Verify exact domain filter expression structure
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryA.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryA.filter.expression).toContain(
'url.full EXISTS OR http.url EXISTS',
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
// Verify Query B - p99 latency
@@ -142,17 +141,13 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'p99(duration_nano)',
);
// Verify exact domain filter expression structure
expect(queryB.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
// Verify Query C - error count (disabled)
const queryC = queryData.find((q: any) => q.queryName === 'C');
expect(queryC).toBeDefined();
expect(queryC.disabled).toBe(true);
expect(queryC.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryC.aggregations?.[0]).toBeDefined();
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count()',
@@ -169,9 +164,7 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'max(timestamp)',
);
// Verify exact domain filter expression structure
expect(queryD.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
// Verify Formula F1 - error rate calculation
const formulas = payload.query.builder.queryFormulas;


@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryA.filter) {
expect(queryA.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryB.filter) {
expect(queryB.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(queryC.aggregateOperator).toBe('count');
if (queryC.filter) {
expect(queryC.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryD.filter) {
expect(queryD.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
}
if (queryE.filter) {
expect(queryE.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(query.filter.expression).toContain('staging');
// Also verify domain filter is still present
expect(query.filter.expression).toContain(
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
"http_host = 'api.internal.com'",
);
// Verify client kind filter is present
expect(query.filter.expression).toContain("kind_string = 'Client'");


@@ -34,7 +34,6 @@ function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
filters,
timeRange,
onDragSelect,
@@ -48,7 +47,6 @@ function StatusCodeBarCharts({
unknown
>;
domainName: string;
endPointName: string;
filters: IBuilderQuery['filters'];
timeRange: {
startTime: number;
@@ -144,11 +142,11 @@ function StatusCodeBarCharts({
const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, endPointName, {
getStatusCodeBarChartWidgetData(domainName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, endPointName, filters],
[domainName, filters],
);
const graphClickHandler = useCallback(
@@ -166,6 +164,7 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);
handleGraphClick({
xValue,
yValue,


@@ -12,8 +12,8 @@ export const VIEW_TYPES = {
// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
HTTP_URL: 'http_url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'net.peer.name',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
} as const;
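The derived keys above replace the raw attribute pairs that the removed expressions elsewhere in this diff had to OR together. A small mapping sketch, purely for illustration (the backend materialization of these derived columns is not part of this PR):

// Illustration only — assumed source attributes for each derived column.
const derivedSpanAttributeSources: Record<string, string[]> = {
	http_url: ['http.url', 'url.full'],
	http_host: ['net.peer.name', 'server.address'],
};

// Old existence check: "(http.url EXISTS OR url.full EXISTS)"
// New existence check: "http_url EXISTS"
const endpointExistsExpression = 'http_url EXISTS';

console.log(derivedSpanAttributeSources, endpointExistsExpression);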


@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
const endpointFilter = result?.items?.find(
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
item.value === endPointName,
);
expect(endpointFilter).toBeDefined();
@@ -344,13 +344,12 @@ describe('API Monitoring Utils', () => {
describe('getFormattedEndPointDropDownData', () => {
it('should format endpoint dropdown data correctly', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const mockData = [
{
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/users',
'url.full': 'http://example.com/api/users',
A: 150, // count or other metric
},
},
@@ -358,7 +357,6 @@ describe('API Monitoring Utils', () => {
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/orders',
'url.full': 'http://example.com/api/orders',
A: 75,
},
},
@@ -406,7 +404,7 @@ describe('API Monitoring Utils', () => {
it('should handle items without URL path', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
type MockDataType = {
data: {
[key: string]: string | number;
@@ -712,13 +710,11 @@ describe('API Monitoring Utils', () => {
it('should generate widget configuration for status code bar chart', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const filters = { items: [], op: 'AND' };
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);
@@ -741,21 +737,11 @@ describe('API Monitoring Utils', () => {
if (domainFilter) {
expect(domainFilter.value).toBe(domainName);
}
// Should have endpoint filter if provided
const endpointFilter = queryData.filters?.items?.find(
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
);
expect(endpointFilter).toBeDefined();
if (endpointFilter) {
expect(endpointFilter.value).toBe(endPointName);
}
});
it('should include custom filters in the widget configuration', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const customFilter = {
id: 'custom-filter',
key: {
@@ -771,7 +757,6 @@ describe('API Monitoring Utils', () => {
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);


@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
}
>
Click Row


@@ -6,10 +6,10 @@
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: (net.peer.name OR server.address)
* - Domain filter: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: Both http.url AND url.full with type 'attribute'
* - GroupBy: http_url with type 'attribute'
*/
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,6 +18,8 @@ import {
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('AllEndpointsWidget - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const emptyFilters: IBuilderQuery['filters'] = {
@@ -92,28 +94,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
// Queries A, B, C have identical base filter
expect(queryA.filter?.expression).toBe(
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
expect(queryB.filter?.expression).toBe(
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
expect(queryC.filter?.expression).toBe(
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
// Query D has additional has_error filter
expect(queryD.filter?.expression).toBe(
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
});
});
describe('2. GroupBy Structure', () => {
it('default groupBy includes both http.url and url.full with type attribute', () => {
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
const widget = getAllEndpointsWidgetData(
emptyGroupBy,
mockDomainName,
@@ -124,23 +126,13 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have the same default groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(2);
expect(query.groupBy).toHaveLength(1);
// http.url
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'attribute',
});
// url.full
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'attribute',
});
});
@@ -170,19 +162,18 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have defaults + custom groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
// First two should be defaults (http.url, url.full)
expect(query.groupBy[0].key).toBe('http.url');
expect(query.groupBy[1].key).toBe('url.full');
// First should be the default (http_url)
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
// Last two should be custom (matching subset of properties)
expect(query.groupBy[2]).toMatchObject({
expect(query.groupBy[1]).toMatchObject({
dataType: DataTypes.String,
key: 'service.name',
type: 'resource',
});
expect(query.groupBy[3]).toMatchObject({
expect(query.groupBy[2]).toMatchObject({
dataType: DataTypes.String,
key: 'deployment.environment',
type: 'resource',


@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),


@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
it('formats data using the utility function', () => {
const mockRows = [
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
];
const dataProps = {


@@ -6,15 +6,18 @@
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: (net.peer.name OR server.address)
* - Domain handling: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Existence check: http_url EXISTS
* - Aggregation: count() expression
* - GroupBy: Both http.url AND url.full with type 'attribute'
* - GroupBy: http_url with type 'attribute'
*/
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('EndpointDropdown - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
@@ -43,9 +46,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain http_host = '${domainName}'
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -53,7 +56,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Base filter 3: Existence check
expect(queryA.filter?.expression).toContain(
'(http.url EXISTS OR url.full EXISTS)',
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -64,16 +67,11 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
});
expect(queryA).not.toHaveProperty('aggregateAttribute');
// GroupBy: Both http.url and url.full
expect(queryA.groupBy).toHaveLength(2);
// GroupBy: http_url
expect(queryA.groupBy).toHaveLength(1);
expect(queryA.groupBy).toContainEqual({
key: 'http.url',
dataType: 'string',
type: 'attribute',
});
expect(queryA.groupBy).toContainEqual({
key: 'url.full',
dataType: 'string',
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'attribute',
});
});
@@ -120,53 +118,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Exact filter expression with custom filters merged
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
);
});
});
describe('3. HTTP URL Filter Special Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const dropdownQuery = payload[2];
const expression =
dropdownQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: Exact filter expression with http.url converted to OR logic
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
);
});
});
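Taken together, the updated dropdown assertions collapse to one derived group-by key and a flat base filter. A sketch of the asserted shapes, recombined here only for illustration (fixture values come from the tests above):

// Illustration only — not part of the diff.
const mockDomainName = 'api.example.com';

const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client' AND http_url EXISTS`;

const groupBy = [{ key: 'http_url', dataType: 'string', type: 'attribute' }];

console.log(baseExpression, groupBy);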


@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
it('uses new domain filter format: (http_host)', () => {
const widget = getRateOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
it('uses new domain filter format: (http_host)', () => {
const widget = getLatencyOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData.filter).toBeDefined();
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
`http_host = '${mockDomainName}' service.name = 'user-service'`,
);
});
});


@@ -142,7 +142,6 @@ describe('StatusCodeBarCharts', () => {
endTime: 1609545600000,
};
const mockDomainName = 'test-domain';
const mockEndPointName = '/api/test';
const onDragSelectMock = jest.fn();
const refetchFn = jest.fn();
@@ -232,7 +231,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -268,7 +266,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -311,7 +308,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -356,7 +352,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -404,7 +399,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -419,7 +413,6 @@ describe('StatusCodeBarCharts', () => {
// but we've confirmed the function is mocked and ready to be tested
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: [],
op: 'AND',
@@ -467,7 +460,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockCustomFilters as IBuilderQuery['filters']}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -477,7 +469,6 @@ describe('StatusCodeBarCharts', () => {
// Assert widget creation was called with the correct parameters
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({ id: 'custom-filter' }),


@@ -10,7 +10,7 @@
*
* V5 Changes:
* - Filter format change: filters.items[] → filter.expression
* - Domain filter: (net.peer.name OR server.address)
* - Domain filter: (http_host)
* - Kind filter: kind_string = 'Client'
* - stepInterval: 60 → null
* - Grouped by response_status_code
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain (http_host)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain (http_host)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toBe(latencyExpression);
// Verify base filters
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain('http_host');
expect(callsExpression).toContain("kind_string = 'Client'");
// Verify custom filters are merged
@@ -187,51 +187,4 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toContain('production');
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/metrics',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const callsChartQuery = payload[4];
const latencyChartQuery = payload[5];
const callsExpression =
callsChartQuery.query.builder.queryData[0].filter?.expression;
const latencyExpression =
latencyChartQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(callsExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
expect(latencyExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
// Base filters still present
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
});
});
});


@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Kind filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain (http_host)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain('http_host');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
@@ -165,62 +165,4 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(expression).toContain(
"(http.url = '/api/users' OR url.full = '/api/users')",
);
// Other filters still present
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
});
});
});


@@ -4,6 +4,7 @@ import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
import { getTopErrorsQueryPayload } from '../utils';
@@ -83,7 +84,7 @@ describe('TopErrors', () => {
{
columns: [
{
name: 'http.url',
name: SPAN_ATTRIBUTES.HTTP_URL,
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -123,7 +124,7 @@ describe('TopErrors', () => {
table: {
rows: [
{
'http.url': '/api/test',
http_url: '/api/test',
A: 100,
},
],
@@ -205,7 +206,7 @@ describe('TopErrors', () => {
expect(navigateMock).toHaveBeenCalledWith({
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: 'http.url' }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
op: '=',
value: '/api/test',
}),
@@ -215,7 +216,7 @@ describe('TopErrors', () => {
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'net.peer.name' }),
key: expect.objectContaining({ key: 'http_host' }),
op: '=',
value: 'test-domain',
}),
@@ -334,7 +335,7 @@ describe('TopErrors', () => {
// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
);
});
});
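The expression above matches the base filter built by buildFilterExpression later in this diff. A sketch of how the parts compose (fixture value taken from the test):

// Illustration only — joins the same parts the test expects.
const domainName = 'test-domain';

const topErrorsBaseFilter = [
	`kind_string = 'Client'`,
	`http_url EXISTS`,
	`http_host = '${domainName}'`,
	`has_error = true`,
].join(' AND ');

// -> "kind_string = 'Client' AND http_url EXISTS AND http_host = 'test-domain' AND has_error = true"
console.log(topErrorsBaseFilter);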


@@ -15,7 +15,6 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
@@ -57,12 +56,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);
export const getDomainNameFilterExpression = (domainName: string): string =>
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
`http_host = '${domainName}'`;
export const clientKindExpression = `kind_string = 'Client'`;
/**
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* Converts filters to expression
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
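A before/after sketch of the domain filter helper changed in the hunk above, illustrative only:

// Before: two raw attributes OR-ed together.
const oldDomainNameFilterExpression = (domainName: string): string =>
	`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;

// After: a single derived column.
const newDomainNameFilterExpression = (domainName: string): string =>
	`http_host = '${domainName}'`;

// newDomainNameFilterExpression('api.example.com') === "http_host = 'api.example.com'"
console.log(
	oldDomainNameFilterExpression('api.example.com'),
	newDomainNameFilterExpression('api.example.com'),
);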
@@ -75,34 +74,6 @@ export const convertFiltersWithUrlHandling = (
return baseExpression;
}
// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
const httpUrlFilter = filters.items?.find(
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
);
// If http.url filter exists, create modified filters with (http.url OR url.full)
if (httpUrlFilter && httpUrlFilter.value) {
// Remove ALL http.url filters from items (guards against duplicates)
const otherFilters = filters.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
);
// Convert to expression first with other filters
const {
filter: intermediateFilter,
} = convertFiltersToExpressionWithExistingQuery(
{ ...filters, items: otherFilters || [] },
baseExpression,
);
// Add the OR condition for http.url and url.full
const urlValue = httpUrlFilter.value;
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
return intermediateFilter.expression.trim()
? `${intermediateFilter.expression} AND ${urlCondition}`
: urlCondition;
}
const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
baseExpression,
@@ -371,7 +342,7 @@ export const formatDataForTable = (
});
};
const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
export const getDomainMetricsQueryPayload = (
domainName: string,
@@ -588,14 +559,7 @@ const defaultGroupBy = [
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'attribute',
},
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'attribute',
},
// {
@@ -867,8 +831,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`has_error = true`,
];
if (showStatusCodeErrors) {
@@ -910,12 +874,7 @@ export const getTopErrorsQueryPayload = (
filter: { expression: filterExpression },
groupBy: [
{
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
name: SPAN_ATTRIBUTES.HTTP_URL,
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -1134,11 +1093,11 @@ export const formatEndPointsDataForTable = (
if (!isGroupedByAttribute) {
formattedData = data?.map((endpoint) => {
const { port } = extractPortAndEndpoint(
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
);
return {
key: v4(),
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
port,
callCount:
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
@@ -1262,9 +1221,7 @@ export const formatTopErrorsDataForTable = (
return {
key: v4(),
endpointName: getDisplayValue(
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
),
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
statusMessage: getDisplayValue(rowObj.status_message),
count: getDisplayValue(rowObj.__result_0),
@@ -1281,10 +1238,10 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'ea16470b',
key: {
key: 'http.url',
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'tag',
id: 'http.url--string--tag--false',
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
},
op: '=',
value: endPointName,
@@ -1302,7 +1259,7 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'e8a043b7',
key: {
key: 'net.peer.name',
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: '',
},
@@ -1781,7 +1738,7 @@ export const getEndPointDetailsQueryPayload = (
filters || { items: [], op: 'AND' },
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
),
},
expression: 'A',
@@ -1793,12 +1750,7 @@ export const getEndPointDetailsQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'attribute',
},
{
key: 'url.full',
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'attribute',
},
@@ -2225,7 +2177,7 @@ export const getEndPointZeroStateQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.URL_PATH,
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'tag',
},
@@ -2419,8 +2371,7 @@ export const statusCodeWidgetInfo = [
interface EndPointDropDownResponseRow {
data: {
[SPAN_ATTRIBUTES.URL_PATH]: string;
'url.full': string;
[SPAN_ATTRIBUTES.HTTP_URL]: string;
A: number;
};
}
@@ -2439,8 +2390,8 @@ export const getFormattedEndPointDropDownData = (
}
return data.map((row) => ({
key: v4(),
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
}));
};
@@ -2769,7 +2720,6 @@ export const groupStatusCodes = (
export const getStatusCodeBarChartWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => ({
query: {
@@ -2798,20 +2748,6 @@ export const getStatusCodeBarChartWidgetData = (
op: '=',
value: domainName,
},
...(endPointName
? [
{
id: '8b1be6f0',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',
value: endPointName,
},
]
: []),
...(filters?.items || []),
],
op: 'AND',
@@ -2933,7 +2869,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND http_url EXISTS`,
),
},
functions: [],
@@ -2965,7 +2901,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND http_url EXISTS`,
),
},
functions: [],
@@ -2997,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND http_url EXISTS`,
),
},
functions: [],
@@ -3029,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
),
},
functions: [],
@@ -3060,24 +2996,12 @@ export const getAllEndpointsWidgetData = (
);
widget.renderColumnCell = {
[SPAN_ATTRIBUTES.URL_PATH]: (
url: string | number,
record?: RowData,
): ReactNode => {
// First try to use the url from the column value
let urlValue = url;
// If url is empty/null and we have the record, fallback to url.full
if (isEmptyFilterValue(url) && record) {
const { 'url.full': urlFull } = record;
urlValue = urlFull;
}
if (!urlValue || urlValue === 'n/a') {
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
return <span>-</span>;
}
const { endpoint } = extractPortAndEndpoint(String(urlValue));
const { endpoint } = extractPortAndEndpoint(String(url));
return <span>{getDisplayValue(endpoint)}</span>;
},
A: (numOfCalls: any): ReactNode => (
@@ -3132,8 +3056,8 @@ export const getAllEndpointsWidgetData = (
};
widget.customColTitles = {
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
'net.peer.port': 'Port',
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
};
widget.title = (
@@ -3158,12 +3082,10 @@ export const getAllEndpointsWidgetData = (
</div>
);
widget.hiddenColumns = ['url.full'];
return widget;
};
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
export const getGroupByFiltersFromGroupByValues = (
rowData: any,
@@ -3221,7 +3143,7 @@ export const getRateOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`http_host = '${domainName}'`,
),
},
functions: [],
@@ -3272,7 +3194,7 @@ export const getLatencyOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`http_host = '${domainName}'`,
),
},
functions: [],


@@ -18,8 +18,8 @@ import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynam
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isEmpty, map } from 'lodash-es';
import {
ArrowLeft,


@@ -1,6 +1,6 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';


@@ -3,7 +3,7 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';


@@ -33,8 +33,8 @@ import { useChartMutable } from 'hooks/useChartMutable';
import useComponentPermission from 'hooks/useComponentPermission';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import GetMinMax from 'lib/getMinMax';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';


@@ -8,8 +8,8 @@ import { populateMultipleResults } from 'container/NewWidget/LeftContainer/Widge
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';


@@ -6,7 +6,7 @@ import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { AppState } from 'store/reducers';
import { Query } from 'types/api/queryBuilder/queryBuilderData';


@@ -27,8 +27,8 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';


@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { ServicesList } from 'types/api/metrics/getService';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';


@@ -3,6 +3,7 @@ import { MemoryRouter, Route } from 'react-router-dom';
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { AppProvider } from 'providers/App/App';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import { Span } from 'types/api/trace/getTraceV2';
@@ -108,7 +109,7 @@ const createMockSpan = (): Span => ({
statusMessage: '',
tagMap: {
'http.method': 'GET',
'http.url': '/api/users?page=1',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/users?page=1',
'http.status_code': '200',
'service.name': 'frontend-service',
'span.kind': 'server',


@@ -5,6 +5,7 @@ import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
@@ -878,7 +879,9 @@ describe('SpanDetailsDrawer', () => {
// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
expect(screen.getAllByText(SPAN_ATTRIBUTES.HTTP_URL).length).toBeGreaterThan(
0,
);
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
});
@@ -1126,7 +1129,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees all attributes initially
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.getByText('http.url')).toBeInTheDocument();
expect(screen.getByText(SPAN_ATTRIBUTES.HTTP_URL)).toBeInTheDocument();
expect(screen.getByText('http.status_code')).toBeInTheDocument();
// User types "method" in search
@@ -1136,7 +1139,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees only matching attributes
await waitFor(() => {
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.queryByText('http.url')).not.toBeInTheDocument();
expect(screen.queryByText(SPAN_ATTRIBUTES.HTTP_URL)).not.toBeInTheDocument();
expect(screen.queryByText('http.status_code')).not.toBeInTheDocument();
});
});


@@ -1,3 +1,4 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { ILog } from 'types/api/logs/log';
import { Span } from 'types/api/trace/getTraceV2';
@@ -22,7 +23,7 @@ export const mockSpan: Span = {
event: [],
tagMap: {
'http.method': 'GET',
'http.url': '/api/test',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/test',
'http.status_code': '200',
},
hasError: false,


@@ -13,7 +13,7 @@ import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useNotifications } from 'hooks/useNotifications';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';


@@ -3,8 +3,8 @@ import { useSelector } from 'react-redux';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';


@@ -14,7 +14,7 @@ describe('Get Series Data', () => {
expect(seriesData.length).toBe(5);
expect(seriesData[1].label).toBe('firstLegend');
expect(seriesData[1].show).toBe(true);
expect(seriesData[1].fill).toBe('#C71585');
expect(seriesData[1].fill).toBe('#FF6F91');
expect(seriesData[1].width).toBe(2);
});


@@ -1,3 +1,5 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
/* eslint-disable sonarjs/no-duplicate-string */
export const traceDetailResponse = [
{
@@ -35,7 +37,7 @@ export const traceDetailResponse = [
'component',
'host.name',
'http.method',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
'http.status_code',
'opencensus.exporterversion',
@@ -84,7 +86,7 @@ export const traceDetailResponse = [
'signoz.collector.id',
'component',
'http.method',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
],
[
@@ -741,7 +743,7 @@ export const traceDetailResponse = [
'component',
'http.method',
'http.status_code',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'net/http.reused',
'net/http.was_idle',
'service.name',
@@ -833,7 +835,7 @@ export const traceDetailResponse = [
'opencensus.exporterversion',
'signoz.collector.id',
'host.name',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'net/http.reused',
'net/http.was_idle',
],
@@ -916,7 +918,7 @@ export const traceDetailResponse = [
'net/http.was_idle',
'component',
'host.name',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
'service.name',
'signoz.collector.id',


@@ -2,6 +2,7 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { getAttributesValues } from 'api/queryBuilder/getAttributesValues';
import { DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY } from 'constants/queryBuilder';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import {
BaseAutocompleteData,
DataTypes,
@@ -31,7 +32,7 @@ export const AllTraceFilterKeyValue: Record<string, string> = {
httpRoute: 'HTTP Route',
'http.route': 'HTTP Route',
httpUrl: 'HTTP URL',
'http.url': 'HTTP URL',
[SPAN_ATTRIBUTES.HTTP_URL]: 'HTTP URL',
traceID: 'Trace ID',
trace_id: 'Trace ID',
} as const;


@@ -1,7 +1,7 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { commaValuesParser } from '../../lib/dashbaordVariables/customCommaValuesParser';
import { commaValuesParser } from '../../lib/dashboardVariables/customCommaValuesParser';
interface UrlVariables {
[key: string]: any;


@@ -1,127 +0,0 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}


@@ -1,162 +0,0 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}
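For illustration, the parser above infers a field context from a dotted search text and keeps the inference only when it is valid for the requested signal. A minimal sketch of that behaviour, assuming GetFieldKeyFromKeyText splits a "resource."-prefixed key text into the resource context plus the remaining key name (the exact output shape is an assumption drawn from its usage above):

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	// Assumption: "resource.service.name" yields FieldContext=resource and
	// Name="service.name"; the resource context is valid for every signal,
	// so the selector would use the inferred context instead of the raw text.
	key := telemetrytypes.GetFieldKeyFromKeyText("resource.service.name")
	fmt.Println(key.FieldContext, key.Name)
}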

View File

@@ -0,0 +1,50 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}
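Both routes are plain GETs behind ViewAccess, with query parameters bound into PostableFieldKeysParams / PostableFieldValueParams. A minimal client sketch follows; it assumes the parameter names carried over from the legacy parser (signal, searchText, limit) and that the instance URL and auth header are supplied by the caller:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
)

func main() {
	base := os.Getenv("SIGNOZ_URL") // e.g. http://localhost:8080 (assumption)
	q := url.Values{}
	q.Set("signal", "traces")
	q.Set("searchText", "service.name")
	q.Set("limit", "50")

	req, err := http.NewRequest(http.MethodGet, base+"/api/v1/fields/keys?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	// header name is an assumption; use whatever auth the deployment expects
	req.Header.Set("SIGNOZ-API-KEY", os.Getenv("SIGNOZ_API_KEY"))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}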

View File

@@ -13,11 +13,11 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -42,8 +42,8 @@ type provider struct {
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
roleGetter role.Getter
roleHandler role.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
}
func NewFactory(
@@ -61,11 +61,31 @@ func NewFactory(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
fieldsHandler,
authzHandler,
)
})
}
@@ -87,8 +107,8 @@ func newProvider(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -109,11 +129,11 @@ func newProvider(
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
if err := provider.AddToRouter(router); err != nil {
return nil, err
@@ -175,6 +195,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -10,7 +10,7 @@ import (
)
func (provider *provider) addRoleRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
ID: "CreateRole",
Tags: []string{"role"},
Summary: "Create role",
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
ID: "ListRoles",
Tags: []string{"role"},
Summary: "List roles",
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
Summary: "Get role",
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
ID: "DeleteRole",
Tags: []string{"role"},
Summary: "Delete role",

View File

@@ -2,9 +2,11 @@ package authz
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -29,4 +31,76 @@ type AuthZ interface {
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
// Bootstrap managed role transactions and user assignments
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}
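RegisterTypeable is what modules implement so the authorization server learns their resources and any managed-role grants to seed; the dashboard module further down returns its typeables and a nil transaction map. A sketch of a hypothetical module doing the same (package and names are illustrative only):

package examplemodule

import "github.com/SigNoz/signoz/pkg/types/authtypes"

type module struct{}

// MustGetTypeables lists the resources this module wants registered with authz.
func (m *module) MustGetTypeables() []authtypes.Typeable {
	// illustrative: a real module returns its own typeable resources,
	// e.g. the dashboard module returns TypeableMetaResourceDashboard.
	return nil
}

// MustGetManagedRoleTransactions seeds grants for managed roles; nil means
// the module contributes none.
func (m *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
	return nil
}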

View File

@@ -1,4 +1,4 @@
package implrole
package sqlauthzstore
import (
"context"
@@ -14,7 +14,7 @@ type store struct {
sqlstore sqlstore.SQLStore
}
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
return &store{sqlstore: sqlstore}
}

View File

@@ -2,35 +2,24 @@ package openfgaauthz
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type provider struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
server *openfgaserver.Server
store roletypes.Store
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
@@ -40,301 +29,194 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &provider{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
server: server,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := provider.getOrCreateModel(ctx, storeId)
if err != nil {
return err
}
provider.mtx.Lock()
provider.modelID = modelID
provider.storeID = storeId
provider.mtx.Unlock()
<-provider.stopChan
return nil
return provider.server.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
provider.openfgaServer.Close()
close(provider.stopChan)
return nil
return provider.server.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := provider.getStoreIDandModelID()
checkResponse, err := provider.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
return provider.server.Check(ctx, tupleReq)
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := provider.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := provider.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return provider.server.BatchCheck(ctx, tupleReq)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := provider.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
return provider.server.Write(ctx, additions, deletions)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := provider.getStoreIDandModelID()
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
return provider.server.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, nil, tuples)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
return err
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
return nil
}
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
}
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := provider.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) getStoreIDandModelID() (string, string) {
provider.mtx.RLock()
defer provider.mtx.RUnlock()
storeID := provider.storeID
modelID := provider.modelID
return storeID, modelID
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return nil
}
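With the role module folded into authz, grants are relationship tuples written through the embedded OpenFGA server; Grant and Revoke take the role name plus a subject string. A sketch of the round trip, assuming an initialized authz.AuthZ and existing org and user IDs:

package authzexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// grantAndRevokeAdmin shows the Grant/Revoke round trip on the consolidated
// authz surface; authzService, orgID and userID come from the caller's wiring.
func grantAndRevokeAdmin(ctx context.Context, authzService authz.AuthZ, orgID, userID valuer.UUID) error {
	subject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
	if err := authzService.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, subject); err != nil {
		return err
	}
	// revoking deletes the same tuple again
	return authzService.Revoke(ctx, orgID, roletypes.SigNozAdminRoleName, subject)
}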

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"context"

View File

@@ -0,0 +1,334 @@
package openfgaserver
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type Server struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &Server{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (server *Server) Start(ctx context.Context) error {
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := server.getOrCreateModel(ctx, storeID)
if err != nil {
return err
}
server.mtx.Lock()
server.modelID = modelID
server.storeID = storeID
server.mtx.Unlock()
<-server.stopChan
return nil
}
func (server *Server) Stop(ctx context.Context) error {
server.openfgaServer.Close()
close(server.stopChan)
return nil
}
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is a map keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := server.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := server.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := server.getStoreIDandModelID()
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := server.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from the schema doesn't have an id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (server *Server) getStoreIDandModelID() (string, string) {
server.mtx.RLock()
defer server.mtx.RUnlock()
storeID := server.storeID
modelID := server.modelID
return storeID, modelID
}
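Server.Start blocks on the internal stop channel after resolving the store and model IDs, so it is meant to run in its own goroutine, and Stop closes the embedded OpenFGA server and unblocks it. A lifecycle sketch, assuming the caller cancels ctx to signal shutdown:

package authzexample

import (
	"context"
	"log"

	"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
)

// runOpenfgaServer runs Start in the background and calls Stop on shutdown.
func runOpenfgaServer(ctx context.Context, srv *openfgaserver.Server) {
	go func() {
		if err := srv.Start(ctx); err != nil {
			log.Printf("openfga server exited: %v", err)
		}
	}()

	<-ctx.Done() // shutdown signal from the caller

	if err := srv.Stop(context.Background()); err != nil {
		log.Printf("failed to stop openfga server: %v", err)
	}
}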

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {
expectedModel := `module base
type user`
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)
storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -1,12 +1,12 @@
package implrole
package signozauthzapi
import (
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,12 +14,11 @@ import (
)
type handler struct {
setter role.Setter
getter role.Getter
authz authz.AuthZ
}
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
return &handler{setter: setter, getter: getter}
func NewHandler(authz authz.AuthZ) authz.Handler {
return &handler{authz: authz}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -36,7 +35,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
@@ -64,7 +63,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
if err != nil {
render.Error(rw, err)
return
@@ -103,7 +102,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
return
}
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
if err != nil {
render.Error(rw, err)
return
@@ -114,7 +113,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
resources := handler.setter.GetResources(ctx)
resources := handler.authz.GetResources(ctx)
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
@@ -134,7 +133,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
return
}
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
if err != nil {
render.Error(rw, err)
return
@@ -163,7 +162,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -175,7 +174,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
if err != nil {
render.Error(rw, err)
return
@@ -210,7 +209,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -222,7 +221,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
if err != nil {
render.Error(rw, err)
return
@@ -245,7 +244,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return

View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -24,15 +23,14 @@ type AuthZ struct {
logger *slog.Logger
orgGetter organization.Getter
authzService authz.AuthZ
roleGetter role.Getter
}
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {

View File

@@ -4,7 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {
statsreporter.StatsCollector
role.RegisterTypeable
authz.RegisterTypeable
}
type Handler interface {

View File

@@ -206,6 +206,10 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return nil
}
// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")

View File

@@ -0,0 +1,11 @@
package fields
import "net/http"
type Handler interface {
// Gets the field keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the field values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -0,0 +1,79 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}
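The new handler depends only on provider settings and a telemetrytypes.MetadataStore, so wiring it mirrors the NewAPI construction shown at the top of this diff. A small sketch; the implfields import path is assumed from the package name:

package signozexample

import (
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/fields"
	"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

// newFieldsHandler builds the handler that addFieldsRoutes registers; the
// metadata store is the same one the query service builds in NewAPI.
func newFieldsHandler(settings factory.ProviderSettings, metadataStore telemetrytypes.MetadataStore) fields.Handler {
	return implfields.NewHandler(settings, metadataStore)
}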

View File

@@ -1,63 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type getter struct {
store roletypes.Store
}
func NewGetter(store roletypes.Store) role.Getter {
return &getter{store: store}
}
func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := getter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}

View File

@@ -1,83 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type granter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
return &granter{store: store, authz: authz}
}
func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, tuples, nil)
}
func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := granter.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = granter.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, nil, tuples)
}
func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
return nil
}

View File

@@ -1,53 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
return &setter{store: store, authz: authz}
}
func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,85 +0,0 @@
package role
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Setter interface {
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type Getter interface {
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}
type Granter interface {
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}
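
For orientation, a minimal sketch of the authz surface that takes over from this deleted role.Granter interface, reconstructed from the call sites changed elsewhere in this diff; the subject parameter type and exact signatures are assumptions, not the pkg/authz definition:

package authzsketch // hypothetical package, for illustration only

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// GrantSurface is the subset of authz.AuthZ that replaces the deleted
// role.Granter, reconstructed from the call sites changed in this diff.
// The string subject parameters and exact signatures are assumptions.
type GrantSurface interface {
	Grant(ctx context.Context, orgID valuer.UUID, roleName, subject string) error
	Revoke(ctx context.Context, orgID valuer.UUID, roleName, subject string) error
	ModifyGrant(ctx context.Context, orgID valuer.UUID, fromRole, toRole, subject string) error
	CreateManagedRoles(ctx context.Context, orgID valuer.UUID, roles []*roletypes.Role) error
	CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error
}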

View File

@@ -8,11 +8,11 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -32,13 +32,13 @@ type Module struct {
emailing emailing.Emailing
settings factory.ScopedProviderSettings
orgSetter organization.Setter
granter role.Granter
authz authz.AuthZ
analytics analytics.Analytics
config user.Config
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -47,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
settings: settings,
orgSetter: orgSetter,
analytics: analytics,
granter: granter,
authz: authz,
config: config,
}
}
@@ -172,7 +172,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
@@ -238,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
if user.Role != existingUser.Role {
err = m.granter.ModifyGrant(ctx,
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
@@ -301,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
}
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -504,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
}
managedRoles := roletypes.NewManagedRoles(organization.ID)
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
if err != nil {
return nil, err
}
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
if err != nil {
return err
}

View File

@@ -25,7 +25,6 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -69,7 +68,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
"go.uber.org/zap"
@@ -104,10 +103,11 @@ type APIHandler struct {
querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
// temporalityMap is a map of metric name to temporality
// to avoid fetching temporality for the same metric multiple times
// querying the v4 table on low cardinal temporality column
// should be fast but we can still avoid the query if we have the data in memory
// temporalityMap is a map of metric name to temporality to avoid fetching
// temporality for the same metric multiple times.
//
// Querying the v4 table on a low cardinal temporality column should be
// fast, but we can still avoid the query if we have the data in memory.
temporalityMap map[string]map[v3.Temporality]bool
temporalityMux sync.Mutex
@@ -145,8 +145,6 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -177,8 +175,6 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -243,7 +239,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -399,13 +394,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
@@ -1023,7 +1011,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
// the query range is calculated based on the rule's evalWindow and evalDelay
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
if rule.AlertType == ruletypes.AlertTypeLogs {
if rule.Version != "v5" {
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
@@ -1230,12 +1218,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
dashboard := new(dashboardtypes.Dashboard)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
return
}
dashboard = cloudintegrationDashboard
dashboard = cloudIntegrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1564,13 +1552,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
responseData := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.Respond(w, response_data)
aH.Respond(w, responseData)
}
@@ -2652,12 +2640,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2705,12 +2693,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2759,12 +2747,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2813,12 +2801,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2870,12 +2858,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
@@ -2981,12 +2969,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3035,12 +3023,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3089,12 +3077,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3149,12 +3137,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
@@ -4138,11 +4126,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
aH.Respond(w, payload)
}
// listLogsPipelines lists logs piplines for latest version
// listLogsPipelines lists logs pipelines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
*logparsingpipeline.PipelinesResponse, error,
) {
// get lateset agent config
// get latest agent config
latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
@@ -4439,7 +4427,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4456,7 +4444,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, errQuriesByName)
RespondError(w, apiErr, errQueriesByName)
return
}
// get the fields if any logs query is present
@@ -4467,7 +4455,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4512,11 +4500,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQuriesByName {
for name, err := range errQueriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4792,7 +4780,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4822,7 +4810,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4845,11 +4833,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQuriesByName {
for name, err := range errQueriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4866,7 +4854,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")

View File

@@ -17,7 +17,6 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -209,13 +207,12 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
api.RegisterRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -5,10 +5,10 @@ import (
"os"
"regexp"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
func GetEvalDelay() time.Duration {
func GetEvalDelay() valuer.TextDuration {
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
if err != nil {
return 0
return valuer.TextDuration{}
}
return evalDelayDuration
}
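
A minimal usage sketch of the new return type, assuming the valuer.TextDuration helpers (ParseTextDuration, IsZero, Duration) behave as their call sites elsewhere in this changeset suggest:

package main // illustration only

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// Mirrors GetEvalDelay: parse a textual duration, falling back to the
	// zero TextDuration when parsing fails.
	d, err := valuer.ParseTextDuration("2m")
	if err != nil {
		d = valuer.TextDuration{}
	}
	fmt.Println(d.IsZero())                    // false for "2m"
	fmt.Println(d.Duration() == 2*time.Minute) // convert back where stdlib APIs need a time.Duration
}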

View File

@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we look back from the current time, we look at data for the last
// evalWindow duration
evalWindow time.Duration
evalWindow valuer.TextDuration
// holdDuration is the duration for which the alert waits before firing
holdDuration time.Duration
holdDuration valuer.TextDuration
// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
evalDelay time.Duration
evalDelay valuer.TextDuration
// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation
// newGroupEvalDelay is the grace period for new alert groups
newGroupEvalDelay *time.Duration
newGroupEvalDelay valuer.TextDuration
queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}
func WithEvalDelay(dur time.Duration) RuleOption {
func WithEvalDelay(dur valuer.TextDuration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
evalWindow: time.Duration(p.EvalWindow),
evalWindow: p.EvalWindow,
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,13 +176,12 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}
// Store newGroupEvalDelay and groupBy keys from NotificationSettings
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
baseRule.newGroupEvalDelay = &newGroupEvalDelay
if p.NotificationSettings != nil {
baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
}
if baseRule.evalWindow == 0 {
baseRule.evalWindow = 5 * time.Minute
if baseRule.evalWindow.IsZero() {
baseRule.evalWindow = valuer.MustParseTextDuration("5m")
}
for _, opt := range opts {
@@ -245,15 +244,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}
func (r *BaseRule) EvalDelay() time.Duration {
func (r *BaseRule) EvalDelay() valuer.TextDuration {
return r.evalDelay
}
func (r *BaseRule) EvalWindow() time.Duration {
func (r *BaseRule) EvalWindow() valuer.TextDuration {
return r.evalWindow
}
func (r *BaseRule) HoldDuration() time.Duration {
func (r *BaseRule) HoldDuration() valuer.TextDuration {
return r.holdDuration
}
@@ -281,7 +280,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()
if r.evalDelay > 0 {
if r.evalDelay.IsPositive() {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
@@ -552,7 +551,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q
// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
return r.newGroupEvalDelay.IsPositive()
}
// isFilterNewSeriesSupported checks if the query is supported for new series filtering
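
A hedged sketch of the adjusted query-range computation, using only TextDuration helpers that appear in this changeset (MustParseTextDuration, IsPositive, Milliseconds); the derivation of the window start is a plausible reconstruction, since that part of Timestamps is not shown in the hunk:

package main // illustration only

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// timestamps approximates the BaseRule.Timestamps adjustment above: shift the
// [start, end] query range back by evalDelay when it is positive. The window
// derivation is assumed, since that part of the function is not in the hunk.
func timestamps(evalWindow, evalDelay valuer.TextDuration, now time.Time) (int64, int64) {
	end := now.UnixMilli()
	start := end - evalWindow.Milliseconds()
	if evalDelay.IsPositive() {
		start -= evalDelay.Milliseconds()
		end -= evalDelay.Milliseconds()
	}
	return start, end
}

func main() {
	start, end := timestamps(valuer.MustParseTextDuration("5m"), valuer.MustParseTextDuration("2m"), time.Now())
	fmt.Println(start, end)
}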

View File

@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay *time.Duration
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,7 +159,8 @@ type filterNewSeriesTestCase struct {
func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
defaultDelay := 2 * time.Minute
defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultGroupByFields := []string{"service_name", "env"}
logger := instrumentationtest.New().Logger()
@@ -202,7 +203,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -234,7 +235,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -261,7 +262,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -295,7 +296,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -325,7 +326,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -361,7 +362,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -390,7 +391,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -420,7 +421,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -454,7 +455,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -488,7 +489,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -532,7 +533,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -572,7 +573,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -604,7 +605,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -639,7 +640,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -697,20 +698,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
time.Duration(time.Second),
time.Second,
nil,
readerCache,
options,
)
// Set newGroupEvalDelay in NotificationSettings if provided
if tt.newGroupEvalDelay != nil {
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: func() *ruletypes.Duration {
d := ruletypes.Duration(*tt.newGroupEvalDelay)
return &d
}(),
}
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: tt.newGroupEvalDelay,
}
// Create BaseRule using NewBaseRule

View File

@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}
const taskNamesuffix = "webAppEditor"
const taskNameSuffix = "webAppEditor"
func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache
EvalDelay time.Duration
EvalDelay valuer.TextDuration
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, tr)
// create ch rule task for evalution
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create ch rule task for evaluation
task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, pr)
// create promql rule task for evalution
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create promql rule task for evaluation
task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}
// Stop the rule manager's rule evaluation cycles.
func (m *Manager) Stop(ctx context.Context) {
func (m *Manager) Stop(_ context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -336,7 +336,7 @@ func (m *Manager) Stop(ctx context.Context) {
zap.L().Info("Rule manager stopped")
}
// EditRuleDefinition writes the rule definition to the
// EditRule writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}
// If there is an another task with the same identifier, raise an error
// If there is another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,7 +678,8 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}
// RuleTasks returns the list of manager's rule tasks.
// RuleTasksWithoutLock returns the list of manager's rule tasks without
// acquiring a lock on the manager.
func (m *Manager) RuleTasksWithoutLock() []Task {
rgs := make([]Task, 0, len(m.tasks))
@@ -889,7 +890,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has not task, start one
// rule has no task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}

View File

@@ -9,6 +9,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -40,8 +41,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -99,8 +100,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",

View File

@@ -28,6 +28,8 @@ type PromRule struct {
prometheus prometheus.Prometheus
}
var _ Rule = (*PromRule)(nil)
func NewPromRule(
id string,
orgID valuer.UUID,
@@ -332,7 +334,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -396,7 +398,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: ruletypes.Duration(r.evalWindow),
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}
// newPromRuleTask holds rules that have promql condition
// and evalutes the rule at a given frequency
// NewPromRuleTask holds rules that have promql condition
// and evaluates the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))
if time.Now() == time.Now().Add(frequency) {
if frequency == 0 {
frequency = DefaultFrequency
}
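
A short illustration of why the guard changed: two separate time.Now() readings carry distinct monotonic clock values, so the old equality check was practically never true even when frequency was zero, whereas the explicit zero check is reliable:

package main // illustration only

import (
	"fmt"
	"time"
)

func main() {
	var frequency time.Duration // zero value
	// Old guard: comparing two distinct time.Now() readings with == is
	// practically never true, even when frequency is zero.
	fmt.Println(time.Now() == time.Now().Add(frequency)) // almost certainly false
	// New guard: explicit and reliable.
	fmt.Println(frequency == 0) // true
}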

View File

@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
lookBackDelta := time.Minute
postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -19,9 +20,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
EvalDelay() time.Duration
EvalWindow() time.Duration
HoldDuration() time.Duration
EvalDelay() valuer.TextDuration
EvalWindow() valuer.TextDuration
HoldDuration() valuer.TextDuration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint
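
A hedged sketch of how the TextDuration accessors are consumed where a standard time.Duration is still needed, mirroring the pending-to-firing checks in this changeset:

package main // illustration only

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	holdDuration := valuer.MustParseTextDuration("1m")
	activeAt := time.Now().Add(-90 * time.Second)
	// Mirrors the pending-to-firing check in the Eval hunks: convert the
	// TextDuration back to a time.Duration for the stdlib comparison.
	fmt.Println(time.Since(activeAt) >= holdDuration.Duration()) // true
}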

View File

@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {
if time.Now() == time.Now().Add(frequency) {
if frequency == 0 {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,6 +78,7 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }
// Interval returns the group's interval.
// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }
func (g *RuleTask) Pause(b bool) {

View File

@@ -61,6 +61,8 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -746,7 +748,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -812,7 +814,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: ruletypes.Duration(r.evalWindow),
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()
for idx, c := range cases {
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}
logger := instrumentationtest.New().Logger()
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
require.NoError(t, err)
now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

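The recurring change in the threshold-rule tests above replaces the numeric duration wrappers with text-parsed durations, so each window keeps the form it was written in ("5m", "1m", "2m") rather than a computed time.Duration. A minimal before/after sketch, using only the constructors visible in this diff; the field types are assumed from context:

    // Old style (removed): durations built from time.Duration values.
    //   EvalWindow: ruletypes.Duration(5 * time.Minute),
    //   Frequency:  ruletypes.Duration(1 * time.Minute),
    // New style: durations parsed from their original text form.
    spec := ruletypes.RollingWindow{
        EvalWindow: valuer.MustParseTextDuration("5m"),
        Frequency:  valuer.MustParseTextDuration("1m"),
    }
    _ = spec

The same substitution applies to option arguments, e.g. WithEvalDelay(valuer.MustParseTextDuration("2m")) in place of WithEvalDelay(2 * time.Minute).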

@@ -1,6 +1,8 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
@@ -11,14 +13,14 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -28,6 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -43,10 +46,21 @@ type Handlers struct {
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Role role.Handler
Fields fields.Handler
AuthzHandler authz.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -60,6 +74,7 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
}
}


@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -41,13 +40,9 @@ func TestNewHandlers(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)


@@ -25,7 +25,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -67,9 +66,6 @@ type Modules struct {
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
RoleSetter role.Setter
RoleGetter role.Getter
Granter role.Granter
}
func NewModules(
@@ -89,13 +85,10 @@ func NewModules(
queryParser queryparser.QueryParser,
config Config,
dashboard dashboard.Module,
roleSetter role.Setter,
roleGetter role.Getter,
granter role.Granter,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, granter, analytics, config.User)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -117,8 +110,5 @@ func NewModules(
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
RoleSetter: roleSetter,
RoleGetter: roleGetter,
Granter: granter,
}
}


@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -41,10 +40,7 @@ func TestNewModules(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {


@@ -15,11 +15,11 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -50,8 +50,8 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ gateway.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err


@@ -166,6 +166,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddAuthzIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
)
}
@@ -247,8 +248,8 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.GatewayHandler,
modules.RoleGetter,
handlers.Role,
handlers.Fields,
handlers.AuthzHandler,
),
)
}


@@ -21,8 +21,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
@@ -89,10 +87,9 @@ func New(
sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]],
telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]],
authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Setter, role.Granter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
roleSetterCallback func(sqlstore.SQLStore, authz.AuthZ, licensing.Licensing, []role.RegisterTypeable) role.Setter,
) (*SigNoz, error) {
// Initialize instrumentation
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -284,11 +281,24 @@ func New(
// Initialize user getter
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
// Initialize the role getter
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
// Initialize query parser (needed for dashboard module)
queryParser := queryparser.New(providerSettings)
// Initialize dashboard module (needed for authz registry)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, queryParser, querier, licensing)
// Initialize authz
authzProviderFactory := authzCallback(ctx, sqlstore)
authzProviderFactory := authzCallback(ctx, sqlstore, licensing, dashboard)
authz, err := authzProviderFactory.New(ctx, providerSettings, authz.Config{})
if err != nil {
return nil, err
@@ -318,9 +328,6 @@ func New(
return nil, err
}
// Initialize query parser
queryParser := queryparser.New(providerSettings)
// Initialize ruler from the available ruler provider factories
ruler, err := factory.NewProviderFromNamedMap(
ctx,
@@ -333,16 +340,6 @@ func New(
return nil, err
}
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
gatewayFactory := gatewayProviderFactory(licensing)
gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
if err != nil {
@@ -390,13 +387,10 @@ func New(
}
// Initialize all modules
roleSetter := roleSetterCallback(sqlstore, authz, licensing, nil)
granter := implrole.NewGranter(implrole.NewStore(sqlstore), authz)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(


@@ -0,0 +1,154 @@
package sqlmigration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/oklog/ulid/v2"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect"
"github.com/uptrace/bun/migrate"
)
type addAnonymousPublicDashboardTransaction struct {
sqlstore sqlstore.SQLStore
}
func NewAddAnonymousPublicDashboardTransactionFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_public_dashboard_txn"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newAddAnonymousPublicDashboardTransaction(ctx, ps, c, sqlstore)
})
}
func newAddAnonymousPublicDashboardTransaction(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore) (SQLMigration, error) {
return &addAnonymousPublicDashboardTransaction{
sqlstore: sqlstore,
}, nil
}
func (migration *addAnonymousPublicDashboardTransaction) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var storeID string
err = tx.QueryRowContext(ctx, `SELECT id FROM store WHERE name = ? LIMIT 1`, "signoz").Scan(&storeID)
if err != nil {
return err
}
// fetch all the orgs for which we need to insert the anonymous public dashboard transaction tuple.
orgIDs := []string{}
rows, err := tx.QueryContext(ctx, `SELECT id FROM organizations`)
if err != nil {
return err
}
defer rows.Close()
for rows.Next() {
var orgID string
if err := rows.Scan(&orgID); err != nil {
return err
}
orgIDs = append(orgIDs, orgID)
}
for _, orgID := range orgIDs {
// based on openfga tuple and changelog id's are same for writes.
// ref: https://github.com/openfga/openfga/blob/main/pkg/storage/sqlite/sqlite.go#L467
entropy := ulid.DefaultEntropy()
now := time.Now().UTC()
tupleID := ulid.MustNew(ulid.Timestamp(now), entropy).String()
// Add wildcard (*) transaction for signoz-anonymous role to read all public-dashboards
// This grants the signoz-anonymous role read access to all public dashboards in the organization
if migration.sqlstore.BunDB().Dialect().Name() == dialect.PG {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, _user, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, _user) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, _user, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
)
if err != nil {
return err
}
} else {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
)
if err != nil {
return err
}
}
}
err = tx.Commit()
if err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Down(context.Context, *bun.DB) error {
return nil
}

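Both branches of the Up migration above (the Postgres layout and the default column layout) store the same logical relationship: one wildcard tuple per organization that lets the signoz-anonymous role read every public dashboard, plus a matching changelog row. The ON CONFLICT ... DO NOTHING guard together with the rowsAffected check keeps reruns idempotent. As a rough illustration only (the migration writes raw rows, not API objects), the stored triple corresponds to an openfgav1.TupleKey of this shape:

    // Illustration only; orgID stands for each organization id iterated above.
    key := &openfgav1.TupleKey{
        User:     "role:organization/" + orgID + "/role/" + roletypes.SigNozAnonymousRoleName + "#assignee",
        Relation: "read",
        Object:   "metaresource:organization/" + orgID + "/public-dashboard/*",
    }
    _ = key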

@@ -20,15 +20,19 @@ var (
)
var (
typeUserSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeRoleSelectorRegex = regexp.MustCompile(`^[a-z-]{1,50}$`)
typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
// metaresources selectors are used to select either all or none
// metaresources selectors are used to select either all or none until we introduce some hierarchy here.
typeMetaResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)
var (
WildCardSelectorString = "*"
)
type SelectorCallbackWithClaimsFn func(*http.Request, Claims) ([]Selector, error)
type SelectorCallbackWithoutClaimsFn func(*http.Request, []*types.Organization) ([]Selector, valuer.UUID, error)

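The widened regexes above let the user, role, organization, and metaresource selectors accept a literal * (the new WildCardSelectorString) alongside their original forms, while the anonymous and metaresources selectors stay wildcard-only. A quick check of the new role-selector pattern, copied from the diff and using only the standard regexp package:

    re := regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
    fmt.Println(re.MatchString("signoz-anonymous")) // true: ordinary role name
    fmt.Println(re.MatchString("*"))                // true: wildcard selector
    fmt.Println(re.MatchString("Admin"))            // false: uppercase is not allowed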

@@ -24,9 +24,10 @@ func MustNewTypeableMetaResource(name Name) Typeable {
return typeableesource
}
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
for _, selector := range selectors {
object := typeableMetaResource.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

Some files were not shown because too many files have changed in this diff.