Compare commits


1 Commit

Author: Abhi Kumar
SHA1: 04e5d87986
Message: test: added test for tooltip plugin
Date: 2026-02-09 13:16:20 +05:30
102 changed files with 2160 additions and 3765 deletions

.github/CODEOWNERS vendored
View File

@@ -133,8 +133,5 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
## Dashboard Libs + Components
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend
## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend

View File

@@ -18,6 +18,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -76,15 +78,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -14,6 +14,7 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -28,6 +29,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -115,15 +118,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)
if err != nil {

View File

@@ -607,178 +607,6 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/fields/keys:
get:
deprecated: false
description: This endpoint returns field keys
operationId: GetFieldsKeys
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field keys
tags:
- fields
/api/v1/fields/values:
get:
deprecated: false
description: This endpoint returns field values
operationId: GetFieldsValues
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
- in: query
name: name
schema:
type: string
- in: query
name: existingQuery
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field values
tags:
- fields
/api/v1/getResetPasswordToken/{id}:
get:
deprecated: false
@@ -4416,60 +4244,6 @@ components:
format: date-time
type: string
type: object
TelemetrytypesGettableFieldKeys:
properties:
complete:
type: boolean
keys:
additionalProperties:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
nullable: true
type: object
type: object
TelemetrytypesGettableFieldValues:
properties:
complete:
type: boolean
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
type: object
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
fieldDataType:
type: string
name:
type: string
signal:
type: string
unit:
type: string
type: object
TelemetrytypesTelemetryFieldValues:
properties:
boolValues:
items:
type: boolean
type: array
numberValues:
items:
format: double
type: number
type: array
relatedValues:
items:
type: string
type: array
stringValues:
items:
type: string
type: array
type: object
TypesChangePasswordRequest:
properties:
newPassword:

View File

@@ -2,18 +2,12 @@ package openfgaauthz
import (
"context"
"slices"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -21,224 +15,50 @@ import (
type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store roletypes.Store
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
openfgaServer: openfgaServer,
licensing: licensing,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
registry: registry,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.openfgaServer.Start(ctx)
return provider.pkgAuthzService.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.openfgaServer.Stop(ctx)
return provider.pkgAuthzService.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.openfgaServer.Check(ctx, tuple)
return provider.pkgAuthzService.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
tuples := make([]*openfgav1.TupleKey, 0)
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
if err != nil {
return err
}
tuples = append(tuples, grantTuples...)
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
if err != nil {
return err
}
tuples = append(tuples, managedRoleTuples...)
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range provider.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, provider.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range provider.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.Write(ctx, additionTuples, deletionTuples)
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
@@ -246,95 +66,33 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
return nil
}
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
return provider.store.Delete(ctx, orgID, id)
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := []*openfgav1.TupleKey{}
// Grant the admin role to the user
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
adminTuple, err := authtypes.TypeableRole.Tuples(
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return nil, err
return err
}
tuples = append(tuples, adminTuple...)
// Grant the admin role to the anonymous user
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
anonymousTuple, err := authtypes.TypeableRole.Tuples(
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, anonymousTuple...)
return tuples, nil
return nil
}
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
transactionsByRole := make(map[string][]*authtypes.Transaction)
for _, register := range provider.registry {
for roleName, txns := range register.MustGetManagedRoleTransactions() {
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
}
}
tuples := make([]*openfgav1.TupleKey, 0)
for roleName, transactions := range transactionsByRole {
for _, txn := range transactions {
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
txnTuples, err := typeable.Tuples(
authtypes.MustNewSubject(
authtypes.TypeableRole,
roleName,
orgID,
&authtypes.RelationAssignee,
),
txn.Relation,
[]authtypes.Selector{txn.Object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, txnTuples...)
}
}
return tuples, nil
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
}
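
For orientation, here is a minimal, self-contained sketch of the check flow the slimmed-down provider follows after this change: build a subject for the caller, expand it into tuples from the typeable's selectors, and run a single batch check. The types below (tuple, checker, allowAll) are hypothetical stand-ins for the authtypes and openfgav1 types named in the hunk above; this illustrates the pattern only and is not the repository code.

package main

import (
	"context"
	"errors"
	"fmt"
)

// tuple, checker, and allowAll are hypothetical stand-ins for the
// openfgav1.TupleKey and authz.AuthZ types referenced in the hunk above.
type tuple struct{ subject, relation, object string }

type checker interface {
	BatchCheck(ctx context.Context, tuples []tuple) error
}

type allowAll struct{}

func (allowAll) BatchCheck(context.Context, []tuple) error { return nil }

// checkWithTupleCreation mirrors the flow shown in the diff: build a subject
// for the caller, expand it into one tuple per selector, and delegate to a
// single batch check instead of issuing per-object checks.
func checkWithTupleCreation(ctx context.Context, c checker, userID, relation string, selectors []string) error {
	subject := "user:" + userID
	tuples := make([]tuple, 0, len(selectors))
	for _, sel := range selectors {
		tuples = append(tuples, tuple{subject: subject, relation: relation, object: sel})
	}
	if len(tuples) == 0 {
		return errors.New("no selectors to check")
	}
	return c.BatchCheck(ctx, tuples)
}

func main() {
	err := checkWithTupleCreation(context.Background(), allowAll{}, "1234", "read", []string{"dashboard:abc"})
	fmt.Println("check error:", err)
}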

View File

@@ -1,83 +0,0 @@
package openfgaserver
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type Server struct {
pkgAuthzService authz.AuthZ
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return server.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -11,6 +11,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/types"
@@ -25,11 +26,13 @@ type module struct {
pkgDashboardModule dashboard.Module
store dashboardtypes.Store
settings factory.ScopedProviderSettings
roleSetter role.Setter
granter role.Granter
querier querier.Querier
licensing licensing.Licensing
}
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
@@ -37,6 +40,8 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
pkgDashboardModule: pkgDashboardModule,
store: store,
settings: scopedProviderSettings,
roleSetter: roleSetter,
granter: granter,
querier: querier,
licensing: licensing,
}
@@ -56,6 +61,29 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
if err != nil {
return err
}
additionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
if err != nil {
return err
}
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
if err != nil {
return err
@@ -100,7 +128,6 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
return []authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
}, storableDashboard.OrgID, nil
}
@@ -163,6 +190,29 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
@@ -200,6 +250,10 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
return module.pkgDashboardModule.List(ctx, orgID)
}
@@ -212,27 +266,34 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{
Type: authtypes.TypeMetaResource,
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
),
},
},
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
if err != nil {
return err
}
}
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
return module.store.DeletePublic(ctx, dashboardID.StringValue())
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
}
return nil
}
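
The CreatePublic and DeletePublic hunks above route the public-dashboard flow through the new role setter and granter. A minimal sketch of the create sequence follows, using hypothetical stand-in interfaces (roleSetter, granter, publicStore) rather than the real role.Setter, role.Granter, and dashboard store; deletion mirrors it by passing the same object on the deletion side of PatchObjects.

package main

import (
	"context"
	"fmt"
)

// roleSetter, granter, and publicStore are hypothetical stand-ins for
// role.Setter, role.Granter, and the dashboard store used in the hunk above.
type roleSetter interface {
	GetOrCreate(ctx context.Context, orgID, name string) (string, error)
	PatchObjects(ctx context.Context, orgID, roleName, relation string, additions, deletions []string) error
}

type granter interface {
	Grant(ctx context.Context, orgID, roleName, subject string) error
}

type publicStore interface {
	CreatePublic(ctx context.Context, dashboardID string) error
}

// createPublic follows the order visible in the diff: ensure the managed
// anonymous role exists, grant it to the anonymous subject, allow it to read
// the public-dashboard object, and only then persist the public record.
func createPublic(ctx context.Context, roles roleSetter, grants granter, store publicStore, orgID, dashboardID string) error {
	roleName, err := roles.GetOrCreate(ctx, orgID, "signoz-anonymous")
	if err != nil {
		return err
	}
	if err := grants.Grant(ctx, orgID, roleName, "anonymous:"+orgID); err != nil {
		return err
	}
	object := "public_dashboard:" + dashboardID
	if err := roles.PatchObjects(ctx, orgID, roleName, "read", []string{object}, nil); err != nil {
		return err
	}
	return store.CreatePublic(ctx, dashboardID)
}

func main() {
	fmt.Println("sketch of the public-dashboard grant flow; see createPublic above")
}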

View File

@@ -0,0 +1,165 @@
package implrole
import (
"context"
"slices"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
licensing licensing.Licensing
registry []role.RegisterTypeable
}
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return &setter{
store: store,
authz: authz,
licensing: licensing,
registry: registry,
}
}
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range setter.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, setter.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range setter.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := setter.
authz.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return setter.store.Delete(ctx, orgID, id)
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}

View File

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
@@ -55,6 +56,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

View File

@@ -211,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
r.Use(otelmux.Middleware(
"apiserver",
@@ -237,6 +237,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,8 +63,6 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}
var _ baserules.Rule = (*AnomalyRule)(nil)
func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -492,7 +490,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -555,7 +553,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: r.EvalWindow(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

View File

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
evalTime := baseTime.Add(5 * time.Minute)
target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,7 +213,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}
// newTask returns an appropriate group for the rule type
// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)

View File

@@ -12,8 +12,6 @@ export interface MockUPlotInstance {
export interface MockUPlotPaths {
spline: jest.Mock;
bars: jest.Mock;
linear: jest.Mock;
stepped: jest.Mock;
}
// Create mock instance methods
@@ -25,23 +23,10 @@ const createMockUPlotInstance = (): MockUPlotInstance => ({
setSeries: jest.fn(),
});
// Path builder: (self, seriesIdx, idx0, idx1) => paths or null
const createMockPathBuilder = (name: string): jest.Mock =>
jest.fn(() => ({
name, // To test if the correct pathBuilder is used
stroke: jest.fn(),
fill: jest.fn(),
clip: jest.fn(),
}));
// Create mock paths - linear, spline, stepped needed by UPlotSeriesBuilder.getPathBuilder
const mockPaths = {
spline: jest.fn(() => createMockPathBuilder('spline')),
bars: jest.fn(() => createMockPathBuilder('bars')),
linear: jest.fn(() => createMockPathBuilder('linear')),
stepped: jest.fn((opts?: { align?: number }) =>
createMockPathBuilder(`stepped-(${opts?.align ?? 0})`),
),
// Create mock paths
const mockPaths: MockUPlotPaths = {
spline: jest.fn(),
bars: jest.fn(),
};
// Mock static methods

View File

@@ -18,8 +18,8 @@ import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynam
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { isEmpty, map } from 'lodash-es';
import {
ArrowLeft,

View File

@@ -1,6 +1,6 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';

View File

@@ -3,7 +3,7 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';

View File

@@ -33,8 +33,8 @@ import { useChartMutable } from 'hooks/useChartMutable';
import useComponentPermission from 'hooks/useComponentPermission';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import GetMinMax from 'lib/getMinMax';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';

View File

@@ -8,8 +8,8 @@ import { populateMultipleResults } from 'container/NewWidget/LeftContainer/Widge
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';

View File

@@ -6,7 +6,7 @@ import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { AppState } from 'store/reducers';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

View File

@@ -27,8 +27,8 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';

View File

@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { ServicesList } from 'types/api/metrics/getService';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';

View File

@@ -13,7 +13,7 @@ import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useNotifications } from 'hooks/useNotifications';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';

View File

@@ -3,8 +3,8 @@ import { useSelector } from 'react-redux';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

View File

@@ -117,19 +117,13 @@ export class UPlotAxisBuilder extends ConfigBuilder<AxisProps, Axis> {
/**
* Calculate axis size from existing size property
*/
private getExistingAxisSize({
uplotInstance,
axis,
values,
axisIdx,
cycleNum,
}: {
uplotInstance: uPlot;
axis: Axis;
values: string[] | undefined;
axisIdx: number;
cycleNum: number;
}): number {
private getExistingAxisSize(
self: uPlot,
axis: Axis,
values: string[] | undefined,
axisIdx: number,
cycleNum: number,
): number {
const internalSize = (axis as { _size?: number })._size;
if (internalSize !== undefined) {
return internalSize;
@@ -137,7 +131,7 @@ export class UPlotAxisBuilder extends ConfigBuilder<AxisProps, Axis> {
const existingSize = axis.size;
if (typeof existingSize === 'function') {
return existingSize(uplotInstance, values ?? [], axisIdx, cycleNum);
return existingSize(self, values ?? [], axisIdx, cycleNum);
}
return existingSize ?? 0;
@@ -184,13 +178,7 @@ export class UPlotAxisBuilder extends ConfigBuilder<AxisProps, Axis> {
// Bail out, force convergence
if (cycleNum > 1) {
return this.getExistingAxisSize({
uplotInstance: self,
axis,
values,
axisIdx,
cycleNum,
});
return this.getExistingAxisSize(self, axis, values, axisIdx, cycleNum);
}
const gap = this.props.gap ?? 5;

View File

@@ -1,341 +0,0 @@
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { uPlotXAxisValuesFormat } from 'lib/uPlotLib/utils/constants';
import type uPlot from 'uplot';
import type { AxisProps } from '../types';
import { UPlotAxisBuilder } from '../UPlotAxisBuilder';
jest.mock('components/Graph/yAxisConfig', () => ({
getToolTipValue: jest.fn(),
}));
const createAxisProps = (overrides: Partial<AxisProps> = {}): AxisProps => ({
scaleKey: 'x',
label: 'Time',
isDarkMode: false,
show: true,
...overrides,
});
describe('UPlotAxisBuilder', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('builds basic axis config with defaults', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
label: 'Time',
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('x');
expect(config.label).toBe('Time');
expect(config.show).toBe(true);
expect(config.side).toBe(2);
expect(config.gap).toBe(5);
// Default grid and ticks are created
expect(config.grid).toEqual({
stroke: 'rgba(0,0,0,0.5)',
width: 0.2,
show: true,
});
expect(config.ticks).toEqual({
width: 0.3,
show: true,
});
});
it('merges custom grid config over defaults and respects isDarkMode and isLogScale', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
isDarkMode: true,
isLogScale: true,
grid: {
width: 1,
},
}),
);
const config = builder.getConfig();
expect(config.grid).toEqual({
// stroke falls back to theme-based default when not provided
stroke: 'rgba(231,233,237,0.3)',
// provided width overrides default
width: 1,
// show falls back to default when not provided
show: true,
});
});
it('uses provided ticks config when present and falls back to defaults otherwise', () => {
const customTicks = { width: 1, show: false };
const withTicks = new UPlotAxisBuilder(
createAxisProps({
ticks: customTicks,
}),
);
const withoutTicks = new UPlotAxisBuilder(createAxisProps());
expect(withTicks.getConfig().ticks).toBe(customTicks);
expect(withoutTicks.getConfig().ticks).toEqual({
width: 0.3,
show: true,
});
});
it('uses time-based X-axis values formatter for time-series like panels', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.TIME_SERIES,
}),
);
const config = builder.getConfig();
expect(config.values).toBe(uPlotXAxisValuesFormat);
});
it('does not attach X-axis datetime formatter when panel type is not supported', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.LIST, // not in PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT
}),
);
const config = builder.getConfig();
expect(config.values).toBeUndefined();
});
it('builds Y-axis values formatter that delegates to getToolTipValue', () => {
const yBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
yAxisUnit: 'ms',
decimalPrecision: 3,
}),
);
const config = yBuilder.getConfig();
expect(typeof config.values).toBe('function');
(getToolTipValue as jest.Mock).mockImplementation(
(value: string, unit?: string, precision?: unknown) =>
`formatted:${value}:${unit}:${precision}`,
);
// Simulate uPlot calling the values formatter
const valuesFn = (config.values as unknown) as (
self: uPlot,
vals: unknown[],
) => string[];
const result = valuesFn({} as uPlot, [1, null, 2, Number.NaN]);
expect(getToolTipValue).toHaveBeenCalledTimes(2);
expect(getToolTipValue).toHaveBeenNthCalledWith(1, '1', 'ms', 3);
expect(getToolTipValue).toHaveBeenNthCalledWith(2, '2', 'ms', 3);
// Null/NaN values should map to empty strings
expect(result).toEqual(['formatted:1:ms:3', '', 'formatted:2:ms:3', '']);
});
it('adds dynamic size calculator only for Y-axis when size is not provided', () => {
const yBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
}),
);
const xBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
}),
);
const yConfig = yBuilder.getConfig();
const xConfig = xBuilder.getConfig();
expect(typeof yConfig.size).toBe('function');
expect(xConfig.size).toBeUndefined();
});
it('uses explicit size function when provided', () => {
const sizeFn: uPlot.Axis.Size = jest.fn(() => 100) as uPlot.Axis.Size;
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
size: sizeFn,
}),
);
const config = builder.getConfig();
expect(config.size).toBe(sizeFn);
});
it('builds stroke color based on stroke and isDarkMode', () => {
const explicitStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: '#ff0000',
}),
);
const darkStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: undefined,
isDarkMode: true,
}),
);
const lightStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: undefined,
isDarkMode: false,
}),
);
expect(explicitStroke.getConfig().stroke).toBe('#ff0000');
expect(darkStroke.getConfig().stroke).toBe('white');
expect(lightStroke.getConfig().stroke).toBe('black');
});
it('uses explicit values formatter when provided', () => {
const customValues: uPlot.Axis.Values = jest.fn(() => ['a', 'b', 'c']);
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
values: customValues,
}),
);
const config = builder.getConfig();
expect(config.values).toBe(customValues);
});
it('returns undefined values for scaleKey neither x nor y', () => {
const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'custom' }));
const config = builder.getConfig();
expect(config.values).toBeUndefined();
});
it('includes space in config when provided', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', space: 50 }),
);
const config = builder.getConfig();
expect(config.space).toBe(50);
});
it('includes PANEL_TYPES.BAR and PANEL_TYPES.PIE in X-axis datetime formatter', () => {
const barBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.BAR,
}),
);
const pieBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.PIE,
}),
);
expect(barBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
expect(pieBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
});
it('invokes Y-axis size calculator and delegates to getExistingAxisSize when cycleNum > 1', () => {
const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'y' }));
const config = builder.getConfig();
const sizeFn = config.size;
expect(typeof sizeFn).toBe('function');
const mockAxis = {
_size: 80,
ticks: { size: 10 },
font: ['12px sans-serif'],
};
const mockSelf = ({
axes: [mockAxis],
ctx: { measureText: jest.fn(() => ({ width: 60 })), font: '' },
} as unknown) as uPlot;
const result = (sizeFn as (
s: uPlot,
v: string[],
a: number,
c: number,
) => number)(
mockSelf,
['100', '200'],
0,
2, // cycleNum > 1
);
expect(result).toBe(80);
});
it('invokes Y-axis size calculator and computes from text width when cycleNum <= 1', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', gap: 8 }),
);
const config = builder.getConfig();
const sizeFn = config.size;
expect(typeof sizeFn).toBe('function');
const mockAxis = {
ticks: { size: 12 },
font: ['12px sans-serif'],
};
const measureText = jest.fn(() => ({ width: 48 }));
const mockSelf = ({
axes: [mockAxis],
ctx: {
measureText,
get font() {
return '';
},
set font(_v: string) {
/* noop */
},
},
} as unknown) as uPlot;
const result = (sizeFn as (
s: uPlot,
v: string[],
a: number,
c: number,
) => number)(mockSelf, ['10', '2000ms'], 0, 0);
expect(measureText).toHaveBeenCalledWith('2000ms');
expect(result).toBeGreaterThanOrEqual(12 + 8);
});
it('merge updates axis props', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', label: 'Original' }),
);
builder.merge({ label: 'Merged', yAxisUnit: 'bytes' });
const config = builder.getConfig();
expect(config.label).toBe('Merged');
expect(config.values).toBeDefined();
});
});

View File

@@ -1,325 +0,0 @@
import uPlot from 'uplot';
import type { SeriesProps } from '../types';
import { DrawStyle, SelectionPreferencesSource } from '../types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder';
// Mock only the real boundary that hits localStorage
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
}),
);
const getStoredSeriesVisibilityMock = jest.requireMock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
) as {
getStoredSeriesVisibility: jest.Mock;
};
describe('UPlotConfigBuilder', () => {
beforeEach(() => {
jest.clearAllMocks();
});
const createSeriesProps = (
overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
scaleKey: 'y',
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
...overrides,
});
it('returns correct save selection preference flag from constructor args', () => {
const builder = new UPlotConfigBuilder({
shouldSaveSelectionPreference: true,
});
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
it('returns widgetId from constructor args', () => {
const builder = new UPlotConfigBuilder({ widgetId: 'widget-123' });
expect(builder.getWidgetId()).toBe('widget-123');
});
it('sets tzDate from constructor and includes it in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder({ tzDate });
const config = builder.getConfig();
expect(config.tzDate).toBe(tzDate);
});
it('does not call onDragSelect for click without drag (width === 0)', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
expect(setSelectHooks.length).toBe(1);
const uplotInstance = ({
select: { left: 10, width: 0 },
posToVal: jest.fn(),
} as unknown) as uPlot;
// Simulate uPlot calling the hook
const setSelectHook = setSelectHooks[0];
expect(setSelectHook).toBeDefined();
if (!setSelectHook) {
throw new Error('Expected setSelect hook to be registered');
}
setSelectHook(uplotInstance);
expect(onDragSelect).not.toHaveBeenCalled();
});
it('calls onDragSelect with start and end times in milliseconds for a drag selection', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
expect(setSelectHooks.length).toBe(1);
const posToVal = jest
.fn()
// left position
.mockReturnValueOnce(100)
// left + width
.mockReturnValueOnce(110);
const uplotInstance = ({
select: { left: 50, width: 20 },
posToVal,
} as unknown) as uPlot;
const setSelectHook = setSelectHooks[0];
expect(setSelectHook).toBeDefined();
if (!setSelectHook) {
throw new Error('Expected setSelect hook to be registered');
}
setSelectHook(uplotInstance);
expect(onDragSelect).toHaveBeenCalledTimes(1);
// 100 and 110 seconds converted to milliseconds
expect(onDragSelect).toHaveBeenCalledWith(100_000, 110_000);
});
it('adds and removes hooks via addHook, and exposes them through getConfig', () => {
const builder = new UPlotConfigBuilder();
const drawHook = jest.fn();
const remove = builder.addHook('draw', drawHook as uPlot.Hooks.Defs['draw']);
let config = builder.getConfig();
expect(config.hooks?.draw).toContain(drawHook);
// Remove and ensure it no longer appears in config
remove();
config = builder.getConfig();
expect(config.hooks?.draw ?? []).not.toContain(drawHook);
});
it('adds axes, scales, and series and wires them into the final config', () => {
const builder = new UPlotConfigBuilder();
// Add axis and scale
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addScale({ scaleKey: 'y' });
// Add two series; legend indices should start from 1 (0 is the timestamp series)
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
const config = builder.getConfig();
// Axes
expect(config.axes).toHaveLength(1);
expect(config.axes?.[0].scale).toBe('y');
// Scales are returned as an object keyed by scaleKey
expect(config.scales).toBeDefined();
expect(Object.keys(config.scales ?? {})).toContain('y');
// Series: base timestamp + 2 data series
expect(config.series).toHaveLength(3);
// Base series (index 0) has a value formatter that returns empty string
const baseSeries = config.series?.[0] as { value?: () => string };
expect(typeof baseSeries?.value).toBe('function');
expect(baseSeries?.value?.()).toBe('');
// Legend items align with series and carry label and color from series config
const legendItems = builder.getLegendItems();
expect(Object.keys(legendItems)).toEqual(['1', '2']);
expect(legendItems[1].seriesIndex).toBe(1);
expect(legendItems[1].label).toBe('Requests');
expect(legendItems[2].label).toBe('Errors');
});
it('merges axis when addAxis is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder();
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addAxis({ scaleKey: 'y', label: 'Updated Label', show: false });
const config = builder.getConfig();
expect(config.axes).toHaveLength(1);
expect(config.axes?.[0].label).toBe('Updated Label');
expect(config.axes?.[0].show).toBe(false);
});
it('merges scale when addScale is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder();
builder.addScale({ scaleKey: 'y', min: 0 });
builder.addScale({ scaleKey: 'y', max: 100 });
const config = builder.getConfig();
// Only one scale entry for 'y' (merge path used, no duplicate added)
expect(config.scales).toBeDefined();
const scales = config.scales ?? {};
expect(Object.keys(scales)).toEqual(['y']);
expect(scales.y?.range).toBeDefined();
});
it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
const visibilityMap = new Map<string, boolean>([
['Requests', true],
['Errors', false],
]);
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue(
visibilityMap,
);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
const legendItems = builder.getLegendItems();
// When any series is hidden, legend visibility is driven by the stored map
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].show).toBe(false);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.show).toBe(false);
});
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.IN_MEMORY,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.getLegendItems();
builder.getConfig();
expect(
getStoredSeriesVisibilityMock.getStoredSeriesVisibility,
).not.toHaveBeenCalled();
});
it('adds thresholds only once per scale key', () => {
const builder = new UPlotConfigBuilder();
const thresholdsOptions = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 100 }],
};
builder.addThresholds(thresholdsOptions);
builder.addThresholds(thresholdsOptions);
const config = builder.getConfig();
const drawHooks = config.hooks?.draw ?? [];
// Only a single draw hook should be registered for the same scaleKey
expect(drawHooks.length).toBe(1);
});
it('merges cursor configuration with defaults instead of replacing them', () => {
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
});
const config = builder.getConfig();
expect(config.cursor?.drag?.setScale).toBe(false);
// Points configuration from DEFAULT_CURSOR_CONFIG should still be present
expect(config.cursor?.points).toBeDefined();
});
it('adds plugins and includes them in config', () => {
const builder = new UPlotConfigBuilder();
const plugin: uPlot.Plugin = {
opts: (): void => {},
hooks: {},
};
builder.addPlugin(plugin);
const config = builder.getConfig();
expect(config.plugins).toContain(plugin);
});
it('sets padding, legend, focus, select, tzDate, bands and includes them in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder();
const bands: uPlot.Band[] = [{ series: [1, 2], fill: (): string => '#000' }];
builder.setBands(bands);
builder.setPadding([10, 20, 30, 40]);
builder.setLegend({ show: true, live: true });
builder.setFocus({ alpha: 0.5 });
builder.setSelect({ left: 0, width: 0, top: 0, height: 0 });
builder.setTzDate(tzDate);
const config = builder.getConfig();
expect(config.bands).toEqual(bands);
expect(config.padding).toEqual([10, 20, 30, 40]);
expect(config.legend).toEqual({ show: true, live: true });
expect(config.focus).toEqual({ alpha: 0.5 });
expect(config.select).toEqual({ left: 0, width: 0, top: 0, height: 0 });
expect(config.tzDate).toBe(tzDate);
});
it('does not include plugins when none added', () => {
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
expect(config.plugins).toBeUndefined();
});
it('does not include bands when empty', () => {
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
expect(config.bands).toBeUndefined();
});
});

View File

@@ -1,236 +0,0 @@
import type uPlot from 'uplot';
import * as scaleUtils from '../../utils/scale';
import type { ScaleProps } from '../types';
import { DistributionType } from '../types';
import { UPlotScaleBuilder } from '../UPlotScaleBuilder';
const createScaleProps = (overrides: Partial<ScaleProps> = {}): ScaleProps => ({
scaleKey: 'y',
time: false,
auto: undefined,
min: undefined,
max: undefined,
softMin: undefined,
softMax: undefined,
distribution: DistributionType.Linear,
...overrides,
});
describe('UPlotScaleBuilder', () => {
const getFallbackMinMaxSpy = jest.spyOn(
scaleUtils,
'getFallbackMinMaxTimeStamp',
);
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes softMin/softMax correctly when both are 0 (treated as unset)', () => {
const builder = new UPlotScaleBuilder(
createScaleProps({
softMin: 0,
softMax: 0,
}),
);
// Non-time scale, so the config path goes through the thresholds pipeline; we only care that
// adjustSoftLimitsWithThresholds receives null soft limits instead of 0/0.
const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
builder.getConfig();
expect(adjustSpy).toHaveBeenCalledWith(null, null, undefined, undefined);
});
it('handles time scales using explicit min/max and rounds max down to the previous minute', () => {
const min = 1_700_000_000; // seconds
const max = 1_700_000_600; // seconds
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'x',
time: true,
min,
max,
}),
);
const config = builder.getConfig();
const xScale = config.x;
expect(xScale.time).toBe(true);
expect(xScale.auto).toBe(false);
expect(Array.isArray(xScale.range)).toBe(true);
const [resolvedMin, resolvedMax] = xScale.range as [number, number];
// min is passed through
expect(resolvedMin).toBe(min);
// max is coerced to "endTime - 1 minute" and rounded down to minute precision
const oneMinuteAgoTimestamp = (max - 60) * 1000;
const currentDate = new Date(oneMinuteAgoTimestamp);
currentDate.setSeconds(0);
currentDate.setMilliseconds(0);
const expectedMax = Math.floor(currentDate.getTime() / 1000);
expect(resolvedMax).toBe(expectedMax);
});
it('falls back to getFallbackMinMaxTimeStamp when time scale has no min/max', () => {
getFallbackMinMaxSpy.mockReturnValue({
fallbackMin: 100,
fallbackMax: 200,
});
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'x',
time: true,
min: undefined,
max: undefined,
}),
);
const config = builder.getConfig();
const [resolvedMin, resolvedMax] = config.x.range as [number, number];
expect(getFallbackMinMaxSpy).toHaveBeenCalled();
expect(resolvedMin).toBe(100);
// max is aligned to the minute boundary of fallbackMax - 60 seconds
expect(resolvedMax).toBeLessThanOrEqual(200);
expect(resolvedMax).toBeGreaterThan(100);
});
it('pipes limits through soft-limit adjustment and log-scale normalization before range config', () => {
const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
const normalizeSpy = jest.spyOn(scaleUtils, 'normalizeLogScaleLimits');
const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');
const thresholds = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 10 }],
yAxisUnit: 'ms',
};
const builder = new UPlotScaleBuilder(
createScaleProps({
softMin: 1,
softMax: 5,
min: 0,
max: 100,
distribution: DistributionType.Logarithmic,
thresholds,
logBase: 2,
padMinBy: 0.1,
padMaxBy: 0.2,
}),
);
builder.getConfig();
expect(adjustSpy).toHaveBeenCalledWith(1, 5, thresholds.thresholds, 'ms');
expect(normalizeSpy).toHaveBeenCalledWith({
distr: DistributionType.Logarithmic,
logBase: 2,
limits: {
min: 0,
max: 100,
softMin: expect.anything(),
softMax: expect.anything(),
},
});
expect(getRangeConfigSpy).toHaveBeenCalled();
});
it('computes distribution config for non-time scales and wires range function when range is not provided', () => {
const createRangeFnSpy = jest.spyOn(scaleUtils, 'createRangeFunction');
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
time: false,
distribution: DistributionType.Linear,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(createRangeFnSpy).toHaveBeenCalled();
// range should be a function when not provided explicitly
expect(typeof yScale.range).toBe('function');
// distribution config should be applied
expect(yScale.distr).toBeDefined();
expect(yScale.log).toBeDefined();
});
it('respects explicit range function when provided on props', () => {
const explicitRange: uPlot.Scale.Range = jest.fn(() => [
0,
10,
]) as uPlot.Scale.Range;
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
range: explicitRange,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(yScale.range).toBe(explicitRange);
});
it('derives auto flag when not explicitly provided, based on hasFixedRange and time', () => {
const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');
const builder = new UPlotScaleBuilder(
createScaleProps({
min: 0,
max: 100,
time: false,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(getRangeConfigSpy).toHaveBeenCalled();
// For non-time scale with fixed min/max, hasFixedRange is true → auto should remain false
expect(yScale.auto).toBe(false);
});
it('merge updates internal min/max/soft limits while preserving other props', () => {
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
min: 0,
max: 10,
softMin: 1,
softMax: 9,
time: false,
}),
);
builder.merge({
min: 2,
softMax: undefined,
});
expect(builder.props.min).toBe(2);
expect(builder.props.softMax).toBe(undefined);
expect(builder.props.max).toBe(10);
expect(builder.props.softMin).toBe(1);
expect(builder.props.time).toBe(false);
expect(builder.props.scaleKey).toBe('y');
expect(builder.props.distribution).toBe(DistributionType.Linear);
expect(builder.props.thresholds).toBe(undefined);
});
});

View File

@@ -1,300 +0,0 @@
import { themeColors } from 'constants/theme';
import uPlot from 'uplot';
import type { SeriesProps } from '../types';
import {
DrawStyle,
LineInterpolation,
LineStyle,
VisibilityMode,
} from '../types';
import { UPlotSeriesBuilder } from '../UPlotSeriesBuilder';
const createBaseProps = (
overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
scaleKey: 'y',
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
isDarkMode: false,
...overrides,
});
interface MockPath extends uPlot.Series.Paths {
name?: string;
}
describe('UPlotSeriesBuilder', () => {
it('maps basic props into uPlot series config', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'Latency',
spanGaps: true,
show: false,
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('y');
expect(config.label).toBe('Latency');
expect(config.spanGaps).toBe(true);
expect(config.show).toBe(false);
expect(config.pxAlign).toBe(true);
expect(typeof config.value).toBe('function');
});
it('uses explicit lineColor when provided, regardless of mapping', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
lineColor: '#ff00ff',
colorMapping: { Requests: '#00ff00' },
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#ff00ff');
});
it('falls back to theme colors when no label is provided', () => {
const darkBuilder = new UPlotSeriesBuilder(
createBaseProps({
label: undefined,
isDarkMode: true,
lineColor: undefined,
}),
);
const lightBuilder = new UPlotSeriesBuilder(
createBaseProps({
label: undefined,
isDarkMode: false,
lineColor: undefined,
}),
);
const darkConfig = darkBuilder.getConfig();
const lightConfig = lightBuilder.getConfig();
expect(darkConfig.stroke).toBe(themeColors.white);
expect(lightConfig.stroke).toBe(themeColors.black);
});
it('uses colorMapping when available and no explicit lineColor is provided', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'Requests',
colorMapping: { Requests: '#123456' },
lineColor: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#123456');
});
it('passes through a custom pathBuilder when provided', () => {
const customPaths = (jest.fn() as unknown) as uPlot.Series.PathBuilder;
const builder = new UPlotSeriesBuilder(
createBaseProps({
pathBuilder: customPaths,
}),
);
const config = builder.getConfig();
expect(config.paths).toBe(customPaths);
});
it('does not build line paths when drawStyle is Points, but still renders points', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Points,
pointSize: 4,
lineWidth: 2,
lineColor: '#aa00aa',
}),
);
const config = builder.getConfig();
expect(typeof config.paths).toBe('function');
expect(config.paths && config.paths({} as uPlot, 1, 0, 10)).toBeNull();
expect(config.points).toBeDefined();
expect(config.points?.stroke).toBe('#aa00aa');
expect(config.points?.fill).toBe('#aa00aa');
expect(config.points?.show).toBe(true);
expect(config.points?.size).toBe(4);
});
it('derives point size based on lineWidth and pointSize', () => {
const smallPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 4,
pointSize: 2,
}),
);
const largePointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 2,
pointSize: 4,
}),
);
const smallConfig = smallPointsBuilder.getConfig();
const largeConfig = largePointsBuilder.getConfig();
expect(smallConfig.points?.size).toBeUndefined();
expect(largeConfig.points?.size).toBe(4);
});
it('uses pointsBuilder when provided instead of default visibility logic', () => {
const pointsBuilder: uPlot.Series.Points.Show = jest.fn(
() => true,
) as uPlot.Series.Points.Show;
const builder = new UPlotSeriesBuilder(
createBaseProps({
pointsBuilder,
drawStyle: DrawStyle.Line,
}),
);
const config = builder.getConfig();
expect(config.points?.show).toBe(pointsBuilder);
});
it('respects VisibilityMode for point visibility when no custom pointsBuilder is given', () => {
const neverPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Never,
}),
);
const alwaysPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Always,
}),
);
const neverConfig = neverPointsBuilder.getConfig();
const alwaysConfig = alwaysPointsBuilder.getConfig();
expect(neverConfig.points?.show).toBe(false);
expect(alwaysConfig.points?.show).toBe(true);
});
it('applies LineStyle.Dashed and lineCap to line config', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
lineStyle: LineStyle.Dashed,
lineCap: 'round' as CanvasLineCap,
}),
);
const config = builder.getConfig();
expect(config.dash).toEqual([10, 10]);
expect(config.cap).toBe('round');
});
it('builds default paths for Line drawStyle and invokes the path builder', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.Linear,
}),
);
const config = builder.getConfig();
const result = config.paths?.({} as uPlot, 1, 0, 10);
expect((result as MockPath).name).toBe('linear');
});
it('uses StepBefore and StepAfter interpolation for line paths', () => {
const stepBeforeBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.StepBefore,
}),
);
const stepAfterBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.StepAfter,
}),
);
const stepBeforeConfig = stepBeforeBuilder.getConfig();
const stepAfterConfig = stepAfterBuilder.getConfig();
const stepBeforePath = stepBeforeConfig.paths?.({} as uPlot, 1, 0, 5);
const stepAfterPath = stepAfterConfig.paths?.({} as uPlot, 1, 0, 5);
expect((stepBeforePath as MockPath).name).toBe('stepped-(-1)');
expect((stepAfterPath as MockPath).name).toBe('stepped-(1)');
});
it('defaults to spline interpolation when lineInterpolation is Spline or undefined', () => {
const splineBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.Spline,
}),
);
const defaultBuilder = new UPlotSeriesBuilder(
createBaseProps({ drawStyle: DrawStyle.Line }),
);
const splineConfig = splineBuilder.getConfig();
const defaultConfig = defaultBuilder.getConfig();
const splinePath = splineConfig.paths?.({} as uPlot, 1, 0, 10);
const defaultPath = defaultConfig.paths?.({} as uPlot, 1, 0, 10);
expect((splinePath as MockPath).name).toBe('spline');
expect((defaultPath as MockPath).name).toBe('spline');
});
it('preserves spanGaps true when provided as boolean', () => {
const builder = new UPlotSeriesBuilder(createBaseProps({ spanGaps: true }));
const config = builder.getConfig();
expect(config.spanGaps).toBe(true);
});
it('uses generateColor when label has no colorMapping and no lineColor', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'CustomSeries',
colorMapping: {},
lineColor: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#E64A3C');
});
it('passes through pointsFilter when provided', () => {
const pointsFilter: uPlot.Series.Points.Filter = jest.fn(
(_self, _seriesIdx, _show) => null,
);
const builder = new UPlotSeriesBuilder(
createBaseProps({
pointsFilter,
drawStyle: DrawStyle.Line,
}),
);
const config = builder.getConfig();
expect(config.points?.filter).toBe(pointsFilter);
});
});

View File

@@ -0,0 +1,477 @@
import React from 'react';
import { act, screen, waitFor } from '@testing-library/react';
import { render } from 'tests/test-utils';
import TooltipPlugin from '../TooltipPlugin/TooltipPlugin';
import { DashboardCursorSync } from '../TooltipPlugin/types';
// ---------------------------------------------------------------------------
// Mock helpers
// ---------------------------------------------------------------------------
type HookHandler = (...args: any[]) => void;
interface ConfigMock {
scales: Array<{ props: { time?: boolean } }>;
setCursor: jest.Mock;
addHook: jest.Mock<() => void, [string, HookHandler]> & {
removeCallbacks: jest.Mock[];
};
}
function createConfigMock(
overrides: Partial<ConfigMock> = {},
): ConfigMock & { removeCallbacks: jest.Mock[] } {
const removeCallbacks: jest.Mock[] = [];
const addHook = Object.assign(
jest.fn((_: string, _handler: HookHandler) => {
const remove = jest.fn();
removeCallbacks.push(remove);
return remove;
}),
{ removeCallbacks },
) as ConfigMock['addHook'];
return {
scales: [{ props: { time: false } }],
setCursor: jest.fn(),
addHook,
...overrides,
removeCallbacks,
};
}
function getHandler(config: ConfigMock, hookName: string): HookHandler {
const call = config.addHook.mock.calls.find(([name]) => name === hookName);
if (!call) {
throw new Error(`Hook "${hookName}" was not registered on config`);
}
return call[1];
}
function createFakePlot(): {
over: HTMLDivElement;
setCursor: jest.Mock;
cursor: { event: Record<string, unknown> };
} {
return {
over: document.createElement('div'),
setCursor: jest.fn(),
cursor: { event: {} },
};
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe('TooltipPlugin', () => {
beforeEach(() => {
jest.spyOn(window, 'requestAnimationFrame').mockImplementation((callback) => {
(callback as FrameRequestCallback)(0);
return 0;
});
jest
.spyOn(window, 'cancelAnimationFrame')
// eslint-disable-next-line @typescript-eslint/no-empty-function
.mockImplementation(() => {});
});
afterEach(() => {
jest.restoreAllMocks();
});
/**
* Shorthand: render the plugin, initialise a fake plot, and trigger a
* series focus so the tooltip becomes visible. Returns the fake plot
* instance for further interaction (e.g. clicking the overlay).
*/
function renderAndActivateHover(
config: ConfigMock,
renderFn: (...args: any[]) => React.ReactNode = (): React.ReactNode =>
React.createElement('div', null, 'tooltip-body'),
extraProps: Record<string, unknown> = {},
): ReturnType<typeof createFakePlot> {
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: renderFn,
syncMode: DashboardCursorSync.None,
...extraProps,
}),
);
const fakePlot = createFakePlot();
const initHandler = getHandler(config, 'init');
const setSeriesHandler = getHandler(config, 'setSeries');
act(() => {
initHandler(fakePlot);
setSeriesHandler(fakePlot, 1, { focus: true });
});
return fakePlot;
}
// ---- Initial state --------------------------------------------------------
describe('before any interaction', () => {
it('does not render anything when there is no active hover', () => {
const config = createConfigMock();
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => React.createElement('div', null, 'tooltip-body'),
syncMode: DashboardCursorSync.None,
}),
);
expect(document.querySelector('.tooltip-plugin-container')).toBeNull();
});
it('registers all required uPlot hooks on mount', () => {
const config = createConfigMock();
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => null,
syncMode: DashboardCursorSync.None,
}),
);
const registered = config.addHook.mock.calls.map(([name]) => name);
expect(registered).toContain('ready');
expect(registered).toContain('init');
expect(registered).toContain('setData');
expect(registered).toContain('setSeries');
expect(registered).toContain('setLegend');
expect(registered).toContain('setCursor');
});
});
// ---- Tooltip rendering ------------------------------------------------------
describe('tooltip rendering', () => {
it('renders contents into a portal on document.body when hover is active', () => {
const config = createConfigMock();
const renderTooltip = jest.fn(() =>
React.createElement('div', null, 'tooltip-body'),
);
renderAndActivateHover(config, renderTooltip);
expect(renderTooltip).toHaveBeenCalled();
expect(screen.getByText('tooltip-body')).toBeInTheDocument();
const container = document.querySelector(
'.tooltip-plugin-container',
) as HTMLElement;
expect(container).not.toBeNull();
expect(container.parentElement).toBe(document.body);
});
});
// ---- Pin behaviour ----------------------------------------------------------
describe('pin behaviour', () => {
it('pins the tooltip when canPinTooltip is true and overlay is clicked', () => {
const config = createConfigMock();
const fakePlot = renderAndActivateHover(config, undefined, {
canPinTooltip: true,
});
const container = document.querySelector(
'.tooltip-plugin-container',
) as HTMLElement;
expect(container.classList.contains('pinned')).toBe(false);
act(() => {
fakePlot.over.dispatchEvent(new MouseEvent('click', { bubbles: true }));
});
return waitFor(() => {
const updated = document.querySelector(
'.tooltip-plugin-container',
) as HTMLElement | null;
expect(updated).not.toBeNull();
expect(updated?.classList.contains('pinned')).toBe(true);
});
});
it('dismisses a pinned tooltip via the dismiss callback', async () => {
jest.useFakeTimers();
const config = createConfigMock();
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: (args: any) =>
React.createElement(
'button',
{ type: 'button', onClick: args.dismiss },
'Dismiss',
),
syncMode: DashboardCursorSync.None,
canPinTooltip: true,
}),
);
const fakePlot = createFakePlot();
act(() => {
getHandler(config, 'init')(fakePlot);
getHandler(config, 'setSeries')(fakePlot, 1, { focus: true });
jest.runAllTimers();
});
// Pin the tooltip.
act(() => {
fakePlot.over.dispatchEvent(new MouseEvent('click', { bubbles: true }));
jest.runAllTimers();
});
const button = await screen.findByRole('button', { name: 'Dismiss' });
act(() => {
button.click();
jest.runAllTimers();
});
expect(document.querySelector('.tooltip-plugin-container')).toBeNull();
jest.useRealTimers();
});
it('drops a pinned tooltip when the underlying data changes', () => {
jest.useFakeTimers();
const config = createConfigMock();
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => React.createElement('div', null, 'tooltip-body'),
syncMode: DashboardCursorSync.None,
canPinTooltip: true,
}),
);
const fakePlot = createFakePlot();
act(() => {
getHandler(config, 'init')(fakePlot);
getHandler(config, 'setSeries')(fakePlot, 1, { focus: true });
jest.runAllTimers();
});
// Pin.
act(() => {
fakePlot.over.dispatchEvent(new MouseEvent('click', { bubbles: true }));
jest.runAllTimers();
});
expect(
(document.querySelector(
'.tooltip-plugin-container',
) as HTMLElement)?.classList.contains('pinned'),
).toBe(true);
// Simulating a data update should dismiss the pinned tooltip.
act(() => {
getHandler(config, 'setData')(fakePlot);
jest.runAllTimers();
});
expect(document.querySelector('.tooltip-plugin-container')).toBeNull();
jest.useRealTimers();
});
it('unpins the tooltip on outside mousedown', () => {
jest.useFakeTimers();
const config = createConfigMock();
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => React.createElement('div', null, 'pinned content'),
syncMode: DashboardCursorSync.None,
canPinTooltip: true,
}),
);
const fakePlot = createFakePlot();
act(() => {
getHandler(config, 'init')(fakePlot);
getHandler(config, 'setSeries')(fakePlot, 1, { focus: true });
jest.runAllTimers();
});
act(() => {
fakePlot.over.dispatchEvent(new MouseEvent('click', { bubbles: true }));
jest.runAllTimers();
});
expect(
document
.querySelector('.tooltip-plugin-container')
?.classList.contains('pinned'),
).toBe(true);
// Click outside the tooltip container.
act(() => {
document.body.dispatchEvent(new MouseEvent('mousedown', { bubbles: true }));
jest.runAllTimers();
});
expect(document.querySelector('.tooltip-plugin-container')).toBeNull();
jest.useRealTimers();
});
it('unpins the tooltip on outside keydown', () => {
jest.useFakeTimers();
const config = createConfigMock();
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => React.createElement('div', null, 'pinned content'),
syncMode: DashboardCursorSync.None,
canPinTooltip: true,
}),
);
const fakePlot = createFakePlot();
act(() => {
getHandler(config, 'init')(fakePlot);
getHandler(config, 'setSeries')(fakePlot, 1, { focus: true });
jest.runAllTimers();
});
act(() => {
fakePlot.over.dispatchEvent(new MouseEvent('click', { bubbles: true }));
jest.runAllTimers();
});
expect(
document
.querySelector('.tooltip-plugin-container')
?.classList.contains('pinned'),
).toBe(true);
// Press a key outside the tooltip.
act(() => {
document.body.dispatchEvent(
new KeyboardEvent('keydown', { key: 'Escape', bubbles: true }),
);
jest.runAllTimers();
});
expect(document.querySelector('.tooltip-plugin-container')).toBeNull();
jest.useRealTimers();
});
});
// ---- Cursor sync ------------------------------------------------------------
describe('cursor sync', () => {
it('enables uPlot cursor sync for time-based scales when mode is Tooltip', () => {
const config = createConfigMock({
scales: [{ props: { time: true } }],
} as any);
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => null,
syncMode: DashboardCursorSync.Tooltip,
syncKey: 'dashboard-sync',
}),
);
expect(config.setCursor).toHaveBeenCalledWith({
sync: { key: 'dashboard-sync', scales: ['x', null] },
});
});
it('does not enable cursor sync when mode is None', () => {
const config = createConfigMock({
scales: [{ props: { time: true } }],
} as any);
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => null,
syncMode: DashboardCursorSync.None,
}),
);
expect(config.setCursor).not.toHaveBeenCalled();
});
it('does not enable cursor sync when scale is not time-based', () => {
const config = createConfigMock({
scales: [{ props: { time: false } }],
} as any);
render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => null,
syncMode: DashboardCursorSync.Tooltip,
}),
);
expect(config.setCursor).not.toHaveBeenCalled();
});
});
// ---- Cleanup ----------------------------------------------------------------
describe('cleanup on unmount', () => {
it('removes window event listeners and all uPlot hooks', () => {
const config = createConfigMock();
const addSpy = jest.spyOn(window, 'addEventListener');
const removeSpy = jest.spyOn(window, 'removeEventListener');
const { unmount } = render(
React.createElement(TooltipPlugin, {
config: config as any,
render: () => null,
syncMode: DashboardCursorSync.None,
}),
);
const resizeCall = addSpy.mock.calls.find(([type]) => type === 'resize');
const scrollCall = addSpy.mock.calls.find(([type]) => type === 'scroll');
expect(resizeCall).toBeDefined();
expect(scrollCall).toBeDefined();
const resizeListener = resizeCall?.[1] as EventListener;
const scrollListener = scrollCall?.[1] as EventListener;
const scrollOptions = scrollCall?.[2];
unmount();
config.removeCallbacks.forEach((removeFn) => {
expect(removeFn).toHaveBeenCalledTimes(1);
});
expect(removeSpy).toHaveBeenCalledWith('resize', resizeListener);
expect(removeSpy).toHaveBeenCalledWith(
'scroll',
scrollListener,
scrollOptions,
);
});
});
});
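For orientation, a minimal sketch of how TooltipPlugin can be wired against a real config builder instead of the mock used in the tests above. The import paths, widget id, and scale/axis values are illustrative assumptions and are not part of this diff; the props (config, render, syncMode, syncKey, canPinTooltip) mirror the ones the tests exercise.

import React from 'react';
import TooltipPlugin from '../TooltipPlugin/TooltipPlugin';
import { DashboardCursorSync } from '../TooltipPlugin/types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder'; // relative path assumed

// Build a config with a time-based x scale so tooltip cursor sync can engage.
const builder = new UPlotConfigBuilder({ widgetId: 'widget-1' });
builder.addScale({ scaleKey: 'x', time: true });
builder.addAxis({ scaleKey: 'y', label: 'Requests' });

// Mount the plugin next to the chart; render receives hover details (including dismiss).
export const tooltip = React.createElement(TooltipPlugin, {
	config: builder,
	syncMode: DashboardCursorSync.Tooltip,
	syncKey: 'dashboard-sync',
	canPinTooltip: true,
	render: (): React.ReactNode => React.createElement('div', null, 'tooltip body'),
});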

View File

@@ -1,7 +1,7 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { commaValuesParser } from '../../lib/dashboardVariables/customCommaValuesParser';
import { commaValuesParser } from '../../lib/dashbaordVariables/customCommaValuesParser';
interface UrlVariables {
[key: string]: any;

127
pkg/apis/fields/api.go Normal file
View File

@@ -0,0 +1,127 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}

162
pkg/apis/fields/parse.go Normal file
View File

@@ -0,0 +1,162 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for the fieldValue request, falling back to the default of 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}
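All selector fields above are read from the URL query string, so a plain GET with query parameters is enough to exercise the keys endpoint (the /api/v1/fields/keys path appears in the route definitions further down). A hedged sketch of such a call follows; the host, token, and concrete values are illustrative assumptions, while the query keys and the response shape come from parse.go and api.go above.

// Illustrative client call against the fields keys endpoint; placeholders only.
async function fetchFieldKeys(): Promise<void> {
	const params = new URLSearchParams({
		signal: 'logs',
		searchText: 'resource.service.name', // field context may be inferred from the key text
		limit: '100',
		// the handler rounds this down to the nearest 6 hours
		startUnixMilli: String(Date.now() - 6 * 60 * 60 * 1000),
		endUnixMilli: String(Date.now()),
	});
	const response = await fetch(
		`https://signoz.example.com/api/v1/fields/keys?${params.toString()}`,
		{ headers: { Authorization: 'Bearer <token>' } }, // auth header is an assumption
	);
	// Response shape mirrors fieldKeysResponse in api.go: { keys, complete }.
	const { keys, complete } = await response.json();
	console.log(complete, Object.keys(keys ?? {}));
}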

View File

@@ -1,50 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -13,11 +13,11 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -42,8 +42,8 @@ type provider struct {
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
roleGetter role.Getter
roleHandler role.Handler
}
func NewFactory(
@@ -61,31 +61,11 @@ func NewFactory(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
fieldsHandler,
authzHandler,
)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
})
}
@@ -107,8 +87,8 @@ func newProvider(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -129,11 +109,11 @@ func newProvider(
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
if err := provider.AddToRouter(router); err != nil {
return nil, err
@@ -195,10 +175,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -10,7 +10,7 @@ import (
)
func (provider *provider) addRoleRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
ID: "CreateRole",
Tags: []string{"role"},
Summary: "Create role",
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
ID: "ListRoles",
Tags: []string{"role"},
Summary: "List roles",
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
Summary: "Get role",
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
ID: "DeleteRole",
Tags: []string{"role"},
Summary: "Delete role",

View File

@@ -2,11 +2,9 @@ package authz
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -31,76 +29,4 @@ type AuthZ interface {
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
// Bootstrap managed roles transactions and user assignments
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"context"

View File

@@ -2,24 +2,35 @@ package openfgaauthz
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type provider struct {
server *openfgaserver.Server
store roletypes.Store
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
@@ -29,194 +40,301 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &provider{
server: server,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.server.Start(ctx)
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := provider.getOrCreateModel(ctx, storeId)
if err != nil {
return err
}
provider.mtx.Lock()
provider.modelID = modelID
provider.storeID = storeId
provider.mtx.Unlock()
<-provider.stopChan
return nil
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.server.Stop(ctx)
provider.openfgaServer.Close()
close(provider.stopChan)
return nil
}
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
return provider.server.Check(ctx, tupleReq)
storeID, modelID := provider.getStoreIDandModelID()
checkResponse, err := provider.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
return provider.server.BatchCheck(ctx, tupleReq)
storeID, modelID := provider.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := provider.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.server.Write(ctx, additions, deletions)
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := provider.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.server.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, nil, tuples)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
storeID, modelID := provider.getStoreIDandModelID()
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return err
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return nil
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := provider.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) getStoreIDandModelID() (string, string) {
provider.mtx.RLock()
defer provider.mtx.RUnlock()
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
storeID := provider.storeID
modelID := provider.modelID
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return nil
return storeID, modelID
}

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {
expectedModel := `module base
type user`
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)
storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -1,334 +0,0 @@
package openfgaserver
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type Server struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &Server{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (server *Server) Start(ctx context.Context) error {
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := server.getOrCreateModel(ctx, storeID)
if err != nil {
return err
}
server.mtx.Lock()
server.modelID = modelID
server.storeID = storeID
server.mtx.Unlock()
<-server.stopChan
return nil
}
func (server *Server) Stop(ctx context.Context) error {
server.openfgaServer.Close()
close(server.stopChan)
return nil
}
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := server.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := server.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := server.getStoreIDandModelID()
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := server.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (server *Server) getStoreIDandModelID() (string, string) {
server.mtx.RLock()
defer server.mtx.RUnlock()
storeID := server.storeID
modelID := server.modelID
return storeID, modelID
}

View File

@@ -8,6 +8,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -23,14 +24,15 @@ type AuthZ struct {
logger *slog.Logger
orgGetter organization.Getter
authzService authz.AuthZ
roleGetter role.Getter
}
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {

View File

@@ -4,7 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {
statsreporter.StatsCollector
authz.RegisterTypeable
role.RegisterTypeable
}
type Handler interface {

View File

@@ -206,10 +206,6 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return nil
}
// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")

View File

@@ -1,11 +0,0 @@
package fields
import "net/http"
type Handler interface {
// Gets the fields keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the fields values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -1,79 +0,0 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}

View File

@@ -0,0 +1,63 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type getter struct {
store roletypes.Store
}
func NewGetter(store roletypes.Store) role.Getter {
return &getter{store: store}
}
func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := getter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}

View File

@@ -0,0 +1,83 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type granter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
return &granter{store: store, authz: authz}
}
func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, tuples, nil)
}
func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := granter.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = granter.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, nil, tuples)
}
func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
return nil
}
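A minimal caller sketch, assuming the constructors and helpers shown in this diff (the example package and the grantAdmin name are hypothetical); it mirrors the granter.Grant call pattern the user module adopts further down:

package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// grantAdmin grants the managed admin role to a user. Illustrative sketch only,
// not part of this change set; it mirrors module.granter.Grant as used for the
// first user in the impluser diff below.
func grantAdmin(ctx context.Context, granter role.Granter, orgID, userID valuer.UUID) error {
	subject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.StringValue(), orgID, nil)
	return granter.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, subject)
}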

View File

@@ -1,12 +1,12 @@
package signozauthzapi
package implrole
import (
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,11 +14,12 @@ import (
)
type handler struct {
authz authz.AuthZ
setter role.Setter
getter role.Getter
}
func NewHandler(authz authz.AuthZ) authz.Handler {
return &handler{authz: authz}
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
return &handler{setter: setter, getter: getter}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -35,7 +36,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
@@ -63,7 +64,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
if err != nil {
render.Error(rw, err)
return
@@ -102,7 +103,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
return
}
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
if err != nil {
render.Error(rw, err)
return
@@ -113,7 +114,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
resources := handler.authz.GetResources(ctx)
resources := handler.setter.GetResources(ctx)
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
@@ -133,7 +134,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
return
}
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
if err != nil {
render.Error(rw, err)
return
@@ -162,7 +163,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -174,7 +175,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
if err != nil {
render.Error(rw, err)
return
@@ -209,7 +210,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -221,7 +222,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
if err != nil {
render.Error(rw, err)
return
@@ -244,7 +245,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return

View File

@@ -0,0 +1,53 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
return &setter{store: store, authz: authz}
}
func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,4 +1,4 @@
package sqlauthzstore
package implrole
import (
"context"
@@ -14,7 +14,7 @@ type store struct {
sqlstore sqlstore.SQLStore
}
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
return &store{sqlstore: sqlstore}
}

pkg/modules/role/role.go (new file, 85 lines)
View File

@@ -0,0 +1,85 @@
package role
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Setter interface {
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type Getter interface {
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}
type Granter interface {
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}
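A minimal wiring sketch, assuming the implrole constructors shown elsewhere in this diff (NewStore, NewSetter, NewGetter, NewGranter, NewHandler); the example package, the newRoleModule name, and the implrole import path are assumptions for illustration, not part of the change set:

package example

import (
	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/modules/role/implrole"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// newRoleModule composes the split interfaces above from a single role store:
// the HTTP handler only needs the Setter and Getter, while the Granter is
// consumed by callers such as the user module.
func newRoleModule(store sqlstore.SQLStore, authzSvc authz.AuthZ) (role.Setter, role.Getter, role.Granter, role.Handler) {
	roleStore := implrole.NewStore(store)
	setter := implrole.NewSetter(roleStore, authzSvc)
	getter := implrole.NewGetter(roleStore)
	granter := implrole.NewGranter(roleStore, authzSvc)
	handler := implrole.NewHandler(setter, getter)
	return setter, getter, granter, handler
}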

View File

@@ -8,11 +8,11 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -32,13 +32,13 @@ type Module struct {
emailing emailing.Emailing
settings factory.ScopedProviderSettings
orgSetter organization.Setter
authz authz.AuthZ
granter role.Granter
analytics analytics.Analytics
config user.Config
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -47,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
settings: settings,
orgSetter: orgSetter,
analytics: analytics,
authz: authz,
granter: granter,
config: config,
}
}
@@ -172,7 +172,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
@@ -238,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
if user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
err = m.granter.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
@@ -301,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
}
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -504,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
}
managedRoles := roletypes.NewManagedRoles(organization.ID)
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
if err != nil {
return nil, err
}
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
if err != nil {
return err
}

View File

@@ -25,6 +25,7 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -68,7 +69,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
"go.uber.org/zap"
@@ -103,11 +104,10 @@ type APIHandler struct {
querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
// temporalityMap is a map of metric name to temporality to avoid fetching
// temporality for the same metric multiple times.
//
// Querying the v4 table on a low cardinal temporality column should be
// fast, but we can still avoid the query if we have the data in memory.
// temporalityMap is a map of metric name to temporality
// to avoid fetching temporality for the same metric multiple times
// querying the v4 table on low cardinal temporality column
// should be fast but we can still avoid the query if we have the data in memory
temporalityMap map[string]map[v3.Temporality]bool
temporalityMux sync.Mutex
@@ -145,6 +145,8 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -175,6 +177,8 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -239,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -394,6 +399,13 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
@@ -1011,7 +1023,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
// the query range is calculated based on the rule's evalWindow and evalDelay
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
if rule.AlertType == ruletypes.AlertTypeLogs {
if rule.Version != "v5" {
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
@@ -1218,12 +1230,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
dashboard := new(dashboardtypes.Dashboard)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
return
}
dashboard = cloudIntegrationDashboard
dashboard = cloudintegrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1552,13 +1564,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
responseData := &model.QueryData{
response_data := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.Respond(w, responseData)
aH.Respond(w, response_data)
}
@@ -2640,12 +2652,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2693,12 +2705,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2747,12 +2759,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2801,12 +2813,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2858,12 +2870,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
@@ -2969,12 +2981,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3023,12 +3035,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3077,12 +3089,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3137,12 +3149,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
@@ -4126,11 +4138,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
aH.Respond(w, payload)
}
// listLogsPipelines lists logs pipelines for latest version
// listLogsPipelines lists logs piplines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
*logparsingpipeline.PipelinesResponse, error,
) {
// get latest agent config
// get lateset agent config
latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
@@ -4427,7 +4439,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4444,7 +4456,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, errQueriesByName)
RespondError(w, apiErr, errQuriesByName)
return
}
// get the fields if any logs query is present
@@ -4455,7 +4467,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4500,11 +4512,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQueriesByName {
for name, err := range errQuriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4780,7 +4792,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQueriesByName map[string]error
var errQuriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4810,7 +4822,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4833,11 +4845,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQueriesByName {
for name, err := range errQuriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4854,7 +4866,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQueriesByName)
RespondError(w, apiErrObj, errQuriesByName)
return
}
aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")

View File

@@ -17,6 +17,7 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -207,12 +209,13 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
api.RegisterRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -5,10 +5,10 @@ import (
"os"
"regexp"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
func GetEvalDelay() valuer.TextDuration {
func GetEvalDelay() time.Duration {
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
if err != nil {
return valuer.TextDuration{}
return 0
}
return evalDelayDuration
}
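
For context on the standard-library path shown in this hunk, a minimal standalone sketch (the function name is illustrative, not from this changeset) of the parse-with-fallback behaviour: an unparsable RULES_EVAL_DELAY value simply yields a zero duration.

```go
package main

import (
	"fmt"
	"os"
	"time"
)

// evalDelayFromEnv mirrors the pattern above: read the env var,
// fall back to "2m", and return 0 when parsing fails.
func evalDelayFromEnv() time.Duration {
	s := os.Getenv("RULES_EVAL_DELAY")
	if s == "" {
		s = "2m"
	}
	d, err := time.ParseDuration(s)
	if err != nil {
		return 0
	}
	return d
}

func main() {
	fmt.Println(evalDelayFromEnv()) // prints "2m0s" when the variable is unset
}
```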

View File

@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we lookback from the current time, we look at data for the last
// evalWindow duration
evalWindow valuer.TextDuration
evalWindow time.Duration
// holdDuration is the duration for which the alert waits before firing
holdDuration valuer.TextDuration
holdDuration time.Duration
// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
evalDelay valuer.TextDuration
evalDelay time.Duration
// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation
// newGroupEvalDelay is the grace period for new alert groups
newGroupEvalDelay valuer.TextDuration
newGroupEvalDelay *time.Duration
queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}
func WithEvalDelay(dur valuer.TextDuration) RuleOption {
func WithEvalDelay(dur time.Duration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
evalWindow: p.EvalWindow,
evalWindow: time.Duration(p.EvalWindow),
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,12 +176,13 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}
// Store newGroupEvalDelay and groupBy keys from NotificationSettings
if p.NotificationSettings != nil {
baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
baseRule.newGroupEvalDelay = &newGroupEvalDelay
}
if baseRule.evalWindow.IsZero() {
baseRule.evalWindow = valuer.MustParseTextDuration("5m")
if baseRule.evalWindow == 0 {
baseRule.evalWindow = 5 * time.Minute
}
for _, opt := range opts {
@@ -244,15 +245,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}
func (r *BaseRule) EvalDelay() valuer.TextDuration {
func (r *BaseRule) EvalDelay() time.Duration {
return r.evalDelay
}
func (r *BaseRule) EvalWindow() valuer.TextDuration {
func (r *BaseRule) EvalWindow() time.Duration {
return r.evalWindow
}
func (r *BaseRule) HoldDuration() valuer.TextDuration {
func (r *BaseRule) HoldDuration() time.Duration {
return r.holdDuration
}
@@ -280,7 +281,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()
if r.evalDelay.IsPositive() {
if r.evalDelay > 0 {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
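
A standalone sketch (illustrative names, assuming a 5m window and a 2m delay) of the shift applied above: both window bounds move back by the delay, expressed in milliseconds.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	evalDelay := 2 * time.Minute
	end := time.Now().UnixMilli()
	start := end - (5 * time.Minute).Milliseconds()

	// Subtract the delay from both bounds so the rule evaluates
	// slightly older, fully ingested data.
	if evalDelay > 0 {
		start -= evalDelay.Milliseconds()
		end -= evalDelay.Milliseconds()
	}
	fmt.Println(start, end)
}
```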
@@ -551,7 +552,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q
// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
return r.newGroupEvalDelay.IsPositive()
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
}
// isFilterNewSeriesSupported checks if the query is supported for new series filtering
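
A minimal standalone sketch (names illustrative) of the optional-value pattern the pointer variant relies on: a nil *time.Duration means "not configured", which is distinct from a configured zero.

```go
package main

import (
	"fmt"
	"time"
)

// shouldSkipNewGroups mirrors the nil-guard above: filtering applies
// only when a delay was configured and is strictly positive.
func shouldSkipNewGroups(newGroupEvalDelay *time.Duration) bool {
	return newGroupEvalDelay != nil && *newGroupEvalDelay > 0
}

func main() {
	var unset *time.Duration
	zero := time.Duration(0)
	twoMin := 2 * time.Minute

	fmt.Println(shouldSkipNewGroups(unset))   // false: not configured
	fmt.Println(shouldSkipNewGroups(&zero))   // false: configured but zero
	fmt.Println(shouldSkipNewGroups(&twoMin)) // true
}
```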

View File

@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
newGroupEvalDelay *time.Duration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,8 +159,7 @@ type filterNewSeriesTestCase struct {
func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultDelay := 2 * time.Minute
defaultGroupByFields := []string{"service_name", "env"}
logger := instrumentationtest.New().Logger()
@@ -203,7 +202,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -235,7 +234,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -262,7 +261,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -296,7 +295,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -326,7 +325,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -362,7 +361,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -391,7 +390,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -421,7 +420,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -455,7 +454,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -489,7 +488,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -533,7 +532,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -573,7 +572,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -605,7 +604,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -640,7 +639,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -698,14 +697,20 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
time.Second,
time.Duration(time.Second),
nil,
readerCache,
options,
)
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: tt.newGroupEvalDelay,
// Set newGroupEvalDelay in NotificationSettings if provided
if tt.newGroupEvalDelay != nil {
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: func() *ruletypes.Duration {
d := ruletypes.Duration(*tt.newGroupEvalDelay)
return &d
}(),
}
}
// Create BaseRule using NewBaseRule

View File

@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}
const taskNameSuffix = "webAppEditor"
const taskNamesuffix = "webAppEditor"
func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
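
A standalone usage sketch of the helper above; the task name below is made up for illustration. Everything before the "-groupname" marker is treated as the rule ID.

```go
package main

import (
	"fmt"
	"strings"
)

// ruleIDFromTaskName mirrors RuleIdFromTaskName above.
func ruleIDFromTaskName(n string) string {
	return strings.Split(n, "-groupname")[0]
}

func main() {
	// Hypothetical task name; the suffix layout follows the code above.
	fmt.Println(ruleIDFromTaskName("0194b2c3-groupname-webAppEditor")) // 0194b2c3
}
```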
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache
EvalDelay valuer.TextDuration
EvalDelay time.Duration
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create ch rule task for evalution
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create promql rule task for evalution
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}
// Stop the rule manager's rule evaluation cycles.
func (m *Manager) Stop(_ context.Context) {
func (m *Manager) Stop(ctx context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -336,7 +336,7 @@ func (m *Manager) Stop(_ context.Context) {
zap.L().Info("Rule manager stopped")
}
// EditRule writes the rule definition to the
// EditRuleDefinition writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}
// If there is another task with the same identifier, raise an error
// If there is an another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,8 +678,7 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}
// RuleTasksWithoutLock returns the list of manager's rule tasks without
// acquiring a lock on the manager.
// RuleTasks returns the list of manager's rule tasks.
func (m *Manager) RuleTasksWithoutLock() []Task {
rgs := make([]Task, 0, len(m.tasks))
@@ -890,7 +889,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has no task, start one
// rule has not task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}

View File

@@ -9,7 +9,6 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -41,8 +40,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -100,8 +99,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
Labels: map[string]string{
"service.name": "frontend",

View File

@@ -28,8 +28,6 @@ type PromRule struct {
prometheus prometheus.Prometheus
}
var _ Rule = (*PromRule)(nil)
func NewPromRule(
id string,
orgID valuer.UUID,
@@ -334,7 +332,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -398,7 +396,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
EvalWindow: ruletypes.Duration(r.evalWindow),
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}
// NewPromRuleTask holds rules that have promql condition
// and evaluates the rule at a given frequency
// newPromRuleTask holds rules that have promql condition
// and evalutes the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))
if frequency == 0 {
if time.Now() == time.Now().Add(frequency) {
frequency = DefaultFrequency
}
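
A standalone sketch of the zero-frequency fallback (the 1m default mirrors the DefaultFrequency constant that appears later in this diff; the function name is illustrative).

```go
package main

import (
	"fmt"
	"time"
)

const defaultFrequency = 1 * time.Minute

// normalizeFrequency applies the default when no frequency was provided.
func normalizeFrequency(frequency time.Duration) time.Duration {
	if frequency == 0 {
		return defaultFrequency
	}
	return frequency
}

func main() {
	fmt.Println(normalizeFrequency(0))                // 1m0s
	fmt.Println(normalizeFrequency(30 * time.Second)) // 30s
}
```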

View File

@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
lookBackDelta := time.Minute
postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -20,9 +19,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
EvalDelay() valuer.TextDuration
EvalWindow() valuer.TextDuration
HoldDuration() valuer.TextDuration
EvalDelay() time.Duration
EvalWindow() time.Duration
HoldDuration() time.Duration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint

View File

@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {
if frequency == 0 {
if time.Now() == time.Now().Add(frequency) {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,7 +78,6 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }
// Interval returns the group's interval.
// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }
func (g *RuleTask) Pause(b bool) {

View File

@@ -61,8 +61,6 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -748,7 +746,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -814,7 +812,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
EvalWindow: ruletypes.Duration(r.evalWindow),
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()
for idx, c := range cases {
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}
logger := instrumentationtest.New().Logger()
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
require.NoError(t, err)
now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -1,8 +1,6 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
@@ -13,14 +11,14 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -30,7 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -46,21 +43,10 @@ type Handlers struct {
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Fields fields.Handler
AuthzHandler authz.Handler
Role role.Handler
}
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -74,7 +60,6 @@ func NewHandlers(
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
}
}

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,9 +41,13 @@ func TestNewHandlers(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)

View File

@@ -25,6 +25,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -66,6 +67,9 @@ type Modules struct {
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
RoleSetter role.Setter
RoleGetter role.Getter
Granter role.Granter
}
func NewModules(
@@ -85,10 +89,13 @@ func NewModules(
queryParser queryparser.QueryParser,
config Config,
dashboard dashboard.Module,
roleSetter role.Setter,
roleGetter role.Getter,
granter role.Granter,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, granter, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -110,5 +117,8 @@ func NewModules(
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
RoleSetter: roleSetter,
RoleGetter: roleGetter,
Granter: granter,
}
}

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,7 +41,10 @@ func TestNewModules(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -15,11 +15,11 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -50,8 +50,8 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ gateway.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

View File

@@ -166,7 +166,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddAuthzIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
)
}
@@ -248,8 +247,8 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.GatewayHandler,
handlers.Fields,
handlers.AuthzHandler,
modules.RoleGetter,
handlers.Role,
),
)
}

View File

@@ -21,6 +21,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
@@ -87,9 +89,10 @@ func New(
sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]],
telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]],
authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Setter, role.Granter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
roleSetterCallback func(sqlstore.SQLStore, authz.AuthZ, licensing.Licensing, []role.RegisterTypeable) role.Setter,
) (*SigNoz, error) {
// Initialize instrumentation
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -281,24 +284,11 @@ func New(
// Initialize user getter
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
// Initialize query parser (needed for dashboard module)
queryParser := queryparser.New(providerSettings)
// Initialize dashboard module (needed for authz registry)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, queryParser, querier, licensing)
// Initialize the role getter
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
// Initialize authz
authzProviderFactory := authzCallback(ctx, sqlstore, licensing, dashboard)
authzProviderFactory := authzCallback(ctx, sqlstore)
authz, err := authzProviderFactory.New(ctx, providerSettings, authz.Config{})
if err != nil {
return nil, err
@@ -328,6 +318,9 @@ func New(
return nil, err
}
// Initialize query parser
queryParser := queryparser.New(providerSettings)
// Initialize ruler from the available ruler provider factories
ruler, err := factory.NewProviderFromNamedMap(
ctx,
@@ -340,6 +333,16 @@ func New(
return nil, err
}
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
gatewayFactory := gatewayProviderFactory(licensing)
gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
if err != nil {
@@ -387,10 +390,13 @@ func New(
}
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
roleSetter := roleSetterCallback(sqlstore, authz, licensing, nil)
granter := implrole.NewGranter(implrole.NewStore(sqlstore), authz)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(
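
Aside (not part of the diff): the constructor order this file now follows, condensed from the hunks above. The role setter needs authz and licensing, and the dashboard module now needs the role setter and granter, so licensing moves later while the role getter and authz move earlier. Comments are explanatory and not from the source.

	roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))    // built before authz
	authzProviderFactory := authzCallback(ctx, sqlstore)             // no longer takes licensing or the dashboard module
	// ... the query parser, ruler, licensing and gateway are constructed in between ...
	roleSetter := roleSetterCallback(sqlstore, authz, licensing, nil)
	granter := implrole.NewGranter(implrole.NewStore(sqlstore), authz)
	dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter,
		roleSetter, granter, queryParser, querier, licensing)        // dashboard now depends on the role modules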

View File

@@ -1,154 +0,0 @@
package sqlmigration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/oklog/ulid/v2"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect"
"github.com/uptrace/bun/migrate"
)
type addAnonymousPublicDashboardTransaction struct {
sqlstore sqlstore.SQLStore
}
func NewAddAnonymousPublicDashboardTransactionFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_public_dashboard_txn"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newAddAnonymousPublicDashboardTransaction(ctx, ps, c, sqlstore)
})
}
func newAddAnonymousPublicDashboardTransaction(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore) (SQLMigration, error) {
return &addAnonymousPublicDashboardTransaction{
sqlstore: sqlstore,
}, nil
}
func (migration *addAnonymousPublicDashboardTransaction) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var storeID string
err = tx.QueryRowContext(ctx, `SELECT id FROM store WHERE name = ? LIMIT 1`, "signoz").Scan(&storeID)
if err != nil {
return err
}
// fetch all the orgs for which we need to insert the anonymous public dashboard transaction tuple.
orgIDs := []string{}
rows, err := tx.QueryContext(ctx, `SELECT id FROM organizations`)
if err != nil {
return err
}
defer rows.Close()
for rows.Next() {
var orgID string
if err := rows.Scan(&orgID); err != nil {
return err
}
orgIDs = append(orgIDs, orgID)
}
for _, orgID := range orgIDs {
// based on openfga tuple and changelog id's are same for writes.
// ref: https://github.com/openfga/openfga/blob/main/pkg/storage/sqlite/sqlite.go#L467
entropy := ulid.DefaultEntropy()
now := time.Now().UTC()
tupleID := ulid.MustNew(ulid.Timestamp(now), entropy).String()
// Add wildcard (*) transaction for signoz-anonymous role to read all public-dashboards
// This grants the signoz-anonymous role read access to all public dashboards in the organization
if migration.sqlstore.BunDB().Dialect().Name() == dialect.PG {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, _user, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, _user) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, _user, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
)
if err != nil {
return err
}
} else {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
)
if err != nil {
return err
}
}
}
err = tx.Commit()
if err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Down(context.Context, *bun.DB) error {
return nil
}
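
Aside (not part of the diff): the tuple this now-removed migration wrote once per organization, expressed as an openfgav1.TupleKey for readability. It assumes OpenFGA's usual type:id object encoding; orgID here is illustrative.

	tuple := &openfgav1.TupleKey{
		User:     "role:organization/" + orgID + "/role/" + roletypes.SigNozAnonymousRoleName + "#assignee",
		Relation: "read",
		Object:   "metaresource:organization/" + orgID + "/public-dashboard/*",
	}
	// The same ULID was reused for the tuple row and its changelog row, mirroring how
	// OpenFGA's own SQL storage writes the two together.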

View File

@@ -20,19 +20,15 @@ var (
)
var (
typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
typeUserSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeRoleSelectorRegex = regexp.MustCompile(`^[a-z-]{1,50}$`)
typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
// metaresources selectors are used to select either all or none until we introduce some hierarchy here.
// metaresources selectors are used to select either all or none
typeMetaResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)
var (
WildCardSelectorString = "*"
)
type SelectorCallbackWithClaimsFn func(*http.Request, Claims) ([]Selector, error)
type SelectorCallbackWithoutClaimsFn func(*http.Request, []*types.Organization) ([]Selector, valuer.UUID, error)
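
Aside (not part of the diff): a quick check of what the tightened selectors accept, assuming the regexes above are used unchanged.

	typeUserSelectorRegex.MatchString("123e4567-e89b-42d3-a456-426614174000") // true: a plain lowercase UUID
	typeUserSelectorRegex.MatchString("*")                                    // false: the wildcard is gone from user/role/org selectors
	typeAnonymousSelectorRegex.MatchString("*")                               // true: the wildcard now has its own selector
	typeRoleSelectorRegex.MatchString("signoz-anonymous")                     // true: lowercase letters and hyphens, 1 to 50 chars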

View File

@@ -24,10 +24,9 @@ func MustNewTypeableMetaResource(name Name) Typeable {
return typeableesource
}
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selectors {
for _, selector := range selector {
object := typeableMetaResource.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -24,10 +24,9 @@ func MustNewTypeableMetaResources(name Name) Typeable {
return resources
}
func (typeableResources *typeableMetaResources) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableResources *typeableMetaResources) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selectors {
for _, selector := range selector {
object := typeableResources.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -11,10 +11,9 @@ var _ Typeable = new(typeableOrganization)
type typeableOrganization struct{}
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selectors []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selectors {
for _, selector := range selector {
object := strings.Join([]string{typeableOrganization.Type().StringValue(), selector.String()}, ":")
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -9,10 +9,9 @@ var _ Typeable = new(typeableRole)
type typeableRole struct{}
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selectors {
for _, selector := range selector {
object := typeableRole.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -9,10 +9,9 @@ var _ Typeable = new(typeableUser)
type typeableUser struct{}
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selectors {
for _, selector := range selector {
object := typeableUser.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -8,15 +8,15 @@ import (
"time"
"unicode/utf8"
"github.com/prometheus/alertmanager/config"
signozError "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/alertmanager/config"
)
type AlertType string
@@ -40,12 +40,12 @@ const (
// PostableRule is used to create alerting rule from HTTP api
type PostableRule struct {
AlertName string `json:"alert,omitempty"`
AlertType AlertType `json:"alertType,omitempty"`
Description string `json:"description,omitempty"`
RuleType RuleType `json:"ruleType,omitempty"`
EvalWindow valuer.TextDuration `json:"evalWindow,omitempty"`
Frequency valuer.TextDuration `json:"frequency,omitempty"`
AlertName string `json:"alert,omitempty"`
AlertType AlertType `json:"alertType,omitempty"`
Description string `json:"description,omitempty"`
RuleType RuleType `json:"ruleType,omitempty"`
EvalWindow Duration `json:"evalWindow,omitempty"`
Frequency Duration `json:"frequency,omitempty"`
RuleCondition *RuleCondition `json:"condition,omitempty"`
Labels map[string]string `json:"labels,omitempty"`
@@ -71,13 +71,13 @@ type NotificationSettings struct {
Renotify Renotify `json:"renotify,omitempty"`
UsePolicy bool `json:"usePolicy,omitempty"`
// NewGroupEvalDelay is the grace period for new series to be excluded from alerts evaluation
NewGroupEvalDelay valuer.TextDuration `json:"newGroupEvalDelay,omitzero"`
NewGroupEvalDelay *Duration `json:"newGroupEvalDelay,omitempty"`
}
type Renotify struct {
Enabled bool `json:"enabled"`
ReNotifyInterval valuer.TextDuration `json:"interval,omitzero"`
AlertStates []model.AlertState `json:"alertStates,omitempty"`
Enabled bool `json:"enabled"`
ReNotifyInterval Duration `json:"interval,omitempty"`
AlertStates []model.AlertState `json:"alertStates,omitempty"`
}
func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanagertypes.NotificationConfig {
@@ -85,10 +85,10 @@ func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanager
var noDataRenotifyInterval time.Duration
if ns.Renotify.Enabled {
if slices.Contains(ns.Renotify.AlertStates, model.StateNoData) {
noDataRenotifyInterval = ns.Renotify.ReNotifyInterval.Duration()
noDataRenotifyInterval = time.Duration(ns.Renotify.ReNotifyInterval)
}
if slices.Contains(ns.Renotify.AlertStates, model.StateFiring) {
renotifyInterval = ns.Renotify.ReNotifyInterval.Duration()
renotifyInterval = time.Duration(ns.Renotify.ReNotifyInterval)
}
} else {
renotifyInterval = 8760 * time.Hour //1 year for no renotify substitute
@@ -190,12 +190,12 @@ func (r *PostableRule) processRuleDefaults() {
r.SchemaVersion = DefaultSchemaVersion
}
if r.EvalWindow.IsZero() {
r.EvalWindow = valuer.MustParseTextDuration("5m")
if r.EvalWindow == 0 {
r.EvalWindow = Duration(5 * time.Minute)
}
if r.Frequency.IsZero() {
r.Frequency = valuer.MustParseTextDuration("1m")
if r.Frequency == 0 {
r.Frequency = Duration(1 * time.Minute)
}
if r.RuleCondition != nil {
@@ -246,7 +246,7 @@ func (r *PostableRule) processRuleDefaults() {
r.NotificationSettings = &NotificationSettings{
Renotify: Renotify{
Enabled: true,
ReNotifyInterval: valuer.MustParseTextDuration("4h"),
ReNotifyInterval: Duration(4 * time.Hour),
AlertStates: []model.AlertState{model.StateFiring},
},
}
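
Aside (not part of the diff): how the Duration-typed fields behave end to end, a sketch assuming the Duration MarshalJSON/UnmarshalJSON shown further down in this comparison.

	var rule PostableRule
	_ = json.Unmarshal([]byte(`{"alert":"demo","evalWindow":"5m","frequency":"1m"}`), &rule)
	// rule.EvalWindow == Duration(5*time.Minute), rule.Frequency == Duration(time.Minute).
	// If either field is omitted it stays 0 and processRuleDefaults fills in 5m / 1m.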

View File

@@ -171,10 +171,10 @@ func TestParseIntoRule(t *testing.T) {
kind: RuleDataKindJson,
expectError: false,
validate: func(t *testing.T, rule *PostableRule) {
if rule.EvalWindow.Duration() != 5*time.Minute {
if rule.EvalWindow != Duration(5*time.Minute) {
t.Errorf("Expected default eval window '5m', got '%v'", rule.EvalWindow)
}
if rule.Frequency.Duration() != time.Minute {
if rule.Frequency != Duration(1*time.Minute) {
t.Errorf("Expected default frequency '1m', got '%v'", rule.Frequency)
}
if rule.RuleCondition.CompositeQuery.BuilderQueries["A"].Expression != "A" {
@@ -327,10 +327,10 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
// Verify evaluation window matches rule settings
if window, ok := rule.Evaluation.Spec.(RollingWindow); ok {
if !window.EvalWindow.Equal(rule.EvalWindow) {
if window.EvalWindow != rule.EvalWindow {
t.Errorf("Expected Evaluation EvalWindow %v, got %v", rule.EvalWindow, window.EvalWindow)
}
if !window.Frequency.Equal(rule.Frequency) {
if window.Frequency != rule.Frequency {
t.Errorf("Expected Evaluation Frequency %v, got %v", rule.Frequency, window.Frequency)
}
} else {
@@ -457,10 +457,10 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
t.Fatal("Expected Evaluation to be populated")
}
if window, ok := rule.Evaluation.Spec.(RollingWindow); ok {
if !window.EvalWindow.Equal(rule.EvalWindow) {
if window.EvalWindow != rule.EvalWindow {
t.Errorf("Expected Evaluation EvalWindow to be overwritten to %v, got %v", rule.EvalWindow, window.EvalWindow)
}
if !window.Frequency.Equal(rule.Frequency) {
if window.Frequency != rule.Frequency {
t.Errorf("Expected Evaluation Frequency to be overwritten to %v, got %v", rule.Frequency, window.Frequency)
}
} else {
@@ -504,7 +504,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
t.Error("Expected Evaluation to be nil for v2")
}
if rule.EvalWindow.Duration() != 5*time.Minute {
if rule.EvalWindow != Duration(5*time.Minute) {
t.Error("Expected default EvalWindow to be applied")
}
if rule.RuleType != RuleTypeThreshold {

View File

@@ -19,36 +19,36 @@ var (
type Evaluation interface {
NextWindowFor(curr time.Time) (time.Time, time.Time)
GetFrequency() valuer.TextDuration
GetFrequency() Duration
}
type RollingWindow struct {
EvalWindow valuer.TextDuration `json:"evalWindow"`
Frequency valuer.TextDuration `json:"frequency"`
EvalWindow Duration `json:"evalWindow"`
Frequency Duration `json:"frequency"`
}
func (rollingWindow RollingWindow) Validate() error {
if !rollingWindow.EvalWindow.IsPositive() {
if rollingWindow.EvalWindow <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "evalWindow must be greater than zero")
}
if !rollingWindow.Frequency.IsPositive() {
if rollingWindow.Frequency <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero")
}
return nil
}
func (rollingWindow RollingWindow) NextWindowFor(curr time.Time) (time.Time, time.Time) {
return curr.Add(-rollingWindow.EvalWindow.Duration()), curr
return curr.Add(time.Duration(-rollingWindow.EvalWindow)), curr
}
func (rollingWindow RollingWindow) GetFrequency() valuer.TextDuration {
func (rollingWindow RollingWindow) GetFrequency() Duration {
return rollingWindow.Frequency
}
type CumulativeWindow struct {
Schedule CumulativeSchedule `json:"schedule"`
Frequency valuer.TextDuration `json:"frequency"`
Timezone string `json:"timezone"`
Schedule CumulativeSchedule `json:"schedule"`
Frequency Duration `json:"frequency"`
Timezone string `json:"timezone"`
}
type CumulativeSchedule struct {
@@ -79,7 +79,7 @@ func (cumulativeWindow CumulativeWindow) Validate() error {
if _, err := time.LoadLocation(cumulativeWindow.Timezone); err != nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "timezone is invalid")
}
if !cumulativeWindow.Frequency.IsPositive() {
if cumulativeWindow.Frequency <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero")
}
return nil
@@ -150,8 +150,8 @@ func (cumulativeWindow CumulativeWindow) NextWindowFor(curr time.Time) (time.Tim
return windowStart.In(time.UTC), currInTZ.In(time.UTC)
}
func (cumulativeWindow CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time {
schedule := cumulativeWindow.Schedule
func (cw CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time {
schedule := cw.Schedule
switch schedule.Type {
case ScheduleTypeHourly:
@@ -220,7 +220,7 @@ func (cumulativeWindow CumulativeWindow) getLastScheduleTime(curr time.Time, loc
}
}
func (cumulativeWindow CumulativeWindow) GetFrequency() valuer.TextDuration {
func (cumulativeWindow CumulativeWindow) GetFrequency() Duration {
return cumulativeWindow.Frequency
}
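
Aside (not part of the diff): a worked example of the rolling-window arithmetic after the switch back to Duration, mirroring the first test case below.

	rw := RollingWindow{EvalWindow: Duration(5 * time.Minute), Frequency: Duration(time.Minute)}
	start, end := rw.NextWindowFor(time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC))
	// start = 12:25:00 UTC, end = 12:30:00 UTC: the window always ends at curr and reaches
	// back exactly EvalWindow.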

View File

@@ -4,35 +4,33 @@ import (
"encoding/json"
"testing"
"time"
"github.com/SigNoz/signoz/pkg/valuer"
)
func TestRollingWindow_EvaluationTime(t *testing.T) {
tests := []struct {
name string
evalWindow valuer.TextDuration
evalWindow Duration
current time.Time
wantStart time.Time
wantEnd time.Time
}{
{
name: "5 minute rolling window",
evalWindow: valuer.MustParseTextDuration("5m"),
evalWindow: Duration(5 * time.Minute),
current: time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC),
wantStart: time.Date(2023, 12, 1, 12, 25, 0, 0, time.UTC),
wantEnd: time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC),
},
{
name: "1 hour rolling window",
evalWindow: valuer.MustParseTextDuration("1h"),
evalWindow: Duration(1 * time.Hour),
current: time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC),
wantStart: time.Date(2023, 12, 1, 14, 45, 30, 0, time.UTC),
wantEnd: time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC),
},
{
name: "30 second rolling window",
evalWindow: valuer.MustParseTextDuration("30s"),
evalWindow: Duration(30 * time.Second),
current: time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC),
wantStart: time.Date(2023, 12, 1, 12, 29, 45, 0, time.UTC),
wantEnd: time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC),
@@ -43,7 +41,7 @@ func TestRollingWindow_EvaluationTime(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
rw := &RollingWindow{
EvalWindow: tt.evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
Frequency: Duration(1 * time.Minute),
}
gotStart, gotEnd := rw.NextWindowFor(tt.current)
@@ -71,7 +69,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Type: ScheduleTypeHourly,
Minute: intPtr(15),
},
Frequency: valuer.MustParseTextDuration("5m"),
Frequency: Duration(5 * time.Minute),
Timezone: "UTC",
},
current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC),
@@ -85,7 +83,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Hour: intPtr(9),
Minute: intPtr(30),
},
Frequency: valuer.MustParseTextDuration("1h"),
Frequency: Duration(1 * time.Hour),
Timezone: "Asia/Kolkata",
},
current: time.Date(2025, 3, 15, 15, 30, 0, 0, time.UTC),
@@ -100,7 +98,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Hour: intPtr(14),
Minute: intPtr(0),
},
Frequency: valuer.MustParseTextDuration("24h"),
Frequency: Duration(24 * time.Hour),
Timezone: "America/New_York",
},
current: time.Date(2025, 3, 18, 19, 0, 0, 0, time.UTC), // Tuesday
@@ -115,7 +113,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Hour: intPtr(0),
Minute: intPtr(0),
},
Frequency: valuer.MustParseTextDuration("24h"),
Frequency: Duration(24 * time.Hour),
Timezone: "UTC",
},
current: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC),
@@ -127,7 +125,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Schedule: CumulativeSchedule{
Type: ScheduleTypeHourly,
},
Frequency: valuer.MustParseTextDuration("5m"),
Frequency: Duration(5 * time.Minute),
Timezone: "UTC",
},
current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC),
@@ -757,8 +755,8 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
jsonInput: `{"kind":"rolling","spec":{"evalWindow":"5m","frequency":"1m"}}`,
wantKind: RollingEvaluation,
wantSpec: RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: Duration(5 * time.Minute),
Frequency: Duration(1 * time.Minute),
},
},
{
@@ -770,7 +768,7 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
Type: ScheduleTypeHourly,
Minute: intPtr(30),
},
Frequency: valuer.MustParseTextDuration("2m"),
Frequency: Duration(2 * time.Minute),
Timezone: "UTC",
},
},
@@ -849,10 +847,10 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
t.Fatalf("Expected RollingWindow spec, got %T", envelope.Spec)
}
wantSpec := tt.wantSpec.(RollingWindow)
if !gotSpec.EvalWindow.Equal(wantSpec.EvalWindow) {
if gotSpec.EvalWindow != wantSpec.EvalWindow {
t.Errorf("RollingWindow.EvalWindow = %v, want %v", gotSpec.EvalWindow, wantSpec.EvalWindow)
}
if !gotSpec.Frequency.Equal(wantSpec.Frequency) {
if gotSpec.Frequency != wantSpec.Frequency {
t.Errorf("RollingWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency)
}
case CumulativeEvaluation:
@@ -868,7 +866,7 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
(gotSpec.Schedule.Minute != nil && wantSpec.Schedule.Minute != nil && *gotSpec.Schedule.Minute != *wantSpec.Schedule.Minute) {
t.Errorf("CumulativeWindow.Schedule.Minute = %v, want %v", gotSpec.Schedule.Minute, wantSpec.Schedule.Minute)
}
if !gotSpec.Frequency.Equal(wantSpec.Frequency) {
if gotSpec.Frequency != wantSpec.Frequency {
t.Errorf("CumulativeWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency)
}
if gotSpec.Timezone != wantSpec.Timezone {

View File

@@ -131,7 +131,7 @@ func (m *GettablePlannedMaintenance) checkDaily(currentTime time.Time, rec *Recu
if candidate.After(currentTime) {
candidate = candidate.AddDate(0, 0, -1)
}
return currentTime.Sub(candidate) <= rec.Duration.Duration()
return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
}
// checkWeekly finds the most recent allowed occurrence by rebasing the recurrences
@@ -160,7 +160,7 @@ func (m *GettablePlannedMaintenance) checkWeekly(currentTime time.Time, rec *Rec
if candidate.After(currentTime) {
candidate = candidate.AddDate(0, 0, -7)
}
if currentTime.Sub(candidate) <= rec.Duration.Duration() {
if currentTime.Sub(candidate) <= time.Duration(rec.Duration) {
return true
}
}
@@ -198,7 +198,7 @@ func (m *GettablePlannedMaintenance) checkMonthly(currentTime time.Time, rec *Re
)
}
}
return currentTime.Sub(candidate) <= rec.Duration.Duration()
return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
}
func (m *GettablePlannedMaintenance) IsActive(now time.Time) bool {
@@ -255,7 +255,7 @@ func (m *GettablePlannedMaintenance) Validate() error {
if m.Schedule.Recurrence.RepeatType == "" {
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing repeat type in the payload")
}
if m.Schedule.Recurrence.Duration.IsZero() {
if m.Schedule.Recurrence.Duration == 0 {
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing duration in the payload")
}
if m.Schedule.Recurrence.EndTime != nil && m.Schedule.Recurrence.EndTime.Before(m.Schedule.Recurrence.StartTime) {
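
Aside (not part of the diff): the active-window check these hunks rewrite, as a worked example matching the daily test case elsewhere in this comparison.

	rec := &Recurrence{
		StartTime:  time.Date(2024, 1, 1, 23, 0, 0, 0, time.UTC),
		Duration:   Duration(2 * time.Hour),
		RepeatType: RepeatTypeDaily,
	}
	// At 2024-01-05 00:30 UTC the most recent daily occurrence is 2024-01-04 23:00, so
	// currentTime.Sub(candidate) is 1h30m, which is <= time.Duration(rec.Duration), and the
	// maintenance window is treated as active.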

View File

@@ -3,8 +3,6 @@ package ruletypes
import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/valuer"
)
// Helper function to create a time pointer
@@ -27,7 +25,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "Europe/London",
Recurrence: &Recurrence{
StartTime: time.Date(2025, 3, 1, 0, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("24h"),
Duration: Duration(time.Hour * 24),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday, RepeatOnTuesday, RepeatOnWednesday, RepeatOnThursday, RepeatOnFriday, RepeatOnSunday},
},
@@ -44,7 +42,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
Duration: valuer.MustParseTextDuration("4h"), // Until Tuesday 02:00
Duration: Duration(time.Hour * 4), // Until Tuesday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -61,7 +59,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
Duration: valuer.MustParseTextDuration("4h"), // Until Tuesday 02:00
Duration: Duration(time.Hour * 4), // Until Tuesday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -78,7 +76,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
Duration: valuer.MustParseTextDuration("52h"), // Until Thursday 02:00
Duration: Duration(time.Hour * 52), // Until Thursday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -95,7 +93,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 2, 22, 0, 0, 0, time.UTC), // Tuesday 22:00
Duration: valuer.MustParseTextDuration("4h"), // Until Wednesday 02:00
Duration: Duration(time.Hour * 4), // Until Wednesday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnTuesday}, // Only Tuesday
},
@@ -112,7 +110,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 23, 0, 0, 0, time.UTC), // 23:00
Duration: valuer.MustParseTextDuration("2h"), // Until 01:00 next day
Duration: Duration(time.Hour * 2), // Until 01:00 next day
RepeatType: RepeatTypeDaily,
},
},
@@ -128,7 +126,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -144,7 +142,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -160,7 +158,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 28, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("72h"), // 3 days
Duration: Duration(time.Hour * 72), // 3 days
RepeatType: RepeatTypeMonthly,
},
},
@@ -176,7 +174,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 28, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("72h"), // 3 days
Duration: Duration(time.Hour * 72), // 3 days
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnSunday},
},
@@ -193,7 +191,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 30, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("48h"), // 2 days, crosses to Feb 1
Duration: Duration(time.Hour * 48), // 2 days, crosses to Feb 1
RepeatType: RepeatTypeMonthly,
},
},
@@ -209,7 +207,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "America/New_York", // UTC-5 or UTC-4 depending on DST
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 22, 0, 0, 0, time.FixedZone("America/New_York", -5*3600)),
Duration: valuer.MustParseTextDuration("4h"),
Duration: Duration(time.Hour * 4),
RepeatType: RepeatTypeDaily,
},
},
@@ -225,7 +223,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -242,7 +240,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
EndTime: timePtr(time.Date(2024, 1, 10, 12, 0, 0, 0, time.UTC)),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -258,7 +256,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 3, 31, 22, 0, 0, 0, time.UTC), // March 31, 22:00
Duration: valuer.MustParseTextDuration("6h"), // Until April 1, 04:00
Duration: Duration(time.Hour * 6), // Until April 1, 04:00
RepeatType: RepeatTypeMonthly,
},
},
@@ -274,7 +272,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{}, // Empty - should apply to all days
},
@@ -291,7 +289,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},
@@ -306,7 +304,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 23, 30, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("1h"), // Crosses to 00:30 next day
Duration: Duration(time.Hour * 1), // Crosses to 00:30 next day
RepeatType: RepeatTypeDaily,
},
},
@@ -321,7 +319,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},
@@ -336,7 +334,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 30, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("48h"), // 2 days duration
Duration: Duration(time.Hour * 48), // 2 days duration
RepeatType: RepeatTypeMonthly,
},
},
@@ -351,7 +349,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 23, 0, 0, 0, time.UTC), // Monday 23:00
Duration: valuer.MustParseTextDuration("2h"), // Until Tuesday 01:00
Duration: Duration(time.Hour * 2), // Until Tuesday 01:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -367,7 +365,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},
@@ -382,7 +380,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 22, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("4h"), // Until 02:00 next day
Duration: Duration(time.Hour * 4), // Until 02:00 next day
RepeatType: RepeatTypeDaily,
},
},
@@ -397,7 +395,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},
@@ -448,7 +446,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "US/Eastern",
Recurrence: &Recurrence{
StartTime: time.Date(2025, 3, 29, 20, 0, 0, 0, time.FixedZone("US/Eastern", -4*3600)),
Duration: valuer.MustParseTextDuration("24h"),
Duration: Duration(time.Hour * 24),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnSunday, RepeatOnSaturday},
},
@@ -464,7 +462,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -479,7 +477,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -494,7 +492,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeDaily,
},
},
@@ -509,7 +507,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -525,7 +523,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -541,7 +539,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -557,7 +555,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -573,7 +571,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -589,7 +587,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},
@@ -604,7 +602,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},
@@ -619,7 +617,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: valuer.MustParseTextDuration("2h"),
Duration: Duration(time.Hour * 2),
RepeatType: RepeatTypeMonthly,
},
},

View File

@@ -5,7 +5,7 @@ import (
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/errors"
)
type RepeatType string
@@ -38,12 +38,40 @@ var RepeatOnAllMap = map[RepeatOn]time.Weekday{
RepeatOnSaturday: time.Saturday,
}
type Duration time.Duration
func (d Duration) MarshalJSON() ([]byte, error) {
return json.Marshal(time.Duration(d).String())
}
func (d *Duration) UnmarshalJSON(b []byte) error {
var v interface{}
if err := json.Unmarshal(b, &v); err != nil {
return err
}
switch value := v.(type) {
case float64:
*d = Duration(time.Duration(value))
return nil
case string:
tmp, err := time.ParseDuration(value)
if err != nil {
return err
}
*d = Duration(tmp)
return nil
default:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
}
type Recurrence struct {
StartTime time.Time `json:"startTime"`
EndTime *time.Time `json:"endTime,omitempty"`
Duration valuer.TextDuration `json:"duration"`
RepeatType RepeatType `json:"repeatType"`
RepeatOn []RepeatOn `json:"repeatOn"`
StartTime time.Time `json:"startTime"`
EndTime *time.Time `json:"endTime,omitempty"`
Duration Duration `json:"duration"`
RepeatType RepeatType `json:"repeatType"`
RepeatOn []RepeatOn `json:"repeatOn"`
}
func (r *Recurrence) Scan(src interface{}) error {
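
Aside (not part of the diff): how the reinstated Duration type round-trips through JSON, based only on the methods above. Marshalling normalizes the text, unlike the removed TextDuration (further down) which preserved the original "90m" form.

	d := Duration(90 * time.Minute)
	b, _ := json.Marshal(d) // "1h30m0s": MarshalJSON goes through time.Duration.String()

	var parsed Duration
	_ = json.Unmarshal([]byte(`"90m"`), &parsed)         // string durations are accepted
	_ = json.Unmarshal([]byte(`5400000000000`), &parsed) // so are raw nanosecond counts (the float64 branch)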

View File

@@ -266,110 +266,3 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
type GettableFieldKeys struct {
Keys map[string][]*TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
type PostableFieldKeysParams struct {
Signal Signal `query:"signal"`
Source Source `query:"source"`
Limit int `query:"limit"`
StartUnixMilli int64 `query:"startUnixMilli"`
EndUnixMilli int64 `query:"endUnixMilli"`
FieldContext FieldContext `query:"fieldContext"`
FieldDataType FieldDataType `query:"fieldDataType"`
MetricName string `query:"metricName"`
SearchText string `query:"searchText"`
}
type GettableFieldValues struct {
Values *TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
type PostableFieldValueParams struct {
PostableFieldKeysParams
Name string `query:"name"`
ExistingQuery string `query:"existingQuery"`
}
func NewFieldKeySelectorFromPostableFieldKeysParams(params PostableFieldKeysParams) *FieldKeySelector {
var req FieldKeySelector
if params.StartUnixMilli != 0 {
req.StartUnixMilli = params.StartUnixMilli
// Round down to the nearest 6 hours (21600000 milliseconds)
req.StartUnixMilli -= req.StartUnixMilli % 21600000
}
if params.EndUnixMilli != 0 {
req.EndUnixMilli = params.EndUnixMilli
}
req.Signal = params.Signal
req.Source = params.Source
req.FieldContext = params.FieldContext
req.FieldDataType = params.FieldDataType
req.SelectorMatchType = FieldSelectorMatchTypeFuzzy
if params.Limit != 0 {
req.Limit = params.Limit
} else {
req.Limit = 1000
}
if params.MetricName != "" {
req.MetricContext = &MetricContext{
MetricName: params.MetricName,
}
}
req.Name = params.SearchText
if params.SearchText != "" && params.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.SearchText)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, req.Signal) {
req.Name = parsedFieldKey.Name
req.FieldContext = parsedFieldKey.FieldContext
}
}
}
return &req
}
func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValueParams) *FieldValueSelector {
keySelector := NewFieldKeySelectorFromPostableFieldKeysParams(params.PostableFieldKeysParams)
fieldValueSelector := &FieldValueSelector{
FieldKeySelector: keySelector,
}
fieldValueSelector.Name = params.Name
if params.Name != "" && fieldValueSelector.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.Name)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, fieldValueSelector.Signal) {
fieldValueSelector.Name = parsedFieldKey.Name
fieldValueSelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
fieldValueSelector.ExistingQuery = params.ExistingQuery
fieldValueSelector.Value = params.SearchText
if params.Limit != 0 {
fieldValueSelector.Limit = params.Limit
} else {
fieldValueSelector.Limit = 50
}
return fieldValueSelector
}
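
Aside (not part of the diff): the 6-hour bucketing the removed helper applied to start timestamps, shown as plain arithmetic with an illustrative value.

	startUnixMilli := int64(1717171717000)
	startUnixMilli -= startUnixMilli % 21600000 // snap down to the nearest 6-hour boundary: 1717156800000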

View File

@@ -154,21 +154,3 @@ func (f FieldContext) TagType() string {
}
return ""
}
func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
if ctx == FieldContextResource ||
ctx == FieldContextAttribute ||
ctx == FieldContextScope {
return true
}
switch signal.StringValue() {
case SignalLogs.StringValue():
return ctx == FieldContextLog || ctx == FieldContextBody
case SignalTraces.StringValue():
return ctx == FieldContextSpan || ctx == FieldContextEvent || ctx == FieldContextTrace
case SignalMetrics.StringValue():
return ctx == FieldContextMetric
}
return true
}

View File

@@ -107,13 +107,3 @@ func (enum *Email) UnmarshalText(text []byte) error {
func (enum Email) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
func (enum *Email) UnmarshalParam(param string) error {
email, err := NewEmail(param)
if err != nil {
return err
}
*enum = email
return nil
}

View File

@@ -77,10 +77,3 @@ func (enum *String) UnmarshalText(text []byte) error {
func (enum String) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
// Implement Gin's BindUnmarshaler interface
// See https://github.com/SigNoz/signoz/pull/10219 description for additional details
func (enum *String) UnmarshalParam(param string) error {
*enum = NewString(param)
return nil
}

View File

@@ -1,163 +0,0 @@
package valuer
import (
"database/sql/driver"
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
)
var _ Valuer = (*TextDuration)(nil)
// TextDuration preserves the human-readable duration text as provided by the input.
// It keeps the raw JSON bytes so serialization does not normalize values like
// "90m" into "1h30m0s".
type TextDuration struct {
text string
value time.Duration
}
// ParseTextDuration parses a human-readable duration string.
// This preserves the raw text so that it can be serialized back to JSON.
func ParseTextDuration(s string) (TextDuration, error) {
d, err := time.ParseDuration(s)
if err != nil {
return TextDuration{}, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse duration text")
}
return TextDuration{text: s, value: d}, nil
}
// MustParseTextDuration parses a human-readable duration string, preserving
// the raw text and panics if an error occurs.
func MustParseTextDuration(s string) TextDuration {
d, err := ParseTextDuration(s)
if err != nil {
panic(err)
}
return d
}
// Duration returns the [time.Duration] type.
func (d TextDuration) Duration() time.Duration {
return d.value
}
// IsZero implements [Valuer].
// It returns whether the parsed duration is zero.
func (d TextDuration) IsZero() bool {
return d.value == 0
}
// IsPositive whether the duration is greater than zero.
func (d TextDuration) IsPositive() bool {
return d.value > 0
}
// String implements the [fmt.Stringer] interface.
func (d TextDuration) String() string {
if len(d.text) > 0 {
return d.text
}
return d.value.String()
}
// StringValue implements [Valuer].
func (d TextDuration) StringValue() string {
return d.String()
}
// MarshalJSON implements the [encoding/json.Marshaler] interface.
// It serializes the duration value in a human-readable format (1h30m0s).
// If the original text is available, it is returned as-is. Example: 90m is not normalized to 1h30m0s.
func (d TextDuration) MarshalJSON() ([]byte, error) {
return json.Marshal(d.String())
}
// UnmarshalJSON implements the [encoding/json.Unmarshaler] interface.
// It parses string or numeric durations, and stores the string representation.
func (d *TextDuration) UnmarshalJSON(b []byte) error {
var v any
if err := json.Unmarshal(b, &v); err != nil {
return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
switch value := v.(type) {
case float64:
d.value = time.Duration(value)
d.text = ""
return nil
case string:
tmp, err := time.ParseDuration(value)
if err != nil {
return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
d.value = tmp
d.text = value
return nil
default:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
}
// MarshalText implements [encoding.TextMarshaler].
func (d TextDuration) MarshalText() ([]byte, error) {
return []byte(d.String()), nil
}
// UnmarshalText implements [encoding.TextUnmarshaler].
func (d *TextDuration) UnmarshalText(text []byte) error {
s := string(text)
tmp, err := time.ParseDuration(s)
if err != nil {
return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse duration text")
}
d.value = tmp
d.text = s
return nil
}
// Value implements [driver.Valuer] by delegating to the underlying duration.
func (d TextDuration) Value() (driver.Value, error) {
return d.String(), nil
}
// Scan implements [database/sql.Scanner] to read the duration from the database.
func (d *TextDuration) Scan(value any) error {
if value == nil {
d.value = 0
d.text = ""
return nil
}
switch v := value.(type) {
case int64:
d.value = time.Duration(v)
d.text = ""
return nil
case []byte:
return d.UnmarshalText(v)
case string:
return d.UnmarshalText([]byte(v))
default:
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
"cannot scan type %T into TextDuration", value)
}
}
func (d *TextDuration) UnmarshalParam(param string) error {
return d.UnmarshalText([]byte(param))
}
// Equal reports the two TextDuration represent the same underlying duration values.
//
// Note that the String representations for them can be different.
func (d TextDuration) Equal(d2 TextDuration) bool {
return d.value == d2.value
}
// Milliseconds returns the duration as an integer millisecond count.
func (d TextDuration) Milliseconds() int64 {
return d.value.Milliseconds()
}

View File

@@ -1,144 +0,0 @@
package valuer
import (
"encoding/json"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTextDuration(t *testing.T) {
cases := []struct {
name string
input string
error bool
duration time.Duration
string string
}{
{
name: "ParseTextDuration(10s)",
input: "10s",
duration: 10 * time.Second,
string: "10s",
},
{
name: "ParseTextDuration(90m)",
input: "90m",
duration: 90 * time.Minute,
string: "90m",
},
{
name: "ParseTextDuration(1h30m)",
input: "1h30m",
duration: 90 * time.Minute,
string: "1h30m",
},
{
name: "Invalid duration",
input: "invalid",
error: true,
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
var err error
d, err := ParseTextDuration(tc.input)
if tc.error {
assert.Error(t, err)
return
}
assert.NoError(t, err)
assert.Equal(t, tc.duration, d.Duration())
assert.Equal(t, tc.string, d.String())
})
}
}
func TestTextDuration_MustParsePanics(t *testing.T) {
assert.Panics(t, func() {
MustParseTextDuration("not-a-duration")
})
}
func TestTextDuration_JSON(t *testing.T) {
t.Run("RoundTrip", func(t *testing.T) {
parsed, err := ParseTextDuration("90m")
require.NoError(t, err)
data, err := json.Marshal(parsed)
require.NoError(t, err)
assert.Equal(t, `"90m"`, string(data))
var decoded TextDuration
require.NoError(t, json.Unmarshal([]byte(`"2h"`), &decoded))
assert.Equal(t, 2*time.Hour, decoded.Duration())
assert.Equal(t, "2h", decoded.String())
})
t.Run("Numeric", func(t *testing.T) {
var decoded TextDuration
require.NoError(t, json.Unmarshal([]byte(`1000000000`), &decoded))
assert.Equal(t, time.Second, decoded.Duration())
assert.Equal(t, "1s", decoded.String())
})
t.Run("Invalid", func(t *testing.T) {
var decoded TextDuration
assert.Error(t, json.Unmarshal([]byte(`true`), &decoded))
assert.Error(t, json.Unmarshal([]byte(`"nope"`), &decoded))
})
}
func TestTextDurationTextMarshaling(t *testing.T) {
parsed, err := ParseTextDuration("45s")
require.NoError(t, err)
data, err := parsed.MarshalText()
require.NoError(t, err)
assert.Equal(t, "45s", string(data))
var decoded TextDuration
require.NoError(t, decoded.UnmarshalText([]byte("2m")))
assert.Equal(t, 2*time.Minute, decoded.Duration())
assert.Equal(t, "2m", decoded.String())
assert.Error(t, decoded.UnmarshalText([]byte("invalid")))
}
func TestTextDurationValueAndScan(t *testing.T) {
parsed, err := ParseTextDuration("2s")
require.NoError(t, err)
val, err := parsed.Value()
require.NoError(t, err)
assert.Equal(t, "2s", val)
var scanned TextDuration
err = scanned.Scan(nil)
require.NoError(t, err)
assert.True(t, scanned.IsZero())
assert.Equal(t, "0s", scanned.String())
err = scanned.Scan([]byte("3s"))
require.NoError(t, err)
assert.Equal(t, 3*time.Second, scanned.Duration())
assert.Equal(t, "3s", scanned.String())
err = scanned.Scan(true)
assert.Error(t, err)
}
func TestTextDurationUnmarshalParam(t *testing.T) {
var decoded TextDuration
require.NoError(t, decoded.UnmarshalParam("2m"))
assert.Equal(t, 2*time.Minute, decoded.Duration())
assert.Equal(t, "2m", decoded.String())
assert.Error(t, decoded.UnmarshalParam("invalid"))
}

View File

@@ -140,13 +140,3 @@ func (enum *UUID) UnmarshalText(text []byte) error {
func (enum UUID) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
func (enum *UUID) UnmarshalParam(param string) error {
uuid, err := NewUUID(param)
if err != nil {
return err
}
*enum = uuid
return nil
}

View File

@@ -8,7 +8,6 @@ import (
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
ginbinding "github.com/gin-gonic/gin/binding"
)
var (
@@ -43,7 +42,4 @@ type Valuer interface {
// Implement encoding.TextUnmarshaler to allow the value to be marshalled unto a string
encoding.TextMarshaler
// Implement Gin's BindUnmarshaler interface
ginbinding.BindUnmarshaler
}

Some files were not shown because too many files have changed in this diff Show More