Compare commits

...

14 Commits

Author SHA1 Message Date
amlannandy a6df8ee820 chore: update types 2026-02-09 17:03:10 +07:00
amlannandy 74227f015c chore: address comments 2026-02-09 17:03:10 +07:00
amlannandy 1b9d1fb515 chore: additional changes 2026-02-09 17:03:10 +07:00
amlannandy 337677e4f0 chore: fix CI 2026-02-09 17:03:10 +07:00
amlannandy eec2c1176d chore: minor changes 2026-02-09 17:03:10 +07:00
amlannandy 243ade9715 chore: switch to generated apis 2026-02-09 17:03:10 +07:00
amlannandy d8f80fdb35 chore: fix ci 2026-02-09 17:03:10 +07:00
amlannandy a66d1a44d8 chore: update tests 2026-02-09 17:03:10 +07:00
amlannandy 5c7e276fc4 chore: update loading state 2026-02-09 17:03:09 +07:00
amlannandy d19e0c3fa2 chore: api migration 2026-02-09 17:03:09 +07:00
amlannandy 6633785d3a chore: add new v2 api func and hooks 2026-02-09 17:03:09 +07:00
Jatinderjit Singh 9e466b56b2 chore: preserve the original duration format (#10149) 2026-02-09 09:24:58 +00:00
Vikrant Gupta 4ad0baa2a2 feat(authz): add support for wildcard selector (#10208) 2026-02-09 14:37:44 +05:30
* feat(authz): remove unnecessary dependency injection for role setter
* feat(authz): deprecate role module
* feat(authz): deprecate role module
* feat(authz): split between server and sql actions
* feat(authz): add bootstrap for managed role transactions
* feat(authz): update and add integration tests
* feat(authz): match names for factory and migration
* feat(authz): fix integration tests
* feat(authz): reduce calls on organisation creeation
Srikanth Chekuri 24b588bfba chore: move fields api to openapi spec (#10219) 2026-02-09 13:43:36 +05:30
101 changed files with 3707 additions and 2299 deletions

View File

@@ -18,8 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -78,18 +76,15 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -14,7 +14,6 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -29,8 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -118,18 +115,15 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)
if err != nil {

View File

@@ -607,6 +607,178 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/fields/keys:
get:
deprecated: false
description: This endpoint returns field keys
operationId: GetFieldsKeys
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field keys
tags:
- fields
/api/v1/fields/values:
get:
deprecated: false
description: This endpoint returns field values
operationId: GetFieldsValues
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
- in: query
name: name
schema:
type: string
- in: query
name: existingQuery
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field values
tags:
- fields
/api/v1/getResetPasswordToken/{id}:
get:
deprecated: false
@@ -4244,6 +4416,60 @@ components:
format: date-time
type: string
type: object
TelemetrytypesGettableFieldKeys:
properties:
complete:
type: boolean
keys:
additionalProperties:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
nullable: true
type: object
type: object
TelemetrytypesGettableFieldValues:
properties:
complete:
type: boolean
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
type: object
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
fieldDataType:
type: string
name:
type: string
signal:
type: string
unit:
type: string
type: object
TelemetrytypesTelemetryFieldValues:
properties:
boolValues:
items:
type: boolean
type: array
numberValues:
items:
format: double
type: number
type: array
relatedValues:
items:
type: string
type: array
stringValues:
items:
type: string
type: array
type: object
TypesChangePasswordRequest:
properties:
newPassword:
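
The spec excerpt above defines the new GET /api/v1/fields/keys and /api/v1/fields/values endpoints. Below is a minimal client sketch for the keys endpoint; the query parameters and the `{status, data}` response envelope come from the spec, while the base URL, the SIGNOZ-API-KEY header name, and the concrete parameter values are assumptions for illustration only.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Query parameters as defined in the spec above; the values are illustrative.
	params := url.Values{}
	params.Set("signal", "traces")
	params.Set("limit", "50")
	params.Set("searchText", "http")

	// Base URL and API-key header name are assumptions, not part of the spec excerpt.
	req, err := http.NewRequest(http.MethodGet,
		"http://localhost:8080/api/v1/fields/keys?"+params.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("SIGNOZ-API-KEY", "<api-key-with-VIEWER-scope>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Response envelope per the spec: status string plus a
	// TelemetrytypesGettableFieldKeys payload (keys is a map of arrays of
	// TelemetryFieldKey objects; kept as raw JSON here for brevity).
	var body struct {
		Status string `json:"status"`
		Data   struct {
			Complete bool                         `json:"complete"`
			Keys     map[string][]json.RawMessage `json:"keys"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		panic(err)
	}
	fmt.Println(body.Status, body.Data.Complete, len(body.Data.Keys))
}
```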

View File

@@ -2,12 +2,18 @@ package openfgaauthz
import (
"context"
"slices"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -15,84 +21,320 @@ import (
type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store roletypes.Store
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
openfgaServer: openfgaServer,
licensing: licensing,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
registry: registry,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.pkgAuthzService.Start(ctx)
return provider.openfgaServer.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.pkgAuthzService.Stop(ctx)
return provider.openfgaServer.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.pkgAuthzService.Check(ctx, tuple)
return provider.openfgaServer.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
tuples := make([]*openfgav1.TupleKey, 0)
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
if err != nil {
return err
}
tuples = append(tuples, grantTuples...)
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
if err != nil {
return err
}
tuples = append(tuples, managedRoleTuples...)
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range provider.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, provider.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range provider.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = provider.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return provider.store.Delete(ctx, orgID, id)
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := []*openfgav1.TupleKey{}
// Grant the admin role to the user
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
adminTuple, err := authtypes.TypeableRole.Tuples(
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, adminTuple...)
// Grant the admin role to the anonymous user
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
anonymousTuple, err := authtypes.TypeableRole.Tuples(
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, anonymousTuple...)
return tuples, nil
}
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
transactionsByRole := make(map[string][]*authtypes.Transaction)
for _, register := range provider.registry {
for roleName, txns := range register.MustGetManagedRoleTransactions() {
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
}
}
tuples := make([]*openfgav1.TupleKey, 0)
for roleName, transactions := range transactionsByRole {
for _, txn := range transactions {
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
txnTuples, err := typeable.Tuples(
authtypes.MustNewSubject(
authtypes.TypeableRole,
roleName,
orgID,
&authtypes.RelationAssignee,
),
txn.Relation,
[]authtypes.Selector{txn.Object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, txnTuples...)
}
}
return tuples, nil
}
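
The provider above now owns role storage, license-gated role mutations, and a managed-role bootstrap. The sketch below shows one plausible call site for CreateManagedUserRoleTransactions during organization/user creation; only the method signature and the tuples it writes are taken from the diff, the surrounding wiring, package name, and function are assumptions.

```go
package bootstrap

import (
	"context"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// managedRoleBootstrapper names only the method used below (signature taken
// from the provider diff), so this sketch does not assume the full shape of
// the authz.AuthZ interface.
type managedRoleBootstrapper interface {
	CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error
}

// BootstrapManagedRoles is an illustrative call site, not taken from the PR.
// Per the provider code above, a single call writes in one batch:
//   - the signoz admin role granted to the new user,
//   - the signoz anonymous role granted to the anonymous user,
//   - the managed-role transaction tuples contributed by each RegisterTypeable
//     in the registry (e.g. the dashboard module's wildcard public-dashboard read).
func BootstrapManagedRoles(ctx context.Context, authz managedRoleBootstrapper, orgID, userID valuer.UUID) error {
	return authz.CreateManagedUserRoleTransactions(ctx, orgID, userID)
}
```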

View File

@@ -0,0 +1,83 @@
package openfgaserver
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type Server struct {
pkgAuthzService authz.AuthZ
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return server.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/types"
@@ -26,13 +25,11 @@ type module struct {
pkgDashboardModule dashboard.Module
store dashboardtypes.Store
settings factory.ScopedProviderSettings
roleSetter role.Setter
granter role.Granter
querier querier.Querier
licensing licensing.Licensing
}
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
@@ -40,8 +37,6 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
pkgDashboardModule: pkgDashboardModule,
store: store,
settings: scopedProviderSettings,
roleSetter: roleSetter,
granter: granter,
querier: querier,
licensing: licensing,
}
@@ -61,29 +56,6 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
if err != nil {
return err
}
additionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
if err != nil {
return err
}
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
if err != nil {
return err
@@ -128,6 +100,7 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
return []authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
}, storableDashboard.OrgID, nil
}
@@ -190,29 +163,6 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
@@ -250,10 +200,6 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
return module.pkgDashboardModule.List(ctx, orgID)
}
@@ -266,34 +212,27 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
}
return nil
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{
Type: authtypes.TypeMetaResource,
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
),
},
},
}
}
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
return module.store.DeletePublic(ctx, dashboardID.StringValue())
}

View File

@@ -1,165 +0,0 @@
package implrole
import (
"context"
"slices"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
licensing licensing.Licensing
registry []role.RegisterTypeable
}
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return &setter{
store: store,
authz: authz,
licensing: licensing,
registry: registry,
}
}
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range setter.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, setter.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range setter.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := setter.
authz.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return setter.store.Delete(ctx, orgID, id)
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
@@ -56,7 +55,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

View File

@@ -211,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
r.Use(otelmux.Middleware(
"apiserver",
@@ -237,7 +237,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,6 +63,8 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}
var _ baserules.Rule = (*AnomalyRule)(nil)
func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -490,7 +492,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -553,7 +555,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
EvalWindow: r.EvalWindow(),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

View File

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
evalTime := baseTime.Add(5 * time.Minute)
target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
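
These tests switch from ruletypes.Duration(5 * time.Minute) to valuer.MustParseTextDuration("5m"), in line with the "preserve the original duration format" commit (#10149). A small sketch of the intended round-trip follows, assuming the valuer text-duration type parses "5m" to five minutes and keeps the original text form when serialized; only MustParseTextDuration and Duration() appear in the diff, the rest is illustrative.

```go
package main

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// Parsed once from the text the user wrote ("5m").
	evalWindow := valuer.MustParseTextDuration("5m")

	// Usable as a plain time.Duration for scheduling and comparisons, e.g.
	// ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() in the rule above.
	if evalWindow.Duration() == 5*time.Minute {
		fmt.Println("eval window:", evalWindow.Duration())
	}

	// Assumption: when the rule is serialized back (e.g. into a PostableRule),
	// the value keeps the original "5m" text rather than a normalized "5m0s",
	// which is the intent of "preserve the original duration format" (#10149).
}
```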

View File

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,8 +213,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}
// newTask returns an appropriate group for
// rule type
// newTask returns an appropriate group for the rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)

View File

@@ -39,10 +39,7 @@ function RelatedMetricsCard({ metric }: RelatedMetricsCardProps): JSX.Element {
dataSource={DataSource.METRICS}
/>
)}
<DashboardsAndAlertsPopover
dashboards={metric.dashboards}
alerts={metric.alerts}
/>
<DashboardsAndAlertsPopover metricName={metric.name} />
</div>
);
}

View File

@@ -2,8 +2,8 @@
import { useMemo, useState } from 'react';
import { Card, Input, Select, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { MetricsexplorertypesMetricMetadataDTOType } from 'api/generated/services/sigNoz.schemas';
import { InspectMetricsSeries } from 'api/metricsExplorer/getInspectMetricsDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import classNames from 'classnames';
import { initialQueriesMap } from 'constants/queryBuilder';
import { AggregatorFilter } from 'container/QueryBuilder/filters';
@@ -40,8 +40,10 @@ import {
* returns true if the feature flag is enabled, false otherwise
* Show the inspect button in metrics explorer if the feature flag is enabled
*/
export function isInspectEnabled(metricType: MetricType | undefined): boolean {
return metricType === MetricType.GAUGE;
export function isInspectEnabled(
metricType: MetricsexplorertypesMetricMetadataDTOType | undefined,
): boolean {
return metricType === MetricsexplorertypesMetricMetadataDTOType.gauge;
}
export function getAllTimestampsOfMetrics(

View File

@@ -1,8 +1,17 @@
import { useCallback, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Button, Collapse, Input, Menu, Popover, Typography } from 'antd';
import {
Button,
Collapse,
Input,
Menu,
Popover,
Skeleton,
Typography,
} from 'antd';
import { ColumnsType } from 'antd/es/table';
import logEvent from 'api/common/logEvent';
import { useGetMetricAttributes } from 'api/generated/services/metrics';
import { ResizeTable } from 'components/ResizeTable';
import { DataType } from 'container/LogDetailedView/TableView';
import { useNotifications } from 'hooks/useNotifications';
@@ -15,6 +24,8 @@ import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import { AllAttributesProps, AllAttributesValueProps } from './types';
import { getMetricDetailsQuery } from './utils';
const ALL_ATTRIBUTES_KEY = 'all-attributes';
export function AllAttributesValue({
filterKey,
filterValue,
@@ -110,12 +121,31 @@ export function AllAttributesValue({
function AllAttributes({
metricName,
attributes,
metricType,
}: AllAttributesProps): JSX.Element {
const [searchString, setSearchString] = useState('');
const [activeKey, setActiveKey] = useState<string | string[]>(
'all-attributes',
const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);
const {
data: attributesData,
isLoading: isLoadingAttributes,
isError: isErrorAttributes,
mutate: getMetricAttributes,
} = useGetMetricAttributes();
useEffect(() => {
if (metricName) {
getMetricAttributes({
data: {
metricName,
},
});
}
}, [getMetricAttributes, metricName]);
const attributes = useMemo(
() => attributesData?.data?.data?.attributes ?? [],
[attributesData],
);
const { handleExplorerTabChange } = useHandleExplorerTabChange();
@@ -178,7 +208,7 @@ function AllAttributes({
attributes.filter(
(attribute) =>
attribute.key.toLowerCase().includes(searchString.toLowerCase()) ||
attribute.value.some((value) =>
attribute.values?.some((value) =>
value.toLowerCase().includes(searchString.toLowerCase()),
),
),
@@ -195,7 +225,7 @@ function AllAttributes({
},
value: {
key: attribute.key,
value: attribute.value,
value: attribute.values,
},
}))
: [],
@@ -252,6 +282,12 @@ function AllAttributes({
],
);
const emptyText = useMemo(
() =>
isErrorAttributes ? 'Error fetching attributes' : 'No attributes found',
[isErrorAttributes],
);
const items = useMemo(
() => [
{
@@ -270,6 +306,7 @@ function AllAttributes({
onClick={(e): void => {
e.stopPropagation();
}}
disabled={isLoadingAttributes}
/>
</div>
),
@@ -277,25 +314,37 @@ function AllAttributes({
children: (
<ResizeTable
columns={columns}
loading={isLoadingAttributes}
tableLayout="fixed"
dataSource={tableData}
pagination={false}
showHeader={false}
className="metrics-accordion-content all-attributes-content"
scroll={{ y: 600 }}
locale={{
emptyText,
}}
/>
),
},
],
[columns, tableData, searchString],
[searchString, columns, isLoadingAttributes, tableData, emptyText],
);
if (isLoadingAttributes) {
return (
<div className="all-attributes-skeleton-container">
<Skeleton active paragraph={{ rows: 8 }} />
</div>
);
}
return (
<Collapse
bordered
className="metrics-accordion metrics-metadata-accordion"
className="metrics-accordion metrics-all-attributes-accordion"
activeKey={activeKey}
onChange={(keys): void => setActiveKey(keys)}
onChange={(keys): void => setActiveKey(keys as string[])}
items={items}
/>
);

View File

@@ -2,36 +2,84 @@ import { useMemo } from 'react';
import { generatePath } from 'react-router-dom';
import { Color } from '@signozhq/design-tokens';
import { Dropdown, Typography } from 'antd';
import { Skeleton } from 'antd/lib';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { Bell, Grid } from 'lucide-react';
import { pluralize } from 'utils/pluralize';
import { DashboardsAndAlertsPopoverProps } from './types';
import {
useGetMetricAlerts,
useGetMetricDashboards,
} from 'api/generated/services/metrics';
function DashboardsAndAlertsPopover({
alerts,
dashboards,
metricName,
}: DashboardsAndAlertsPopoverProps): JSX.Element | null {
const { safeNavigate } = useSafeNavigate();
const params = useUrlQuery();
const {
data: alertsData,
isLoading: isLoadingAlerts,
isError: isErrorAlerts,
} = useGetMetricAlerts(
{
metricName: metricName ?? '',
},
{
query: {
enabled: !!metricName,
},
},
);
const {
data: dashboardsData,
isLoading: isLoadingDashboards,
isError: isErrorDashboards,
} = useGetMetricDashboards(
{
metricName: metricName ?? '',
},
{
query: {
enabled: !!metricName,
},
},
);
const alerts = useMemo(() => {
return alertsData?.data?.data?.alerts ?? [];
}, [alertsData]);
const dashboards = useMemo(() => {
const currentDashboards = dashboardsData?.data?.data?.dashboards ?? [];
// Remove duplicate dashboards
return currentDashboards.filter(
(dashboard, index, self) =>
index === self.findIndex((t) => t.dashboardId === dashboard.dashboardId),
);
}, [dashboardsData]);
const alertsPopoverContent = useMemo(() => {
if (alerts && alerts.length > 0) {
return alerts.map((alert) => ({
key: alert.alert_id,
key: alert.alertId,
label: (
<Typography.Link
key={alert.alert_id}
key={alert.alertId}
onClick={(): void => {
params.set(QueryParams.ruleId, alert.alert_id);
params.set(QueryParams.ruleId, alert.alertId);
history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
}}
className="dashboards-popover-content-item"
>
{alert.alert_name || alert.alert_id}
{alert.alertName || alert.alertId}
</Typography.Link>
),
}));
@@ -39,41 +87,44 @@ function DashboardsAndAlertsPopover({
return null;
}, [alerts, params]);
const uniqueDashboards = useMemo(
() =>
dashboards?.filter(
(item, index, self) =>
index === self.findIndex((t) => t.dashboard_id === item.dashboard_id),
),
[dashboards],
);
const dashboardsPopoverContent = useMemo(() => {
if (uniqueDashboards && uniqueDashboards.length > 0) {
return uniqueDashboards.map((dashboard) => ({
key: dashboard.dashboard_id,
if (dashboards && dashboards.length > 0) {
return dashboards.map((dashboard) => ({
key: dashboard.dashboardId,
label: (
<Typography.Link
key={dashboard.dashboard_id}
key={dashboard.dashboardId}
onClick={(): void => {
safeNavigate(
generatePath(ROUTES.DASHBOARD, {
dashboardId: dashboard.dashboard_id,
dashboardId: dashboard.dashboardId,
}),
);
}}
className="dashboards-popover-content-item"
>
{dashboard.dashboard_name || dashboard.dashboard_id}
{dashboard.dashboardName || dashboard.dashboardId}
</Typography.Link>
),
}));
}
return null;
}, [uniqueDashboards, safeNavigate]);
}, [dashboards, safeNavigate]);
if (!dashboardsPopoverContent && !alertsPopoverContent) {
return null;
if (isLoadingAlerts || isLoadingDashboards) {
return (
<div className="dashboards-and-alerts-popover-container">
<Skeleton title={false} paragraph={{ rows: 1 }} active />
</div>
);
}
// If there are no dashboards or alerts or both have errors, don't show the popover
const hidePopover =
(!dashboardsPopoverContent && !alertsPopoverContent) ||
(isErrorAlerts && isErrorDashboards);
if (hidePopover) {
return <div className="dashboards-and-alerts-popover-container" />;
}
return (
@@ -92,8 +143,7 @@ function DashboardsAndAlertsPopover({
>
<Grid size={12} color={Color.BG_SIENNA_500} />
<Typography.Text>
{uniqueDashboards?.length} dashboard
{uniqueDashboards?.length === 1 ? '' : 's'}
{pluralize(dashboards.length, 'dashboard')}
</Typography.Text>
</div>
</Dropdown>
@@ -112,7 +162,7 @@ function DashboardsAndAlertsPopover({
>
<Bell size={12} color={Color.BG_SAKURA_500} />
<Typography.Text>
{alerts?.length} alert {alerts?.length === 1 ? 'rule' : 'rules'}
{pluralize(alerts.length, 'alert rule')}
</Typography.Text>
</div>
</Dropdown>

View File

@@ -0,0 +1,129 @@
import { Skeleton, Tooltip, Typography } from 'antd';
import { useMemo } from 'react';
import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
import { HighlightsProps } from './types';
import {
formatNumberToCompactFormat,
formatTimestampToReadableDate,
} from './utils';
import { useGetMetricHighlights } from 'api/generated/services/metrics';
function Highlights({ metricName }: HighlightsProps): JSX.Element {
const {
data: metricHighlightsData,
isLoading: isLoadingMetricHighlights,
isError: isErrorMetricHighlights,
} = useGetMetricHighlights(
{
metricName: metricName ?? '',
},
{
query: {
enabled: !!metricName,
},
},
);
const metricHighlights = useMemo(() => {
return metricHighlightsData?.data?.data ?? null;
}, [metricHighlightsData]);
const dataPoints = useMemo(() => {
if (!metricHighlights) {
return null;
}
if (isErrorMetricHighlights) {
return (
<Typography.Text className="metric-details-grid-value">-</Typography.Text>
);
}
return (
<Typography.Text className="metric-details-grid-value">
<Tooltip title={metricHighlights?.dataPoints?.toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
</Tooltip>
</Typography.Text>
);
}, [metricHighlights, isErrorMetricHighlights]);
const timeSeries = useMemo(() => {
if (!metricHighlights) {
return null;
}
if (isErrorMetricHighlights) {
return (
<Typography.Text className="metric-details-grid-value">-</Typography.Text>
);
}
const timeSeriesActive = formatNumberToCompactFormat(
metricHighlights.activeTimeSeries,
);
const timeSeriesTotal = formatNumberToCompactFormat(
metricHighlights.totalTimeSeries,
);
return (
<Typography.Text className="metric-details-grid-value">
<Tooltip
title="Active time series are those that have received data points in the last 1
hour."
placement="top"
>
<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
</Tooltip>
</Typography.Text>
);
}, [metricHighlights, isErrorMetricHighlights]);
const lastReceived = useMemo(() => {
if (!metricHighlights) {
return null;
}
if (isErrorMetricHighlights) {
return (
<Typography.Text className="metric-details-grid-value">-</Typography.Text>
);
}
const displayText = formatTimestampToReadableDate(
metricHighlights.lastReceived,
);
return (
<Typography.Text className="metric-details-grid-value">
<Tooltip title={displayText}>{displayText}</Tooltip>
</Typography.Text>
);
}, [metricHighlights, isErrorMetricHighlights]);
if (isLoadingMetricHighlights) {
return (
<div className="metric-details-content-grid">
<Skeleton title={false} paragraph={{ rows: 2 }} active />
</div>
);
}
return (
<div className="metric-details-content-grid">
<div className="labels-row">
<Typography.Text type="secondary" className="metric-details-grid-label">
SAMPLES
</Typography.Text>
<Typography.Text type="secondary" className="metric-details-grid-label">
TIME SERIES
</Typography.Text>
<Typography.Text type="secondary" className="metric-details-grid-label">
LAST RECEIVED
</Typography.Text>
</div>
<div className="values-row">
{dataPoints}
{timeSeries}
{lastReceived}
</div>
</div>
);
}
export default Highlights;

View File

@@ -1,18 +1,25 @@
import { useCallback, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
import { Button, Collapse, Input, Select, Typography } from 'antd';
import { Button, Collapse, Input, Select, Skeleton, Typography } from 'antd';
import { ColumnsType } from 'antd/es/table';
import logEvent from 'api/common/logEvent';
import {
invalidateGetMetricMetadata,
useUpdateMetricMetadata,
} from 'api/generated/services/metrics';
import {
MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality,
MetricsexplorertypesUpdateMetricMetadataRequestDTOType,
} from 'api/generated/services/sigNoz.schemas';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
import { ResizeTable } from 'components/ResizeTable';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
import { DataType } from 'container/LogDetailedView/TableView';
import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import { useNotifications } from 'hooks/useNotifications';
import { Edit2, Save, X } from 'lucide-react';
@@ -23,23 +30,27 @@ import {
} from '../Summary/constants';
import { MetricTypeRenderer } from '../Summary/utils';
import { METRIC_METADATA_KEYS } from './constants';
import { MetadataProps } from './types';
import { determineIsMonotonic } from './utils';
import { MetadataProps, MetricMetadataState, TableFields } from './types';
import {
transformMetricType,
transformTemporality,
transformUpdateMetricMetadataRequest,
} from './utils';
function Metadata({
metricName,
metadata,
refetchMetricDetails,
isErrorMetricMetadata,
isLoadingMetricMetadata,
}: MetadataProps): JSX.Element {
const [isEditing, setIsEditing] = useState(false);
const [
metricMetadata,
setMetricMetadata,
] = useState<UpdateMetricMetadataProps>({
metricType: metadata?.metric_type || MetricType.SUM,
description: metadata?.description || '',
temporality: metadata?.temporality,
unit: metadata?.unit,
const [metricMetadata, setMetricMetadata] = useState<MetricMetadataState>({
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType.sum,
description: '',
temporality:
MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.unspecified,
unit: '',
});
const { notifications } = useNotifications();
const {
@@ -51,6 +62,18 @@ function Metadata({
);
const queryClient = useQueryClient();
// Initialize local state from the metadata API response
useEffect(() => {
if (metadata) {
setMetricMetadata({
type: transformMetricType(metadata.type),
description: metadata.description,
temporality: transformTemporality(metadata.temporality),
unit: metadata.unit,
});
}
}, [metadata]);
const tableData = useMemo(
() =>
metadata
@@ -59,7 +82,7 @@ function Metadata({
temporality: metadata?.temporality,
})
// Filter out monotonic as user input is not required
.filter((key) => key !== 'monotonic')
.filter((key) => key !== TableFields.IS_MONOTONIC)
.map((key) => ({
key,
value: {
@@ -72,30 +95,37 @@ function Metadata({
);
// Render un-editable field value
const renderUneditableField = useCallback((key: string, value: string) => {
if (key === 'metric_type') {
return <MetricTypeRenderer type={value as MetricType} />;
}
let fieldValue = value;
if (key === 'unit') {
fieldValue = getUniversalNameFromMetricUnit(value);
}
return <FieldRenderer field={fieldValue || '-'} />;
}, []);
const renderUneditableField = useCallback(
(key: keyof MetricMetadataState, value: string) => {
if (isErrorMetricMetadata) {
return <FieldRenderer field="-" />;
}
if (key === TableFields.TYPE) {
return <MetricTypeRenderer type={value as MetricType} />;
}
let fieldValue = value;
if (key === TableFields.UNIT) {
fieldValue = getUniversalNameFromMetricUnit(value);
}
return <FieldRenderer field={fieldValue || '-'} />;
},
[isErrorMetricMetadata],
);
const renderColumnValue = useCallback(
(field: { value: string; key: string }): JSX.Element => {
(field: { value: string; key: keyof MetricMetadataState }): JSX.Element => {
if (!isEditing) {
return renderUneditableField(field.key, field.value);
}
// Don't allow editing of unit if it's already set
const metricUnitAlreadySet = field.key === 'unit' && Boolean(metadata?.unit);
const metricUnitAlreadySet =
field.key === TableFields.UNIT && Boolean(metadata?.unit);
if (metricUnitAlreadySet) {
return renderUneditableField(field.key, field.value);
}
if (field.key === 'metric_type') {
if (field.key === TableFields.TYPE) {
return (
<Select
data-testid="metric-type-select"
@@ -103,17 +133,17 @@ function Metadata({
value: key,
label: METRIC_TYPE_LABEL_MAP[key as MetricType],
}))}
value={metricMetadata.metricType}
value={metricMetadata.type}
onChange={(value): void => {
setMetricMetadata((prev) => ({
...prev,
metricType: value as MetricType,
type: value,
}));
}}
/>
);
}
if (field.key === 'unit') {
if (field.key === TableFields.UNIT) {
return (
<YAxisUnitSelector
value={metricMetadata.unit}
@@ -125,7 +155,7 @@ function Metadata({
/>
);
}
if (field.key === 'temporality') {
if (field.key === TableFields.Temporality) {
return (
<Select
data-testid="temporality-select"
@@ -137,22 +167,18 @@ function Metadata({
onChange={(value): void => {
setMetricMetadata((prev) => ({
...prev,
temporality: value as Temporality,
temporality: value,
}));
}}
/>
);
}
if (field.key === 'description') {
if (field.key === TableFields.DESCRIPTION) {
return (
<Input
data-testid="description-input"
name={field.key}
defaultValue={
metricMetadata[
field.key as Exclude<keyof UpdateMetricMetadataProps, 'isMonotonic'>
]
}
defaultValue={metricMetadata.description}
onChange={(e): void => {
setMetricMetadata((prev) => ({
...prev,
@@ -201,18 +227,14 @@ function Metadata({
const handleSave = useCallback(() => {
updateMetricMetadata(
{
metricName,
payload: {
...metricMetadata,
isMonotonic: determineIsMonotonic(
metricMetadata.metricType,
metricMetadata.temporality,
),
pathParams: {
metricName: metricName ?? '',
},
data: transformUpdateMetricMetadataRequest(metricName, metricMetadata),
},
{
onSuccess: (response): void => {
if (response?.statusCode === 200) {
if (response.status === 200) {
logEvent(MetricsExplorerEvents.MetricMetadataUpdated, {
[MetricsExplorerEventKeys.MetricName]: metricName,
[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -221,9 +243,13 @@ function Metadata({
notifications.success({
message: 'Metadata updated successfully',
});
refetchMetricDetails();
setIsEditing(false);
queryClient.invalidateQueries(['metricsList']);
// TODO: Update this to use invalidateGetMetricList
// once we have switched to the V2 API on the summary page
queryClient.invalidateQueries([REACT_QUERY_KEY.GET_METRICS_LIST]);
invalidateGetMetricMetadata(queryClient, {
metricName,
});
} else {
notifications.error({
message:
@@ -243,10 +269,24 @@ function Metadata({
metricName,
metricMetadata,
notifications,
refetchMetricDetails,
queryClient,
]);
const cancelEdit = useCallback(
(e: React.MouseEvent<HTMLElement, MouseEvent>): void => {
e.stopPropagation();
if (metadata) {
setMetricMetadata({
type: transformMetricType(metadata.type),
description: metadata.description,
temporality: transformTemporality(metadata.temporality),
unit: metadata.unit,
});
}
setIsEditing(false);
},
[metadata],
);
const actionButton = useMemo(() => {
if (isEditing) {
return (
@@ -254,10 +294,7 @@ function Metadata({
<Button
className="action-button"
type="text"
onClick={(e): void => {
e.stopPropagation();
setIsEditing(false);
}}
onClick={cancelEdit}
disabled={isUpdatingMetricsMetadata}
>
<X size={14} />
@@ -294,7 +331,7 @@ function Metadata({
</Button>
</div>
);
}, [handleSave, isEditing, isUpdatingMetricsMetadata]);
}, [isEditing, isUpdatingMetricsMetadata, cancelEdit, handleSave]);
const items = useMemo(
() => [
@@ -321,6 +358,14 @@ function Metadata({
[actionButton, columns, tableData],
);
if (isLoadingMetricMetadata) {
return (
<div className="metrics-metadata-skeleton-container">
<Skeleton active paragraph={{ rows: 8 }} />
</div>
);
}
return (
<Collapse
bordered

View File

@@ -39,6 +39,7 @@
gap: 12px;
.metric-details-content-grid {
height: 50px;
.labels-row,
.values-row {
display: grid;
@@ -72,6 +73,7 @@
.dashboards-and-alerts-popover-container {
display: flex;
gap: 16px;
height: 32px;
.dashboards-and-alerts-popover {
border-radius: 20px;
@@ -102,6 +104,14 @@
}
}
.metrics-metadata-skeleton-container {
height: 330px;
}
.all-attributes-skeleton-container {
height: 600px;
}
.metrics-accordion {
.ant-table-body {
&::-webkit-scrollbar {
@@ -148,7 +158,6 @@
.all-attributes-search-input {
width: 300px;
border: 1px solid var(--bg-slate-300);
}
}
@@ -161,6 +170,7 @@
.ant-typography:first-child {
font-family: 'Geist Mono';
color: var(--bg-robin-400);
background-color: transparent;
}
}
.all-attributes-contribution {
@@ -237,6 +247,7 @@
}
.metric-metadata-value {
height: 67px;
background: rgba(22, 25, 34, 0.4);
overflow-x: scroll;
.field-renderer-container {
@@ -330,18 +341,26 @@
.metric-details-content {
.metrics-accordion {
.metrics-accordion-header {
.action-button {
.ant-typography {
color: var(--bg-slate-400);
.action-menu {
.action-button {
.ant-typography {
color: var(--bg-slate-400);
}
}
}
}
.metrics-accordion-content {
.metric-metadata-key {
.field-renderer-container {
.label {
color: var(--bg-slate-300);
}
}
.all-attributes-key {
.ant-typography:last-child {
color: var(--bg-slate-400);
color: var(--bg-vanilla-200);
background-color: var(--bg-robin-300);
}
}

View File

@@ -1,16 +1,8 @@
import { useCallback, useEffect, useMemo } from 'react';
import { Color } from '@signozhq/design-tokens';
import {
Button,
Divider,
Drawer,
Empty,
Skeleton,
Tooltip,
Typography,
} from 'antd';
import { Button, Divider, Drawer, Empty, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetMetricDetails } from 'hooks/metricsExplorer/useGetMetricDetails';
import { useGetMetricMetadata } from 'api/generated/services/metrics';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { Compass, Crosshair, X } from 'lucide-react';
@@ -19,16 +11,12 @@ import ROUTES from '../../../constants/routes';
import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import { isInspectEnabled } from '../Inspect/utils';
import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
import AllAttributes from './AllAttributes';
import DashboardsAndAlertsPopover from './DashboardsAndAlertsPopover';
import Highlights from './Highlights';
import Metadata from './Metadata';
import { MetricDetailsProps } from './types';
import {
formatNumberToCompactFormat,
formatTimestampToReadableDate,
getMetricDetailsQuery,
} from './utils';
import { getMetricDetailsQuery } from './utils';
import './MetricDetails.styles.scss';
import '../Summary/Summary.styles.scss';
@@ -43,55 +31,52 @@ function MetricDetails({
const { handleExplorerTabChange } = useHandleExplorerTabChange();
const {
data,
isLoading,
isFetching,
error: metricDetailsError,
refetch: refetchMetricDetails,
} = useGetMetricDetails(metricName ?? '', {
enabled: !!metricName,
});
const metric = data?.payload?.data;
const lastReceived = useMemo(() => {
if (!metric) {
return null;
}
return formatTimestampToReadableDate(metric.lastReceived);
}, [metric]);
const showInspectFeature = useMemo(
() => isInspectEnabled(metric?.metadata?.metric_type),
[metric],
data: metricMetadataResponse,
isLoading: isLoadingMetricMetadata,
isError: isErrorMetricMetadata,
} = useGetMetricMetadata(
{
metricName: metricName ?? '',
},
{
query: {
enabled: !!metricName,
},
},
);
const isMetricDetailsLoading = isLoading || isFetching;
const timeSeries = useMemo(() => {
if (!metric) {
const metadata = useMemo(() => {
if (
!metricMetadataResponse ||
!metricMetadataResponse.data ||
!metricMetadataResponse.data.data
) {
return null;
}
const timeSeriesActive = formatNumberToCompactFormat(metric.timeSeriesActive);
const timeSeriesTotal = formatNumberToCompactFormat(metric.timeSeriesTotal);
const {
type,
description,
unit,
temporality,
isMonotonic,
} = metricMetadataResponse.data.data;
return (
<Tooltip
title="Active time series are those that have received data points in the last 1
hour."
placement="top"
>
<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
</Tooltip>
);
}, [metric]);
return {
type,
description,
unit,
temporality,
isMonotonic,
};
}, [metricMetadataResponse]);
const showInspectFeature = useMemo(() => isInspectEnabled(metadata?.type), [
metadata,
]);
const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
if (metricName) {
const compositeQuery = getMetricDetailsQuery(
metricName,
metric?.metadata?.metric_type,
);
const compositeQuery = getMetricDetailsQuery(metricName, metadata?.type);
handleExplorerTabChange(
PANEL_TYPES.TIME_SERIES,
{
@@ -107,9 +92,7 @@ function MetricDetails({
[MetricsExplorerEventKeys.Modal]: 'metric-details',
});
}
}, [metricName, handleExplorerTabChange, metric?.metadata?.metric_type]);
const isMetricDetailsError = metricDetailsError || !metric;
}, [metricName, handleExplorerTabChange, metadata?.type]);
useEffect(() => {
logEvent(MetricsExplorerEvents.ModalOpened, {
@@ -117,6 +100,10 @@ function MetricDetails({
});
}, []);
if (!metricName) {
return <Empty description="Metric not found" />;
}
return (
<Drawer
width="60%"
@@ -124,7 +111,7 @@ function MetricDetails({
<div className="metric-details-header">
<div className="metric-details-title">
<Divider type="vertical" />
<Typography.Text>{metric?.name}</Typography.Text>
<Typography.Text>{metricName}</Typography.Text>
</div>
<div className="metric-details-header-buttons">
<Button
@@ -142,8 +129,8 @@ function MetricDetails({
aria-label="Inspect Metric"
icon={<Crosshair size={18} />}
onClick={(): void => {
if (metric?.name) {
openInspectModal(metric.name);
if (metricName) {
openInspectModal(metricName);
}
}}
data-testid="inspect-metric-button"
@@ -163,60 +150,17 @@ function MetricDetails({
destroyOnClose
closeIcon={<X size={16} />}
>
{isMetricDetailsLoading && (
<div data-testid="metric-details-skeleton">
<Skeleton active />
</div>
)}
{isMetricDetailsError && !isMetricDetailsLoading && (
<Empty description="Error fetching metric details" />
)}
{!isMetricDetailsLoading && !isMetricDetailsError && (
<div className="metric-details-content">
<div className="metric-details-content-grid">
<div className="labels-row">
<Typography.Text type="secondary" className="metric-details-grid-label">
SAMPLES
</Typography.Text>
<Typography.Text type="secondary" className="metric-details-grid-label">
TIME SERIES
</Typography.Text>
<Typography.Text type="secondary" className="metric-details-grid-label">
LAST RECEIVED
</Typography.Text>
</div>
<div className="values-row">
<Typography.Text className="metric-details-grid-value">
<Tooltip title={metric?.samples.toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric?.samples)}
</Tooltip>
</Typography.Text>
<Typography.Text className="metric-details-grid-value">
<Tooltip title={timeSeries}>{timeSeries}</Tooltip>
</Typography.Text>
<Typography.Text className="metric-details-grid-value">
<Tooltip title={lastReceived}>{lastReceived}</Tooltip>
</Typography.Text>
</div>
</div>
<DashboardsAndAlertsPopover
dashboards={metric.dashboards}
alerts={metric.alerts}
/>
<Metadata
metricName={metric?.name}
metadata={metric.metadata}
refetchMetricDetails={refetchMetricDetails}
/>
{metric.attributes && (
<AllAttributes
metricName={metric?.name}
attributes={metric.attributes}
metricType={metric?.metadata?.metric_type}
/>
)}
</div>
)}
<div className="metric-details-content">
<Highlights metricName={metricName} />
<DashboardsAndAlertsPopover metricName={metricName} />
<Metadata
metricName={metricName}
metadata={metadata}
isErrorMetricMetadata={isErrorMetricMetadata}
isLoadingMetricMetadata={isLoadingMetricMetadata}
/>
<AllAttributes metricName={metricName} metricType={metadata?.type} />
</div>
</Drawer>
);
}

View File

@@ -1,11 +1,13 @@
import * as reactUseHooks from 'react-use';
import { fireEvent, render, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import * as useHandleExplorerTabChange from 'hooks/useHandleExplorerTabChange';
import { userEvent } from 'tests/test-utils';
import { MetricDetailsAttribute } from '../../../../api/metricsExplorer/getMetricDetails';
import ROUTES from '../../../../constants/routes';
import AllAttributes, { AllAttributesValue } from '../AllAttributes';
import { getMockMetricAttributesData } from './testUtlls';
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
@@ -22,32 +24,30 @@ jest
const mockMetricName = 'test-metric';
const mockMetricType = MetricType.GAUGE;
const mockAttributes: MetricDetailsAttribute[] = [
{
key: 'attribute1',
value: ['value1', 'value2'],
valueCount: 2,
},
{
key: 'attribute2',
value: ['value3'],
valueCount: 1,
},
];
const mockUseCopyToClipboard = jest.fn();
jest
.spyOn(reactUseHooks, 'useCopyToClipboard')
.mockReturnValue([{ value: 'value1' }, mockUseCopyToClipboard] as any);
const useGetMetricAttributesMock = jest.spyOn(
metricsExplorerHooks,
'useGetMetricAttributes',
);
const mockUseGetMetricAttributes = jest.fn();
describe('AllAttributes', () => {
beforeEach(() => {
useGetMetricAttributesMock.mockReturnValue({
...getMockMetricAttributesData(),
mutate: mockUseGetMetricAttributes,
});
});
it('renders attributes section with title', () => {
render(
<AllAttributes
metricName={mockMetricName}
attributes={mockAttributes}
metricType={mockMetricType}
/>,
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
);
expect(screen.getByText('All Attributes')).toBeInTheDocument();
@@ -55,11 +55,7 @@ describe('AllAttributes', () => {
it('renders all attribute keys and values', () => {
render(
<AllAttributes
metricName={mockMetricName}
attributes={mockAttributes}
metricType={mockMetricType}
/>,
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
);
// Check attribute keys are rendered
@@ -74,11 +70,7 @@ describe('AllAttributes', () => {
it('renders value counts correctly', () => {
render(
<AllAttributes
metricName={mockMetricName}
attributes={mockAttributes}
metricType={mockMetricType}
/>,
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
);
expect(screen.getByText('2')).toBeInTheDocument(); // For attribute1
@@ -86,41 +78,36 @@ describe('AllAttributes', () => {
});
it('handles empty attributes array', () => {
useGetMetricAttributesMock.mockReturnValue({
...getMockMetricAttributesData({
data: {
attributes: [],
totalKeys: 0,
},
}),
mutate: mockUseGetMetricAttributes,
});
render(
<AllAttributes
metricName={mockMetricName}
attributes={[]}
metricType={mockMetricType}
/>,
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
);
expect(screen.getByText('All Attributes')).toBeInTheDocument();
expect(screen.queryByText('No data')).toBeInTheDocument();
expect(screen.getByText('No attributes found')).toBeInTheDocument();
});
it('clicking on an attribute key opens the explorer with the attribute filter applied', () => {
it('clicking on an attribute key opens the explorer with the attribute filter applied', async () => {
render(
<AllAttributes
metricName={mockMetricName}
attributes={mockAttributes}
metricType={mockMetricType}
/>,
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
);
fireEvent.click(screen.getByText('attribute1'));
await userEvent.click(screen.getByText('attribute1'));
expect(mockHandleExplorerTabChange).toHaveBeenCalled();
});
it('filters attributes based on search input', () => {
it('filters attributes based on search input', async () => {
render(
<AllAttributes
metricName={mockMetricName}
attributes={mockAttributes}
metricType={mockMetricType}
/>,
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
);
fireEvent.change(screen.getByPlaceholderText('Search'), {
target: { value: 'value1' },
});
await userEvent.type(screen.getByPlaceholderText('Search'), 'value1');
expect(screen.getByText('attribute1')).toBeInTheDocument();
expect(screen.getByText('value1')).toBeInTheDocument();
@@ -144,7 +131,7 @@ describe('AllAttributesValue', () => {
expect(screen.getByText('value2')).toBeInTheDocument();
});
it('loads more attributes when show more button is clicked', () => {
it('loads more attributes when show more button is clicked', async () => {
render(
<AllAttributesValue
filterKey="attribute1"
@@ -155,7 +142,7 @@ describe('AllAttributesValue', () => {
/>,
);
expect(screen.queryByText('value6')).not.toBeInTheDocument();
fireEvent.click(screen.getByText('Show More'));
await userEvent.click(screen.getByText('Show More'));
expect(screen.getByText('value6')).toBeInTheDocument();
});
@@ -172,7 +159,7 @@ describe('AllAttributesValue', () => {
expect(screen.queryByText('Show More')).not.toBeInTheDocument();
});
it('copy button should copy the attribute value to the clipboard', () => {
it('copy button should copy the attribute value to the clipboard', async () => {
render(
<AllAttributesValue
filterKey="attribute1"
@@ -183,13 +170,13 @@ describe('AllAttributesValue', () => {
/>,
);
expect(screen.getByText('value1')).toBeInTheDocument();
fireEvent.click(screen.getByText('value1'));
await userEvent.click(screen.getByText('value1'));
expect(screen.getByText('Copy Attribute')).toBeInTheDocument();
fireEvent.click(screen.getByText('Copy Attribute'));
await userEvent.click(screen.getByText('Copy Attribute'));
expect(mockUseCopyToClipboard).toHaveBeenCalledWith('value1');
});
it('explorer button should go to metrics explore with the attribute filter applied', () => {
it('explorer button should go to metrics explore with the attribute filter applied', async () => {
render(
<AllAttributesValue
filterKey="attribute1"
@@ -200,10 +187,10 @@ describe('AllAttributesValue', () => {
/>,
);
expect(screen.getByText('value1')).toBeInTheDocument();
fireEvent.click(screen.getByText('value1'));
await userEvent.click(screen.getByText('value1'));
expect(screen.getByText('Open in Explorer')).toBeInTheDocument();
fireEvent.click(screen.getByText('Open in Explorer'));
await userEvent.click(screen.getByText('Open in Explorer'));
expect(mockGoToMetricsExploreWithAppliedAttribute).toHaveBeenCalledWith(
'attribute1',
'value1',

View File

@@ -1,26 +1,17 @@
import { fireEvent, render, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import { QueryParams } from 'constants/query';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import { userEvent } from 'tests/test-utils';
import DashboardsAndAlertsPopover from '../DashboardsAndAlertsPopover';
const mockAlert1 = {
alert_id: '1',
alert_name: 'Alert 1',
};
const mockAlert2 = {
alert_id: '2',
alert_name: 'Alert 2',
};
const mockDashboard1 = {
dashboard_id: '1',
dashboard_name: 'Dashboard 1',
};
const mockDashboard2 = {
dashboard_id: '2',
dashboard_name: 'Dashboard 2',
};
const mockAlerts = [mockAlert1, mockAlert2];
const mockDashboards = [mockDashboard1, mockDashboard2];
import {
getMockAlertsData,
getMockDashboardsData,
MOCK_ALERT_1,
MOCK_ALERT_2,
MOCK_DASHBOARD_1,
MOCK_DASHBOARD_2,
} from './testUtlls';
const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
@@ -28,7 +19,6 @@ jest.mock('hooks/useSafeNavigate', () => ({
safeNavigate: mockSafeNavigate,
}),
}));
const mockSetQuery = jest.fn();
const mockUrlQuery = {
set: mockSetQuery,
@@ -39,125 +29,155 @@ jest.mock('hooks/useUrlQuery', () => ({
default: jest.fn(() => mockUrlQuery),
}));
describe('DashboardsAndAlertsPopover', () => {
it('renders the popover correctly with multiple dashboards and alerts', () => {
render(
<DashboardsAndAlertsPopover
alerts={mockAlerts}
dashboards={mockDashboards}
/>,
);
const MOCK_METRIC_NAME = 'test-metric';
expect(
screen.getByText(`${mockDashboards.length} dashboards`),
).toBeInTheDocument();
expect(
screen.getByText(`${mockAlerts.length} alert rules`),
).toBeInTheDocument();
const useGetMetricAlertsMock = jest.spyOn(
metricsExplorerHooks,
'useGetMetricAlerts',
);
const useGetMetricDashboardsMock = jest.spyOn(
metricsExplorerHooks,
'useGetMetricDashboards',
);
describe('DashboardsAndAlertsPopover', () => {
beforeEach(() => {
useGetMetricAlertsMock.mockReturnValue(getMockAlertsData());
useGetMetricDashboardsMock.mockReturnValue(getMockDashboardsData());
});
it('renders the popover correctly with multiple dashboards and alerts', () => {
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
expect(screen.getByText(`2 dashboards`)).toBeInTheDocument();
expect(screen.getByText(`2 alert rules`)).toBeInTheDocument();
});
it('renders no popovers when there are no dashboards and alerts', () => {
const { container } = render(
<DashboardsAndAlertsPopover alerts={[]} dashboards={[]} />,
useGetMetricAlertsMock.mockReturnValue(
getMockAlertsData({
data: undefined,
}),
);
expect(container).toBeEmptyDOMElement();
useGetMetricDashboardsMock.mockReturnValue(
getMockDashboardsData({
data: undefined,
}),
);
const { container } = render(
<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />,
);
expect(
container.querySelector('.dashboards-and-alerts-popover'),
).toBeNull();
});
it('renders popover with single dashboard and alert', () => {
render(
<DashboardsAndAlertsPopover
alerts={[mockAlert1]}
dashboards={[mockDashboard1]}
/>,
useGetMetricAlertsMock.mockReturnValue(
getMockAlertsData({
data: {
alerts: [MOCK_ALERT_1],
},
}),
);
useGetMetricDashboardsMock.mockReturnValue(
getMockDashboardsData({
data: {
dashboards: [MOCK_DASHBOARD_1],
},
}),
);
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
expect(screen.getByText(`1 dashboard`)).toBeInTheDocument();
expect(screen.getByText(`1 alert rule`)).toBeInTheDocument();
});
it('renders popover with dashboard id if name is not available', () => {
render(
<DashboardsAndAlertsPopover
alerts={mockAlerts}
dashboards={[{ ...mockDashboard1, dashboard_name: undefined } as any]}
/>,
it('renders popover with dashboard id if name is not available', async () => {
useGetMetricDashboardsMock.mockReturnValue(
getMockDashboardsData({
data: {
dashboards: [{ ...MOCK_DASHBOARD_1, dashboardName: '' }],
},
}),
);
fireEvent.click(screen.getByText(`1 dashboard`));
expect(screen.getByText(mockDashboard1.dashboard_id)).toBeInTheDocument();
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
await userEvent.click(screen.getByText(`1 dashboard`));
expect(screen.getByText(MOCK_DASHBOARD_1.dashboardId)).toBeInTheDocument();
});
it('renders popover with alert id if name is not available', () => {
render(
<DashboardsAndAlertsPopover
alerts={[{ ...mockAlert1, alert_name: undefined } as any]}
dashboards={mockDashboards}
/>,
it('renders popover with alert id if name is not available', async () => {
useGetMetricAlertsMock.mockReturnValue(
getMockAlertsData({
data: {
alerts: [{ ...MOCK_ALERT_1, alertName: '' }],
},
}),
);
fireEvent.click(screen.getByText(`1 alert rule`));
expect(screen.getByText(mockAlert1.alert_id)).toBeInTheDocument();
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
await userEvent.click(screen.getByText(`1 alert rule`));
expect(screen.getByText(MOCK_ALERT_1.alertId)).toBeInTheDocument();
});
it('navigates to the dashboard when the dashboard is clicked', () => {
render(
<DashboardsAndAlertsPopover
alerts={mockAlerts}
dashboards={mockDashboards}
/>,
);
it('navigates to the dashboard when the dashboard is clicked', async () => {
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
// Click on 2 dashboards button
fireEvent.click(screen.getByText(`${mockDashboards.length} dashboards`));
await userEvent.click(screen.getByText(`2 dashboards`));
// Popover showing list of 2 dashboards should be visible
expect(screen.getByText(mockDashboard1.dashboard_name)).toBeInTheDocument();
expect(screen.getByText(mockDashboard2.dashboard_name)).toBeInTheDocument();
expect(screen.getByText(MOCK_DASHBOARD_1.dashboardName)).toBeInTheDocument();
expect(screen.getByText(MOCK_DASHBOARD_2.dashboardName)).toBeInTheDocument();
// Click on the first dashboard
fireEvent.click(screen.getByText(mockDashboard1.dashboard_name));
await userEvent.click(screen.getByText(MOCK_DASHBOARD_1.dashboardName));
// Should navigate to the dashboard
expect(mockSafeNavigate).toHaveBeenCalledWith(
`/dashboard/${mockDashboard1.dashboard_id}`,
`/dashboard/${MOCK_DASHBOARD_1.dashboardId}`,
);
});
it('navigates to the alert when the alert is clicked', () => {
render(
<DashboardsAndAlertsPopover
alerts={mockAlerts}
dashboards={mockDashboards}
/>,
);
it('navigates to the alert when the alert is clicked', async () => {
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
// Click on 2 alert rules button
fireEvent.click(screen.getByText(`${mockAlerts.length} alert rules`));
await userEvent.click(screen.getByText(`2 alert rules`));
// Popover showing list of 2 alert rules should be visible
expect(screen.getByText(mockAlert1.alert_name)).toBeInTheDocument();
expect(screen.getByText(mockAlert2.alert_name)).toBeInTheDocument();
expect(screen.getByText(MOCK_ALERT_1.alertName)).toBeInTheDocument();
expect(screen.getByText(MOCK_ALERT_2.alertName)).toBeInTheDocument();
// Click on the first alert rule
fireEvent.click(screen.getByText(mockAlert1.alert_name));
await userEvent.click(screen.getByText(MOCK_ALERT_1.alertName));
// Should navigate to the alert rule
expect(mockSetQuery).toHaveBeenCalledWith(
QueryParams.ruleId,
mockAlert1.alert_id,
MOCK_ALERT_1.alertId,
);
});
it('renders unique dashboards even when there are duplicates', () => {
render(
<DashboardsAndAlertsPopover
alerts={mockAlerts}
dashboards={[...mockDashboards, mockDashboard1]}
/>,
it('renders unique dashboards even when there are duplicates', async () => {
useGetMetricDashboardsMock.mockReturnValue(
getMockDashboardsData({
data: {
dashboards: [MOCK_DASHBOARD_1, MOCK_DASHBOARD_2, MOCK_DASHBOARD_1],
},
}),
);
expect(
screen.getByText(`${mockDashboards.length} dashboards`),
).toBeInTheDocument();
fireEvent.click(screen.getByText(`${mockDashboards.length} dashboards`));
expect(screen.getByText(mockDashboard1.dashboard_name)).toBeInTheDocument();
expect(screen.getByText(mockDashboard2.dashboard_name)).toBeInTheDocument();
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
expect(screen.getByText('2 dashboards')).toBeInTheDocument();
await userEvent.click(screen.getByText('2 dashboards'));
expect(screen.getByText(MOCK_DASHBOARD_1.dashboardName)).toBeInTheDocument();
expect(screen.getByText(MOCK_DASHBOARD_2.dashboardName)).toBeInTheDocument();
});
});

View File

@@ -0,0 +1,85 @@
import { render } from '@testing-library/react';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import Highlights from '../Highlights';
import { getMockMetricHighlightsData } from './testUtlls';
import { formatTimestampToReadableDate } from '../utils';
const MOCK_METRIC_NAME = 'test-metric';
const METRIC_DETAILS_GRID_VALUE_SELECTOR = '.metric-details-grid-value';
const useGetMetricHighlightsMock = jest.spyOn(
metricsExplorerHooks,
'useGetMetricHighlights',
);
describe('Highlights', () => {
beforeEach(() => {
useGetMetricHighlightsMock.mockReturnValue(getMockMetricHighlightsData());
});
it('should render all highlights data correctly', () => {
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
const metricHighlightsValues = container.querySelectorAll(
METRIC_DETAILS_GRID_VALUE_SELECTOR,
);
expect(metricHighlightsValues).toHaveLength(3);
expect(metricHighlightsValues[0].textContent).toBe('1M+');
expect(metricHighlightsValues[1].textContent).toBe('1M total ⎯ 1M active');
expect(metricHighlightsValues[2].textContent).toBe(
formatTimestampToReadableDate('2026-01-24T00:00:00Z'),
);
});
it('should render "-" for highlights data when there is an error', () => {
useGetMetricHighlightsMock.mockReturnValue(
getMockMetricHighlightsData(
{},
{
isError: true,
},
),
);
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
const metricHighlightsValues = container.querySelectorAll(
METRIC_DETAILS_GRID_VALUE_SELECTOR,
);
expect(metricHighlightsValues[0].textContent).toBe('-');
expect(metricHighlightsValues[1].textContent).toBe('-');
expect(metricHighlightsValues[2].textContent).toBe('-');
});
it('should render loading state when data is loading', () => {
useGetMetricHighlightsMock.mockReturnValue(
getMockMetricHighlightsData(
{},
{
isLoading: true,
},
),
);
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
expect(container.querySelector('.ant-skeleton')).toBeInTheDocument();
});
it('should not render grid values when there is no data', () => {
useGetMetricHighlightsMock.mockReturnValue(
getMockMetricHighlightsData({
data: undefined,
}),
);
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
const metricHighlightsValues = container.querySelectorAll(
METRIC_DETAILS_GRID_VALUE_SELECTOR,
);
expect(metricHighlightsValues).toHaveLength(0);
});
});

View File

@@ -1,16 +1,22 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { fireEvent, render, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import {
UniversalYAxisUnit,
YAxisUnitSelectorProps,
} from 'components/YAxisUnitSelector/types';
import * as useUpdateMetricMetadataHooks from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import * as useNotificationsHooks from 'hooks/useNotifications';
import { userEvent } from 'tests/test-utils';
import { SelectOption } from 'types/common/select';
import Metadata from '../Metadata';
import { MetricMetadata } from '../types';
import { transformMetricMetadata } from '../utils';
import { getMockMetricMetadataData } from './testUtlls';
import { GetMetricMetadata200 } from 'api/generated/services/sigNoz.schemas';
import { AxiosResponse } from 'axios';
// Mock antd select for testing
jest.mock('antd', () => ({
@@ -72,13 +78,18 @@ jest.mock('react-query', () => ({
}),
}));
const mockUseUpdateMetricMetadataHook = jest.spyOn(
metricsExplorerHooks,
'useUpdateMetricMetadata',
);
type UseUpdateMetricMetadataResult = ReturnType<
typeof metricsExplorerHooks.useUpdateMetricMetadata
>;
const mockUseUpdateMetricMetadata = jest.fn();
jest
.spyOn(useUpdateMetricMetadataHooks, 'useUpdateMetricMetadata')
.mockReturnValue({
mutate: mockUseUpdateMetricMetadata,
isLoading: false,
} as any);
const mockMetricMetadata = transformMetricMetadata(
(getMockMetricMetadataData().data as AxiosResponse<GetMetricMetadata200>).data,
) as MetricMetadata;
const mockErrorNotification = jest.fn();
const mockSuccessNotification = jest.fn();
@@ -90,26 +101,26 @@ jest.spyOn(useNotificationsHooks, 'useNotifications').mockReturnValue({
} as any);
const mockMetricName = 'test_metric';
const mockMetricMetadata = {
metric_type: MetricType.GAUGE,
description: 'test_description',
unit: 'test_unit',
temporality: Temporality.DELTA,
};
const mockRefetchMetricDetails = jest.fn();
describe('Metadata', () => {
beforeEach(() => {
mockUseUpdateMetricMetadataHook.mockReturnValue(({
mutate: mockUseUpdateMetricMetadata,
} as Partial<UseUpdateMetricMetadataResult>) as UseUpdateMetricMetadataResult);
});
it('should render the metadata properly', () => {
render(
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
expect(screen.getByText('Metric Type')).toBeInTheDocument();
expect(screen.getByText(mockMetricMetadata.metric_type)).toBeInTheDocument();
expect(screen.getByText(mockMetricMetadata.metricType)).toBeInTheDocument();
expect(screen.getByText('Description')).toBeInTheDocument();
expect(screen.getByText(mockMetricMetadata.description)).toBeInTheDocument();
expect(screen.getByText('Unit')).toBeInTheDocument();
@@ -118,18 +129,19 @@ describe('Metadata', () => {
expect(screen.getByText(mockMetricMetadata.temporality)).toBeInTheDocument();
});
it('editing the metadata should show the form inputs', () => {
it('editing the metadata should show the form inputs', async () => {
render(
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
expect(editButton).toBeInTheDocument();
fireEvent.click(editButton);
await userEvent.click(editButton);
expect(screen.getByTestId('metric-type-select')).toBeInTheDocument();
expect(screen.getByTestId('temporality-select')).toBeInTheDocument();
@@ -144,52 +156,47 @@ describe('Metadata', () => {
...mockMetricMetadata,
unit: '',
}}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
expect(editButton).toBeInTheDocument();
fireEvent.click(editButton);
await userEvent.click(editButton);
const metricDescriptionInput = screen.getByTestId('description-input');
expect(metricDescriptionInput).toBeInTheDocument();
fireEvent.change(metricDescriptionInput, {
target: { value: 'Updated description' },
});
await userEvent.clear(metricDescriptionInput);
await userEvent.type(metricDescriptionInput, 'Updated description');
const metricTypeSelect = screen.getByTestId('metric-type-select');
expect(metricTypeSelect).toBeInTheDocument();
fireEvent.change(metricTypeSelect, {
target: { value: MetricType.SUM },
});
await userEvent.selectOptions(metricTypeSelect, MetricType.SUM);
const temporalitySelect = screen.getByTestId('temporality-select');
expect(temporalitySelect).toBeInTheDocument();
fireEvent.change(temporalitySelect, {
target: { value: Temporality.CUMULATIVE },
});
await userEvent.selectOptions(temporalitySelect, Temporality.CUMULATIVE);
const unitSelect = screen.getByTestId('unit-select');
expect(unitSelect).toBeInTheDocument();
fireEvent.change(unitSelect, {
target: { value: 'By' },
});
await userEvent.selectOptions(unitSelect, 'By');
const saveButton = screen.getByText('Save');
expect(saveButton).toBeInTheDocument();
fireEvent.click(saveButton);
await userEvent.click(saveButton);
expect(mockUseUpdateMetricMetadata).toHaveBeenCalledWith(
expect.objectContaining({
metricName: mockMetricName,
payload: expect.objectContaining({
description: 'Updated description',
metricType: MetricType.SUM,
data: expect.objectContaining({
type: MetricType.SUM,
temporality: Temporality.CUMULATIVE,
unit: 'By',
isMonotonic: true,
}),
pathParams: {
metricName: mockMetricName,
},
}),
expect.objectContaining({
onSuccess: expect.any(Function),
@@ -203,29 +210,28 @@ describe('Metadata', () => {
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
fireEvent.click(editButton);
await userEvent.click(editButton);
const metricDescriptionInput = screen.getByTestId('description-input');
fireEvent.change(metricDescriptionInput, {
target: { value: 'Updated description' },
});
await userEvent.clear(metricDescriptionInput);
await userEvent.type(metricDescriptionInput, 'Updated description');
const saveButton = screen.getByText('Save');
fireEvent.click(saveButton);
await userEvent.click(saveButton);
const onSuccessCallback =
mockUseUpdateMetricMetadata.mock.calls[0][1].onSuccess;
onSuccessCallback({ statusCode: 200 });
onSuccessCallback({ status: 200 });
expect(mockSuccessNotification).toHaveBeenCalledWith({
message: 'Metadata updated successfully',
});
expect(mockRefetchMetricDetails).toHaveBeenCalled();
});
it('should show error notification when metadata update fails with non-200 response', async () => {
@@ -233,24 +239,24 @@ describe('Metadata', () => {
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
fireEvent.click(editButton);
await userEvent.click(editButton);
const metricDescriptionInput = screen.getByTestId('description-input');
fireEvent.change(metricDescriptionInput, {
target: { value: 'Updated description' },
});
await userEvent.clear(metricDescriptionInput);
await userEvent.type(metricDescriptionInput, 'Updated description');
const saveButton = screen.getByText('Save');
fireEvent.click(saveButton);
await userEvent.click(saveButton);
const onSuccessCallback =
mockUseUpdateMetricMetadata.mock.calls[0][1].onSuccess;
onSuccessCallback({ statusCode: 500 });
onSuccessCallback({ status: 500 });
expect(mockErrorNotification).toHaveBeenCalledWith({
message:
@@ -263,20 +269,20 @@ describe('Metadata', () => {
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
fireEvent.click(editButton);
await userEvent.click(editButton);
const metricDescriptionInput = screen.getByTestId('description-input');
fireEvent.change(metricDescriptionInput, {
target: { value: 'Updated description' },
});
await userEvent.clear(metricDescriptionInput);
await userEvent.type(metricDescriptionInput, 'Updated description');
const saveButton = screen.getByText('Save');
fireEvent.click(saveButton);
await userEvent.click(saveButton);
const onErrorCallback = mockUseUpdateMetricMetadata.mock.calls[0][1].onError;
@@ -289,39 +295,41 @@ describe('Metadata', () => {
});
});
it('cancel button should cancel the edit mode', () => {
it('cancel button should cancel the edit mode', async () => {
render(
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
expect(editButton).toBeInTheDocument();
fireEvent.click(editButton);
await userEvent.click(editButton);
const cancelButton = screen.getByText('Cancel');
expect(cancelButton).toBeInTheDocument();
fireEvent.click(cancelButton);
await userEvent.click(cancelButton);
const editButton2 = screen.getByText('Edit');
expect(editButton2).toBeInTheDocument();
});
it('should not allow editing of unit if it is already set', () => {
it('should not allow editing of unit if it is already set', async () => {
render(
<Metadata
metricName={mockMetricName}
metadata={mockMetricMetadata}
refetchMetricDetails={mockRefetchMetricDetails}
isErrorMetricMetadata={false}
isLoadingMetricMetadata={false}
/>,
);
const editButton = screen.getByText('Edit');
expect(editButton).toBeInTheDocument();
fireEvent.click(editButton);
await userEvent.click(editButton);
const unitSelect = screen.queryByTestId('unit-select');
expect(unitSelect).not.toBeInTheDocument();

View File

@@ -1,68 +1,16 @@
import { fireEvent, render, screen } from '@testing-library/react';
import { MetricDetails as MetricDetailsType } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import { render, screen } from '@testing-library/react';
import ROUTES from 'constants/routes';
import * as useGetMetricDetails from 'hooks/metricsExplorer/useGetMetricDetails';
import * as useUpdateMetricMetadata from 'hooks/metricsExplorer/useUpdateMetricMetadata';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import * as useHandleExplorerTabChange from 'hooks/useHandleExplorerTabChange';
import { userEvent } from 'tests/test-utils';
import MetricDetails from '../MetricDetails';
import { getMockMetricMetadataData } from './testUtlls';
const mockMetricName = 'test-metric';
const mockMetricDescription = 'description for a test metric';
const mockMetricData: MetricDetailsType = {
name: mockMetricName,
description: mockMetricDescription,
unit: 'count',
attributes: [
{
key: 'test-attribute',
value: ['test-value'],
valueCount: 1,
},
],
alerts: [],
dashboards: [],
metadata: {
metric_type: MetricType.SUM,
description: mockMetricDescription,
unit: 'count',
},
type: '',
timeseries: 0,
samples: 0,
timeSeriesTotal: 0,
timeSeriesActive: 0,
lastReceived: '',
};
const mockOpenInspectModal = jest.fn();
const mockOnClose = jest.fn();
const mockUseGetMetricDetailsData = {
data: {
payload: {
data: mockMetricData,
},
},
isLoading: false,
isFetching: false,
isError: false,
error: null,
refetch: jest.fn(),
};
jest
.spyOn(useGetMetricDetails, 'useGetMetricDetails')
.mockReturnValue(mockUseGetMetricDetailsData as any);
jest.spyOn(useUpdateMetricMetadata, 'useUpdateMetricMetadata').mockReturnValue({
mutate: jest.fn(),
isLoading: false,
isError: false,
error: null,
} as any);
const mockHandleExplorerTabChange = jest.fn();
jest
.spyOn(useHandleExplorerTabChange, 'useHandleExplorerTabChange')
@@ -88,7 +36,50 @@ jest.mock('react-query', () => ({
}),
}));
jest.mock(
'container/MetricsExplorer/MetricDetails/AllAttributes',
() =>
function MockAllAttributes(): JSX.Element {
return <div data-testid="all-attributes">All Attributes</div>;
},
);
jest.mock(
'container/MetricsExplorer/MetricDetails/DashboardsAndAlertsPopover',
() =>
function MockDashboardsAndAlertsPopover(): JSX.Element {
return (
<div data-testid="dashboards-and-alerts-popover">
Dashboards and Alerts Popover
</div>
);
},
);
jest.mock(
'container/MetricsExplorer/MetricDetails/Highlights',
() =>
function MockHighlights(): JSX.Element {
return <div data-testid="highlights">Highlights</div>;
},
);
jest.mock(
'container/MetricsExplorer/MetricDetails/Metadata',
() =>
function MockMetadata(): JSX.Element {
return <div data-testid="metadata">Metadata</div>;
},
);
const useGetMetricMetadataMock = jest.spyOn(
metricsExplorerHooks,
'useGetMetricMetadata',
);
describe('MetricDetails', () => {
beforeEach(() => {
useGetMetricMetadataMock.mockReturnValue(getMockMetricMetadataData());
});
it('renders metric details correctly', () => {
render(
<MetricDetails
@@ -101,27 +92,15 @@ describe('MetricDetails', () => {
);
expect(screen.getByText(mockMetricName)).toBeInTheDocument();
expect(screen.getByText(mockMetricDescription)).toBeInTheDocument();
expect(screen.getByTestId('all-attributes')).toBeInTheDocument();
expect(
screen.getByText(getUniversalNameFromMetricUnit(mockMetricData.unit)),
screen.getByTestId('dashboards-and-alerts-popover'),
).toBeInTheDocument();
expect(screen.getByTestId('highlights')).toBeInTheDocument();
expect(screen.getByTestId('metadata')).toBeInTheDocument();
});
it('renders the "open in explorer" and "inspect" buttons', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValueOnce({
...mockUseGetMetricDetailsData,
data: {
payload: {
data: {
...mockMetricData,
metadata: {
...mockMetricData.metadata,
metric_type: MetricType.GAUGE,
},
},
},
},
} as any);
it('renders the "open in explorer" and "inspect" buttons', async () => {
render(
<MetricDetails
onClose={mockOnClose}
@@ -135,93 +114,24 @@ describe('MetricDetails', () => {
expect(screen.getByTestId('open-in-explorer-button')).toBeInTheDocument();
expect(screen.getByTestId('inspect-metric-button')).toBeInTheDocument();
fireEvent.click(screen.getByTestId('open-in-explorer-button'));
await userEvent.click(screen.getByTestId('open-in-explorer-button'));
expect(mockHandleExplorerTabChange).toHaveBeenCalled();
fireEvent.click(screen.getByTestId('inspect-metric-button'));
await userEvent.click(screen.getByTestId('inspect-metric-button'));
expect(mockOpenInspectModal).toHaveBeenCalled();
});
it('should render error state when metric details are not found', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValue({
...mockUseGetMetricDetailsData,
isError: true,
error: {
message: 'Error fetching metric details',
},
} as any);
it('should render empty state when metric name is not provided', () => {
render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
metricName={null}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);
expect(screen.getByText('Error fetching metric details')).toBeInTheDocument();
});
it('should render loading state when metric details are loading', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValue({
...mockUseGetMetricDetailsData,
isLoading: true,
} as any);
render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);
expect(screen.getByTestId('metric-details-skeleton')).toBeInTheDocument();
});
it('should render all attributes section', () => {
jest
.spyOn(useGetMetricDetails, 'useGetMetricDetails')
.mockReturnValue(mockUseGetMetricDetailsData as any);
render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);
expect(screen.getByText('All Attributes')).toBeInTheDocument();
});
it('should not render all attributes section when relevant data is not present', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValue({
...mockUseGetMetricDetailsData,
data: {
payload: {
data: {
...mockMetricData,
attributes: null,
},
},
},
} as any);
render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);
expect(screen.queryByText('All Attributes')).not.toBeInTheDocument();
expect(screen.getByText('Metric not found')).toBeInTheDocument();
});
});

View File

@@ -0,0 +1,175 @@
import {
GetMetricAlerts200,
GetMetricAttributes200,
GetMetricDashboards200,
GetMetricHighlights200,
GetMetricMetadata200,
} from 'api/generated/services/sigNoz.schemas';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import { Temporality } from 'types/common/queryBuilder';
export function getMockMetricHighlightsData(
overrides?: Partial<GetMetricHighlights200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricHighlights> {
return {
data: {
data: {
data: {
dataPoints: 1000000,
lastReceived: '2026-01-24T00:00:00Z',
totalTimeSeries: 1000000,
activeTimeSeries: 1000000,
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricHighlights>;
}
export const MOCK_DASHBOARD_1 = {
dashboardName: 'Dashboard 1',
dashboardId: '1',
widgetId: '1',
widgetName: 'Widget 1',
};
export const MOCK_DASHBOARD_2 = {
dashboardName: 'Dashboard 2',
dashboardId: '2',
widgetId: '2',
widgetName: 'Widget 2',
};
export const MOCK_ALERT_1 = {
alertName: 'Alert 1',
alertId: '1',
};
export const MOCK_ALERT_2 = {
alertName: 'Alert 2',
alertId: '2',
};
export function getMockDashboardsData(
overrides?: Partial<GetMetricDashboards200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricDashboards> {
return {
data: {
data: {
data: {
dashboards: [MOCK_DASHBOARD_1, MOCK_DASHBOARD_2],
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricDashboards>;
}
export function getMockAlertsData(
overrides?: Partial<GetMetricAlerts200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricAlerts> {
return {
data: {
data: {
data: {
alerts: [MOCK_ALERT_1, MOCK_ALERT_2],
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricAlerts>;
}
export function getMockMetricAttributesData(
overrides?: Partial<GetMetricAttributes200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricAttributes> {
return {
data: {
data: {
data: {
attributes: [
{
key: 'attribute1',
values: ['value1', 'value2'],
valueCount: 2,
},
{
key: 'attribute2',
values: ['value3'],
valueCount: 1,
},
],
totalKeys: 2,
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricAttributes>;
}
export function getMockMetricMetadataData(
overrides?: Partial<GetMetricMetadata200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricMetadata> {
return {
data: {
data: {
data: {
description: 'test_description',
type: 'gauge',
unit: 'test_unit',
temporality: Temporality.Delta,
isMonotonic: false,
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricMetadata>;
}

View File

@@ -1,6 +1,6 @@
export const METRIC_METADATA_KEYS = {
description: 'Description',
unit: 'Unit',
metric_type: 'Metric Type',
metricType: 'Metric Type',
temporality: 'Temporality',
};

View File

@@ -1,10 +1,13 @@
import {
MetricDetails,
MetricDetailsAlert,
MetricDetailsAttribute,
MetricDetailsDashboard,
} from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
MetricsexplorertypesMetricAlertDTO,
MetricsexplorertypesMetricAttributeDTO,
MetricsexplorertypesMetricDashboardDTO,
MetricsexplorertypesMetricHighlightsResponseDTO,
MetricsexplorertypesMetricMetadataDTO,
MetricsexplorertypesMetricMetadataDTOType,
MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality,
MetricsexplorertypesUpdateMetricMetadataRequestDTOType,
} from 'api/generated/services/sigNoz.schemas';
export interface MetricDetailsProps {
onClose: () => void;
@@ -14,21 +17,23 @@ export interface MetricDetailsProps {
openInspectModal?: (metricName: string) => void;
}
export interface HighlightsProps {
metricName: string;
}
export interface DashboardsAndAlertsPopoverProps {
dashboards: MetricDetailsDashboard[] | null;
alerts: MetricDetailsAlert[] | null;
metricName: string;
}
export interface MetadataProps {
metricName: string;
metadata: MetricDetails['metadata'] | undefined;
refetchMetricDetails: () => void;
metadata: MetricMetadata | null;
isErrorMetricMetadata: boolean;
isLoadingMetricMetadata: boolean;
}
export interface AllAttributesProps {
attributes: MetricDetailsAttribute[];
metricName: string;
metricType: MetricType | undefined;
metricType: MetricsexplorertypesMetricMetadataDTOType | undefined;
}
export interface AllAttributesValueProps {
@@ -36,3 +41,27 @@ export interface AllAttributesValueProps {
filterValue: string[];
goToMetricsExploreWithAppliedAttribute: (key: string, value: string) => void;
}
export type MetricHighlight = MetricsexplorertypesMetricHighlightsResponseDTO;
export type MetricAlert = MetricsexplorertypesMetricAlertDTO;
export type MetricDashboard = MetricsexplorertypesMetricDashboardDTO;
export type MetricMetadata = MetricsexplorertypesMetricMetadataDTO;
export interface MetricMetadataState {
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
description: string;
temporality?: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
unit: string;
}
export type MetricAttribute = MetricsexplorertypesMetricAttributeDTO;
export enum TableFields {
DESCRIPTION = 'description',
UNIT = 'unit',
TYPE = 'type',
Temporality = 'temporality',
IS_MONOTONIC = 'isMonotonic',
}
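// Illustrative value of the state shape above (the example data is assumed, not taken from this
// change); temporality is optional, matching metrics that do not report one.
const exampleMetadataState: MetricMetadataState = {
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType.gauge,
description: 'Process resident memory',
unit: 'bytes',
};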

View File

@@ -1,12 +1,25 @@
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataMutationBody } from 'api/generated/services/metrics';
import {
GetMetricMetadata200,
MetricsexplorertypesMetricMetadataDTOTemporality,
MetricsexplorertypesMetricMetadataDTOType,
MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality,
MetricsexplorertypesUpdateMetricMetadataRequestDTOType,
} from 'api/generated/services/sigNoz.schemas';
import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
import { initialQueriesMap } from 'constants/queryBuilder';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
export function formatTimestampToReadableDate(timestamp: string): string {
import { MetricMetadata, MetricMetadataState } from './types';
export function formatTimestampToReadableDate(
timestamp: number | string | undefined,
): string {
if (!timestamp) {
return '-';
}
const date = new Date(timestamp);
const now = new Date();
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000);
@@ -39,7 +52,10 @@ export function formatTimestampToReadableDate(timestamp: string): string {
return date.toLocaleDateString();
}
export function formatNumberToCompactFormat(num: number): string {
export function formatNumberToCompactFormat(num: number | undefined): string {
if (!num) {
return '-';
}
return new Intl.NumberFormat('en-US', {
notation: 'compact',
maximumFractionDigits: 1,
@@ -47,27 +63,37 @@ export function formatNumberToCompactFormat(num: number): string {
}
export function determineIsMonotonic(
metricType: MetricType,
temporality?: Temporality,
metricType: MetricsexplorertypesUpdateMetricMetadataRequestDTOType,
temporality?: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality,
): boolean {
if (
metricType === MetricType.HISTOGRAM ||
metricType === MetricType.EXPONENTIAL_HISTOGRAM
metricType ===
MetricsexplorertypesUpdateMetricMetadataRequestDTOType.histogram ||
metricType ===
MetricsexplorertypesUpdateMetricMetadataRequestDTOType.exponentialhistogram
) {
return true;
}
if (metricType === MetricType.GAUGE || metricType === MetricType.SUMMARY) {
if (
metricType === MetricsexplorertypesUpdateMetricMetadataRequestDTOType.gauge ||
metricType === MetricsexplorertypesUpdateMetricMetadataRequestDTOType.summary
) {
return false;
}
if (metricType === MetricType.SUM) {
return temporality === Temporality.CUMULATIVE;
if (
metricType === MetricsexplorertypesUpdateMetricMetadataRequestDTOType.sum
) {
return (
temporality ===
MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.cumulative
);
}
return false;
}
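// Behaviour sketch for the helper above (illustrative calls, not part of this change): histograms
// are always treated as monotonic, gauges and summaries never are, and sums are monotonic only
// when their temporality is cumulative.
// determineIsMonotonic(MetricsexplorertypesUpdateMetricMetadataRequestDTOType.histogram) -> true
// determineIsMonotonic(MetricsexplorertypesUpdateMetricMetadataRequestDTOType.gauge) -> false
// determineIsMonotonic(
//   MetricsexplorertypesUpdateMetricMetadataRequestDTOType.sum,
//   MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.cumulative,
// ) -> true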
export function getMetricDetailsQuery(
metricName: string,
metricType: MetricType | undefined,
metricType: MetricsexplorertypesMetricMetadataDTOType | undefined,
filter?: { key: string; value: string },
groupBy?: string,
): Query {
@@ -75,23 +101,23 @@ export function getMetricDetailsQuery(
let spaceAggregation;
let aggregateOperator;
switch (metricType) {
case MetricType.SUM:
case MetricsexplorertypesMetricMetadataDTOType.sum:
timeAggregation = 'rate';
spaceAggregation = 'sum';
aggregateOperator = 'rate';
break;
case MetricType.GAUGE:
case MetricsexplorertypesMetricMetadataDTOType.gauge:
timeAggregation = 'avg';
spaceAggregation = 'avg';
aggregateOperator = 'avg';
break;
case MetricType.SUMMARY:
case MetricsexplorertypesMetricMetadataDTOType.summary:
timeAggregation = 'noop';
spaceAggregation = 'sum';
aggregateOperator = 'noop';
break;
case MetricType.HISTOGRAM:
case MetricType.EXPONENTIAL_HISTOGRAM:
case MetricsexplorertypesMetricMetadataDTOType.histogram:
case MetricsexplorertypesMetricMetadataDTOType.exponentialhistogram:
timeAggregation = 'noop';
spaceAggregation = 'p90';
aggregateOperator = 'noop';
@@ -160,3 +186,69 @@ export function getMetricDetailsQuery(
},
};
}
export function transformMetricMetadata(
apiData: GetMetricMetadata200 | undefined,
): MetricMetadata | null {
if (!apiData || !apiData.data) {
return null;
}
const { type, description, unit, temporality, isMonotonic } = apiData.data;
return {
type,
description,
unit,
temporality,
isMonotonic,
};
}
export function transformUpdateMetricMetadataRequest(
metricName: string,
metricMetadata: MetricMetadataState,
): UpdateMetricMetadataMutationBody {
return {
metricName,
type: metricMetadata.type,
description: metricMetadata.description,
unit: metricMetadata.unit,
temporality:
metricMetadata.temporality ??
MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.unspecified,
isMonotonic: determineIsMonotonic(
metricMetadata.type,
metricMetadata.temporality,
),
};
}
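// Illustrative call (the example metric and values are assumed, not taken from this change):
// building the update payload for a cumulative sum metric; isMonotonic is derived via
// determineIsMonotonic above.
const exampleUpdateRequest: UpdateMetricMetadataMutationBody = transformUpdateMetricMetadataRequest(
'http_requests_total',
{
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType.sum,
description: 'Total HTTP requests',
temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.cumulative,
unit: '{requests}',
},
);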
export function transformMetricType(
type: MetricsexplorertypesMetricMetadataDTOType,
): MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
switch (type) {
case MetricsexplorertypesMetricMetadataDTOType.sum:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOType.sum;
case MetricsexplorertypesMetricMetadataDTOType.gauge:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOType.gauge;
case MetricsexplorertypesMetricMetadataDTOType.histogram:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOType.histogram;
case MetricsexplorertypesMetricMetadataDTOType.summary:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOType.summary;
case MetricsexplorertypesMetricMetadataDTOType.exponentialhistogram:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOType.exponentialhistogram;
}
}
export function transformTemporality(
temporality: MetricsexplorertypesMetricMetadataDTOTemporality,
): MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
switch (temporality) {
case MetricsexplorertypesMetricMetadataDTOTemporality.delta:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.delta;
case MetricsexplorertypesMetricMetadataDTOTemporality.cumulative:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.cumulative;
case MetricsexplorertypesMetricMetadataDTOTemporality.unspecified:
return MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality.unspecified;
}
}
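// Illustrative round trip (assumed values): metadata read from the GET response uses the
// MetricMetadataDTO enums, while the update request uses the UpdateMetricMetadataRequestDTO
// enums, so the two converters above bridge them when pre-filling the edit form.
const exampleRequestType = transformMetricType(
MetricsexplorertypesMetricMetadataDTOType.sum,
);
const exampleRequestTemporality = transformTemporality(
MetricsexplorertypesMetricMetadataDTOTemporality.cumulative,
);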

View File

@@ -34,6 +34,7 @@ import {
formatDataForMetricsTable,
getMetricsListQuery,
} from './utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import './Summary.styles.scss';
@@ -128,7 +129,7 @@ function Summary(): JSX.Element {
} = useGetMetricsList(metricsListQuery, {
enabled: !!metricsListQuery && !isInspectModalOpen,
queryKey: [
'metricsList',
REACT_QUERY_KEY.GET_METRICS_LIST,
queryFiltersWithoutId,
orderBy,
pageSize,

View File

@@ -157,9 +157,12 @@ function ValidateRowValueWrapper({
}
export const formatNumberIntoHumanReadableFormat = (
num: number,
num: number | undefined,
addPlusSign = true,
): string => {
if (!num) {
return '-';
}
function format(num: number, divisor: number, suffix: string): string {
const value = num / divisor;
return value % 1 === 0

View File

@@ -1,5 +1,5 @@
import { useQueries, UseQueryOptions, UseQueryResult } from 'react-query';
import { getMetricMetadata } from 'api/metricsExplorer/v2/getMetricMetadata';
import { getMetricMetadata } from 'api/generated/services/metrics';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { SuccessResponseV2 } from 'types/api';
import { MetricMetadataResponse } from 'types/api/metricsExplorer/v2/getMetricMetadata';

View File

@@ -0,0 +1,13 @@
export function pluralize(
count: number,
singular: string,
plural?: string,
): string {
if (count === 1) {
return `${count} ${singular}`;
}
if (plural) {
return `${count} ${plural}`;
}
return `${count} ${singular}s`;
}
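// Usage sketch (illustrative values): the helper falls back to appending "s" unless an irregular
// plural form is supplied.
// pluralize(1, 'dashboard') -> '1 dashboard'
// pluralize(3, 'dashboard') -> '3 dashboards'
// pluralize(2, 'query', 'queries') -> '2 queries'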

View File

@@ -1,127 +0,0 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}

View File

@@ -1,162 +0,0 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}

View File

@@ -0,0 +1,50 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -13,11 +13,11 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -42,8 +42,8 @@ type provider struct {
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
roleGetter role.Getter
roleHandler role.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
}
func NewFactory(
@@ -61,11 +61,31 @@ func NewFactory(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
fieldsHandler,
authzHandler,
)
})
}
@@ -87,8 +107,8 @@ func newProvider(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -109,11 +129,11 @@ func newProvider(
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
if err := provider.AddToRouter(router); err != nil {
return nil, err
@@ -175,6 +195,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -10,7 +10,7 @@ import (
)
func (provider *provider) addRoleRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
ID: "CreateRole",
Tags: []string{"role"},
Summary: "Create role",
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
ID: "ListRoles",
Tags: []string{"role"},
Summary: "List roles",
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
Summary: "Get role",
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
ID: "DeleteRole",
Tags: []string{"role"},
Summary: "Delete role",

View File

@@ -2,9 +2,11 @@ package authz
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -29,4 +31,76 @@ type AuthZ interface {
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered in the role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in the authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and its tuples in the authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstraps the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
// Bootstraps managed role transactions and user assignments
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -1,4 +1,4 @@
package implrole
package sqlauthzstore
import (
"context"
@@ -14,7 +14,7 @@ type store struct {
sqlstore sqlstore.SQLStore
}
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
return &store{sqlstore: sqlstore}
}

View File

@@ -2,35 +2,24 @@ package openfgaauthz
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type provider struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
server *openfgaserver.Server
store roletypes.Store
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
@@ -40,301 +29,194 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &provider{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
server: server,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := provider.getOrCreateModel(ctx, storeId)
if err != nil {
return err
}
provider.mtx.Lock()
provider.modelID = modelID
provider.storeID = storeId
provider.mtx.Unlock()
<-provider.stopChan
return nil
return provider.server.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
provider.openfgaServer.Close()
close(provider.stopChan)
return nil
return provider.server.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := provider.getStoreIDandModelID()
checkResponse, err := provider.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
return provider.server.Check(ctx, tupleReq)
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := provider.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := provider.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return provider.server.BatchCheck(ctx, tupleReq)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := provider.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
return provider.server.Write(ctx, additions, deletions)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := provider.getStoreIDandModelID()
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
return provider.server.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, nil, tuples)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
return err
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
return nil
}
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
}
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := provider.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
func (provider *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) getStoreIDandModelID() (string, string) {
provider.mtx.RLock()
defer provider.mtx.RUnlock()
storeID := provider.storeID
modelID := provider.modelID
return storeID, modelID
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (provider *provider) GetObjects(_ context.Context, _ valuer.UUID, _ valuer.UUID, _ authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"context"

View File

@@ -0,0 +1,334 @@
package openfgaserver
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type Server struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &Server{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (server *Server) Start(ctx context.Context) error {
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := server.getOrCreateModel(ctx, storeID)
if err != nil {
return err
}
server.mtx.Lock()
server.modelID = modelID
server.storeID = storeID
server.mtx.Unlock()
<-server.stopChan
return nil
}
func (server *Server) Stop(ctx context.Context) error {
server.openfgaServer.Close()
close(server.stopChan)
return nil
}
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := server.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := server.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := server.getStoreIDandModelID()
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := server.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (server *Server) getStoreIDandModelID() (string, string) {
server.mtx.RLock()
defer server.mtx.RUnlock()
storeID := server.storeID
modelID := server.modelID
return storeID, modelID
}

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {
expectedModel := `module base
type user`
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)
storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -1,12 +1,12 @@
package implrole
package signozauthzapi
import (
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,12 +14,11 @@ import (
)
type handler struct {
setter role.Setter
getter role.Getter
authz authz.AuthZ
}
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
return &handler{setter: setter, getter: getter}
func NewHandler(authz authz.AuthZ) authz.Handler {
return &handler{authz: authz}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -36,7 +35,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
@@ -64,7 +63,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
if err != nil {
render.Error(rw, err)
return
@@ -103,7 +102,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
return
}
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
if err != nil {
render.Error(rw, err)
return
@@ -114,7 +113,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
resources := handler.setter.GetResources(ctx)
resources := handler.authz.GetResources(ctx)
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
@@ -134,7 +133,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
return
}
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
if err != nil {
render.Error(rw, err)
return
@@ -163,7 +162,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -175,7 +174,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
if err != nil {
render.Error(rw, err)
return
@@ -210,7 +209,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -222,7 +221,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
if err != nil {
render.Error(rw, err)
return
@@ -245,7 +244,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return

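For orientation, here is a minimal sketch of how the new authz-backed handler might be constructed and mounted on a router. Only NewHandler(authz.AuthZ) and the handler method names are taken from the hunk above; the import path for the package, the "/api/v1/roles" paths, and the use of gorilla/mux are illustrative assumptions.

package example

import (
	"net/http"

	"github.com/gorilla/mux"

	"github.com/SigNoz/signoz/pkg/authz"
	signozauthzapi "github.com/SigNoz/signoz/pkg/authz/signozauthzapi" // assumed import path; the diff does not show file locations
)

// wireRoleRoutes registers a few of the role endpoints served by the handler.
// The paths below are hypothetical; the handler methods mirror the old
// role.Handler interface that is deleted later in this changeset.
func wireRoleRoutes(router *mux.Router, authzService authz.AuthZ) {
	h := signozauthzapi.NewHandler(authzService)
	sub := router.PathPrefix("/api/v1").Subrouter()
	sub.HandleFunc("/roles", h.Create).Methods(http.MethodPost)
	sub.HandleFunc("/roles", h.List).Methods(http.MethodGet)
	sub.HandleFunc("/roles/{id}", h.Delete).Methods(http.MethodDelete)
}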
View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -24,15 +23,14 @@ type AuthZ struct {
logger *slog.Logger
orgGetter organization.Getter
authzService authz.AuthZ
roleGetter role.Getter
}
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {

View File

@@ -4,7 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {
statsreporter.StatsCollector
role.RegisterTypeable
authz.RegisterTypeable
}
type Handler interface {

View File

@@ -206,6 +206,10 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return nil
}
// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")

View File

@@ -0,0 +1,11 @@
package fields
import "net/http"
type Handler interface {
// Gets the fields keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the fields values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -0,0 +1,79 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}

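As a usage note, the handler above can be wired the same way the removed RegisterFieldsRoutes did it (see the api.go hunk further below). This sketch reuses those paths, but the actual registration now happens through the generated OpenAPI layer, which this diff does not show, so treat the routing here as an assumption.

package example

import (
	"net/http"

	"github.com/gorilla/mux"

	"github.com/SigNoz/signoz/pkg/modules/fields"
)

// registerFieldsRoutes mounts the two fields endpoints on a sub-router, mirroring
// the routes that api.go drops in this changeset.
func registerFieldsRoutes(router *mux.Router, h fields.Handler) {
	sub := router.PathPrefix("/api/v1").Subrouter()
	sub.HandleFunc("/fields/keys", h.GetFieldsKeys).Methods(http.MethodGet)
	sub.HandleFunc("/fields/values", h.GetFieldsValues).Methods(http.MethodGet)
}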
View File

@@ -1,63 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type getter struct {
store roletypes.Store
}
func NewGetter(store roletypes.Store) role.Getter {
return &getter{store: store}
}
func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := getter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}

View File

@@ -1,83 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type granter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
return &granter{store: store, authz: authz}
}
func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, tuples, nil)
}
func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := granter.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = granter.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, nil, tuples)
}
func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
return nil
}

View File

@@ -1,53 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
return &setter{store: store, authz: authz}
}
func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,85 +0,0 @@
package role
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Setter interface {
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type Getter interface {
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}
type Granter interface {
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -8,11 +8,11 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -32,13 +32,13 @@ type Module struct {
emailing emailing.Emailing
settings factory.ScopedProviderSettings
orgSetter organization.Setter
granter role.Granter
authz authz.AuthZ
analytics analytics.Analytics
config user.Config
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -47,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
settings: settings,
orgSetter: orgSetter,
analytics: analytics,
granter: granter,
authz: authz,
config: config,
}
}
@@ -172,7 +172,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
@@ -238,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
if user.Role != existingUser.Role {
err = m.granter.ModifyGrant(ctx,
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
@@ -301,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
}
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -504,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
}
managedRoles := roletypes.NewManagedRoles(organization.ID)
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
if err != nil {
return nil, err
}
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
if err != nil {
return err
}

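The hunks above lean on grants and revokes being idempotent so that retries are harmless. Below is a minimal sketch of a retry wrapper built on that property; the Grant signature is inferred from the call sites in this hunk, and the interface and retry policy are illustrative only.

package example

import (
	"context"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// granter captures only the single method this sketch needs; the real
// authz.AuthZ interface is larger and is not shown in this diff.
type granter interface {
	Grant(ctx context.Context, orgID valuer.UUID, roleName string, subject string) error
}

// grantWithRetry re-runs a grant after transient failures. Because granting is
// idempotent, repeating it never double-assigns the role.
func grantWithRetry(ctx context.Context, g granter, orgID valuer.UUID, roleName, subject string) error {
	var err error
	for attempt := 1; attempt <= 3; attempt++ {
		if err = g.Grant(ctx, orgID, roleName, subject); err == nil {
			return nil
		}
		time.Sleep(time.Duration(attempt) * 100 * time.Millisecond)
	}
	return err
}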
View File

@@ -25,7 +25,6 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -69,7 +68,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
"go.uber.org/zap"
@@ -104,10 +103,11 @@ type APIHandler struct {
querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
// temporalityMap is a map of metric name to temporality
// to avoid fetching temporality for the same metric multiple times
// querying the v4 table on low cardinal temporality column
// should be fast but we can still avoid the query if we have the data in memory
// temporalityMap is a map of metric name to temporality to avoid fetching
// temporality for the same metric multiple times.
//
// Querying the v4 table on a low cardinal temporality column should be
// fast, but we can still avoid the query if we have the data in memory.
temporalityMap map[string]map[v3.Temporality]bool
temporalityMux sync.Mutex
@@ -145,8 +145,6 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -177,8 +175,6 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -243,7 +239,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -399,13 +394,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
@@ -1023,7 +1011,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
// the query range is calculated based on the rule's evalWindow and evalDelay
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
if rule.AlertType == ruletypes.AlertTypeLogs {
if rule.Version != "v5" {
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
@@ -1230,12 +1218,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
dashboard := new(dashboardtypes.Dashboard)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
return
}
dashboard = cloudintegrationDashboard
dashboard = cloudIntegrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1564,13 +1552,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
responseData := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.Respond(w, response_data)
aH.Respond(w, responseData)
}
@@ -2652,12 +2640,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2705,12 +2693,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2759,12 +2747,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2813,12 +2801,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2870,12 +2858,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
@@ -2981,12 +2969,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3035,12 +3023,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3089,12 +3077,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3149,12 +3137,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
@@ -4138,11 +4126,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
aH.Respond(w, payload)
}
// listLogsPipelines lists logs piplines for latest version
// listLogsPipelines lists logs pipelines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
*logparsingpipeline.PipelinesResponse, error,
) {
// get lateset agent config
// get latest agent config
latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
@@ -4439,7 +4427,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4456,7 +4444,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, errQuriesByName)
RespondError(w, apiErr, errQueriesByName)
return
}
// get the fields if any logs query is present
@@ -4467,7 +4455,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4512,11 +4500,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQuriesByName {
for name, err := range errQueriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4792,7 +4780,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4822,7 +4810,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4845,11 +4833,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQuriesByName {
for name, err := range errQueriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4866,7 +4854,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")

View File

@@ -17,7 +17,6 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -209,13 +207,12 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
api.RegisterRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -5,10 +5,10 @@ import (
"os"
"regexp"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
func GetEvalDelay() time.Duration {
func GetEvalDelay() valuer.TextDuration {
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
if err != nil {
return 0
return valuer.TextDuration{}
}
return evalDelayDuration
}

View File

@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we look back from the current time, we look at data for the last
// evalWindow duration
evalWindow time.Duration
evalWindow valuer.TextDuration
// holdDuration is the duration for which the alert waits before firing
holdDuration time.Duration
holdDuration valuer.TextDuration
// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
evalDelay time.Duration
evalDelay valuer.TextDuration
// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation
// newGroupEvalDelay is the grace period for new alert groups
newGroupEvalDelay *time.Duration
newGroupEvalDelay valuer.TextDuration
queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}
func WithEvalDelay(dur time.Duration) RuleOption {
func WithEvalDelay(dur valuer.TextDuration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
evalWindow: time.Duration(p.EvalWindow),
evalWindow: p.EvalWindow,
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,13 +176,12 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}
// Store newGroupEvalDelay and groupBy keys from NotificationSettings
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
baseRule.newGroupEvalDelay = &newGroupEvalDelay
if p.NotificationSettings != nil {
baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
}
if baseRule.evalWindow == 0 {
baseRule.evalWindow = 5 * time.Minute
if baseRule.evalWindow.IsZero() {
baseRule.evalWindow = valuer.MustParseTextDuration("5m")
}
for _, opt := range opts {
@@ -245,15 +244,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}
func (r *BaseRule) EvalDelay() time.Duration {
func (r *BaseRule) EvalDelay() valuer.TextDuration {
return r.evalDelay
}
func (r *BaseRule) EvalWindow() time.Duration {
func (r *BaseRule) EvalWindow() valuer.TextDuration {
return r.evalWindow
}
func (r *BaseRule) HoldDuration() time.Duration {
func (r *BaseRule) HoldDuration() valuer.TextDuration {
return r.holdDuration
}
@@ -281,7 +280,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()
if r.evalDelay > 0 {
if r.evalDelay.IsPositive() {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
@@ -552,7 +551,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q
// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
return r.newGroupEvalDelay.IsPositive()
}
// isFilterNewSeriesSupported checks if the query is supported for new series filtering

View File
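The hunks in this file replace time.Duration fields with valuer.TextDuration. Below is a minimal sketch of the new type, restricted to the calls that actually appear in these diffs (parsing, zero/positive checks, millisecond arithmetic, and conversion back to time.Duration); anything else about the type is an assumption.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// ParseTextDuration returns an error for malformed input; MustParseTextDuration
	// panics instead. Both forms appear in the hunks above.
	evalDelay, err := valuer.ParseTextDuration("2m")
	if err != nil {
		evalDelay = valuer.TextDuration{} // zero value, as in GetEvalDelay's fallback
	}

	evalWindow := valuer.MustParseTextDuration("5m")
	if evalWindow.IsZero() {
		evalWindow = valuer.MustParseTextDuration("5m")
	}

	if evalDelay.IsPositive() {
		// Millisecond arithmetic, as used when shifting the evaluation window.
		fmt.Println(evalDelay.Milliseconds())
	}

	// Convert back to time.Duration for APIs that still expect the stdlib type.
	fmt.Println(evalWindow.Duration())
}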

@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay *time.Duration
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,7 +159,8 @@ type filterNewSeriesTestCase struct {
func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
defaultDelay := 2 * time.Minute
defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultGroupByFields := []string{"service_name", "env"}
logger := instrumentationtest.New().Logger()
@@ -202,7 +203,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -234,7 +235,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -261,7 +262,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -295,7 +296,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -325,7 +326,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -361,7 +362,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -390,7 +391,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -420,7 +421,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -454,7 +455,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -488,7 +489,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -532,7 +533,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -572,7 +573,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -604,7 +605,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -639,7 +640,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -697,20 +698,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
time.Duration(time.Second),
time.Second,
nil,
readerCache,
options,
)
// Set newGroupEvalDelay in NotificationSettings if provided
if tt.newGroupEvalDelay != nil {
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: func() *ruletypes.Duration {
d := ruletypes.Duration(*tt.newGroupEvalDelay)
return &d
}(),
}
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: tt.newGroupEvalDelay,
}
// Create BaseRule using NewBaseRule

View File

@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}
const taskNamesuffix = "webAppEditor"
const taskNameSuffix = "webAppEditor"
func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache
EvalDelay time.Duration
EvalDelay valuer.TextDuration
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, tr)
// create ch rule task for evalution
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create ch rule task for evaluation
task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, pr)
// create promql rule task for evalution
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create promql rule task for evaluation
task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}
// Stop the rule manager's rule evaluation cycles.
func (m *Manager) Stop(ctx context.Context) {
func (m *Manager) Stop(_ context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -336,7 +336,7 @@ func (m *Manager) Stop(ctx context.Context) {
zap.L().Info("Rule manager stopped")
}
// EditRuleDefinition writes the rule definition to the
// EditRule writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}
// If there is an another task with the same identifier, raise an error
// If there is another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,7 +678,8 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}
// RuleTasks returns the list of manager's rule tasks.
// RuleTasksWithoutLock returns the list of manager's rule tasks without
// acquiring a lock on the manager.
func (m *Manager) RuleTasksWithoutLock() []Task {
rgs := make([]Task, 0, len(m.tasks))
@@ -889,7 +890,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has not task, start one
// rule has no task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}

View File

@@ -9,6 +9,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -40,8 +41,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -99,8 +100,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",

View File

@@ -28,6 +28,8 @@ type PromRule struct {
prometheus prometheus.Prometheus
}
var _ Rule = (*PromRule)(nil)
func NewPromRule(
id string,
orgID valuer.UUID,
@@ -332,7 +334,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -396,7 +398,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: ruletypes.Duration(r.evalWindow),
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}
// newPromRuleTask holds rules that have promql condition
// and evalutes the rule at a given frequency
// NewPromRuleTask holds rules that have promql condition
// and evaluates the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))
if time.Now() == time.Now().Add(frequency) {
if frequency == 0 {
frequency = DefaultFrequency
}

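One small behavioral note on the guard above: the old code compared two separate time.Now() calls, which could only ever be equal when frequency is zero (and, with monotonic clock readings, rarely even then), so checking frequency == 0 directly states the intent. A tiny sketch of the resulting default follows; the same guard appears again in NewRuleTask in a later hunk. DefaultFrequency is taken from that hunk, while the helper itself is hypothetical.

package example

import "time"

// DefaultFrequency matches the constant declared in task.go later in this changeset.
const DefaultFrequency = 1 * time.Minute

// normalizeFrequency falls back to the default when no frequency was configured,
// which is the behavior both NewPromRuleTask and NewRuleTask now implement inline.
func normalizeFrequency(frequency time.Duration) time.Duration {
	if frequency == 0 {
		return DefaultFrequency
	}
	return frequency
}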
View File

@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
lookBackDelta := time.Minute
postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -19,9 +20,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
EvalDelay() time.Duration
EvalWindow() time.Duration
HoldDuration() time.Duration
EvalDelay() valuer.TextDuration
EvalWindow() valuer.TextDuration
HoldDuration() valuer.TextDuration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint

View File

@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {
if time.Now() == time.Now().Add(frequency) {
if frequency == 0 {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,6 +78,7 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }
// Interval returns the group's interval.
// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }
func (g *RuleTask) Pause(b bool) {

View File

@@ -61,6 +61,8 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -746,7 +748,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -812,7 +814,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: ruletypes.Duration(r.evalWindow),
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()
for idx, c := range cases {
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}
logger := instrumentationtest.New().Logger()
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
require.NoError(t, err)
now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -1,6 +1,8 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
@@ -11,14 +13,14 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -28,6 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -43,10 +46,21 @@ type Handlers struct {
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Role role.Handler
Fields fields.Handler
AuthzHandler authz.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -60,6 +74,7 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
}
}

View File

@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -41,13 +40,9 @@ func TestNewHandlers(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)

View File

@@ -25,7 +25,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -67,9 +66,6 @@ type Modules struct {
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
RoleSetter role.Setter
RoleGetter role.Getter
Granter role.Granter
}
func NewModules(
@@ -89,13 +85,10 @@ func NewModules(
queryParser queryparser.QueryParser,
config Config,
dashboard dashboard.Module,
roleSetter role.Setter,
roleGetter role.Getter,
granter role.Granter,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, granter, analytics, config.User)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -117,8 +110,5 @@ func NewModules(
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
RoleSetter: roleSetter,
RoleGetter: roleGetter,
Granter: granter,
}
}

View File

@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -41,10 +40,7 @@ func TestNewModules(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -15,11 +15,11 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -50,8 +50,8 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ gateway.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

View File

@@ -166,6 +166,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddAuthzIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
)
}
@@ -247,8 +248,8 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.GatewayHandler,
modules.RoleGetter,
handlers.Role,
handlers.Fields,
handlers.AuthzHandler,
),
)
}

View File

@@ -21,8 +21,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
@@ -89,10 +87,9 @@ func New(
sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]],
telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]],
authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Setter, role.Granter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
roleSetterCallback func(sqlstore.SQLStore, authz.AuthZ, licensing.Licensing, []role.RegisterTypeable) role.Setter,
) (*SigNoz, error) {
// Initialize instrumentation
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -284,11 +281,24 @@ func New(
// Initialize user getter
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
// Initialize the role getter
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
// Initialize query parser (needed for dashboard module)
queryParser := queryparser.New(providerSettings)
// Initialize dashboard module (needed for authz registry)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, queryParser, querier, licensing)
// Initialize authz
authzProviderFactory := authzCallback(ctx, sqlstore)
authzProviderFactory := authzCallback(ctx, sqlstore, licensing, dashboard)
authz, err := authzProviderFactory.New(ctx, providerSettings, authz.Config{})
if err != nil {
return nil, err
@@ -318,9 +328,6 @@ func New(
return nil, err
}
// Initialize query parser
queryParser := queryparser.New(providerSettings)
// Initialize ruler from the available ruler provider factories
ruler, err := factory.NewProviderFromNamedMap(
ctx,
@@ -333,16 +340,6 @@ func New(
return nil, err
}
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
gatewayFactory := gatewayProviderFactory(licensing)
gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
if err != nil {
@@ -390,13 +387,10 @@ func New(
}
// Initialize all modules
roleSetter := roleSetterCallback(sqlstore, authz, licensing, nil)
granter := implrole.NewGranter(implrole.NewStore(sqlstore), authz)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(

View File

@@ -0,0 +1,154 @@
package sqlmigration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/oklog/ulid/v2"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect"
"github.com/uptrace/bun/migrate"
)
type addAnonymousPublicDashboardTransaction struct {
sqlstore sqlstore.SQLStore
}
func NewAddAnonymousPublicDashboardTransactionFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_public_dashboard_txn"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newAddAnonymousPublicDashboardTransaction(ctx, ps, c, sqlstore)
})
}
func newAddAnonymousPublicDashboardTransaction(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore) (SQLMigration, error) {
return &addAnonymousPublicDashboardTransaction{
sqlstore: sqlstore,
}, nil
}
func (migration *addAnonymousPublicDashboardTransaction) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var storeID string
err = tx.QueryRowContext(ctx, `SELECT id FROM store WHERE name = ? LIMIT 1`, "signoz").Scan(&storeID)
if err != nil {
return err
}
// fetch all the orgs for which we need to insert the anonymous public dashboard transaction tuple.
orgIDs := []string{}
rows, err := tx.QueryContext(ctx, `SELECT id FROM organizations`)
if err != nil {
return err
}
defer rows.Close()
for rows.Next() {
var orgID string
if err := rows.Scan(&orgID); err != nil {
return err
}
orgIDs = append(orgIDs, orgID)
}
for _, orgID := range orgIDs {
// in openfga, the tuple and its changelog entry share the same ulid for writes.
// ref: https://github.com/openfga/openfga/blob/main/pkg/storage/sqlite/sqlite.go#L467
entropy := ulid.DefaultEntropy()
now := time.Now().UTC()
tupleID := ulid.MustNew(ulid.Timestamp(now), entropy).String()
// Add wildcard (*) transaction for signoz-anonymous role to read all public-dashboards
// This grants the signoz-anonymous role read access to all public dashboards in the organization
if migration.sqlstore.BunDB().Dialect().Name() == dialect.PG {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, _user, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, _user) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, _user, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
)
if err != nil {
return err
}
} else {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
)
if err != nil {
return err
}
}
}
err = tx.Commit()
if err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Down(context.Context, *bun.DB) error {
return nil
}
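
For orientation, a minimal sketch (not part of the diff) of the grant the migration above encodes, expressed as the openfgav1.TupleKey the typeables below build. The openfgav1 import path and the combined "type:id" object form are assumptions; the role constant, relation, and identifier layout come from the SQL inserts above.

package main

import (
	"fmt"

	openfgav1 "github.com/openfga/api/proto/openfga/v1" // assumed import path for the openfgav1 package used below

	"github.com/SigNoz/signoz/pkg/types/roletypes"
)

// anonymousPublicDashboardTuple rebuilds, as a single tuple key, the wildcard grant
// the migration writes per organization: every assignee of the signoz-anonymous role
// may read every public dashboard in that organization.
func anonymousPublicDashboardTuple(orgID string) *openfgav1.TupleKey {
	return &openfgav1.TupleKey{
		User:     "role:organization/" + orgID + "/role/" + roletypes.SigNozAnonymousRoleName + "#assignee",
		Relation: "read",
		// assumed "type:id" form of the (object_type, object_id) pair inserted above
		Object: "metaresource:organization/" + orgID + "/public-dashboard/*",
	}
}

func main() {
	fmt.Println(anonymousPublicDashboardTuple("9e466b56-0000-4000-8000-000000000000"))
}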

View File

@@ -20,15 +20,19 @@ var (
)
var (
typeUserSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeRoleSelectorRegex = regexp.MustCompile(`^[a-z-]{1,50}$`)
typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
// metaresources selectors are used to select either all or none
// metaresources selectors are used to select either all or none until we introduce some hierarchy here.
typeMetaResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)
var (
WildCardSelectorString = "*"
)
type SelectorCallbackWithClaimsFn func(*http.Request, Claims) ([]Selector, error)
type SelectorCallbackWithoutClaimsFn func(*http.Request, []*types.Organization) ([]Selector, valuer.UUID, error)
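
A quick sketch of what the widened selectors accept, exercising the user and role patterns above verbatim; apart from the literal UUID used as input, nothing here is assumed.

package main

import (
	"fmt"
	"regexp"
)

// Copied verbatim from the selector definitions above: UUIDs and role names are
// still accepted, and "*" (WildCardSelectorString) now matches as well.
var (
	typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
	typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
)

func main() {
	fmt.Println(typeUserSelectorRegex.MatchString("9e466b56-0000-4000-8000-000000000000")) // true: plain UUID
	fmt.Println(typeUserSelectorRegex.MatchString("*"))                                    // true: wildcard selector
	fmt.Println(typeRoleSelectorRegex.MatchString("signoz-anonymous"))                     // true: role name
	fmt.Println(typeRoleSelectorRegex.MatchString("*"))                                    // true: wildcard selector
}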

View File

@@ -24,9 +24,10 @@ func MustNewTypeableMetaResource(name Name) Typeable {
return typeableesource
}
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
for _, selector := range selectors {
object := typeableMetaResource.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -24,9 +24,10 @@ func MustNewTypeableMetaResources(name Name) Typeable {
return resources
}
func (typeableResources *typeableMetaResources) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableResources *typeableMetaResources) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
for _, selector := range selectors {
object := typeableResources.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -11,9 +11,10 @@ var _ Typeable = new(typeableOrganization)
type typeableOrganization struct{}
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selectors []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
for _, selector := range selectors {
object := strings.Join([]string{typeableOrganization.Type().StringValue(), selector.String()}, ":")
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -9,9 +9,10 @@ var _ Typeable = new(typeableRole)
type typeableRole struct{}
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
for _, selector := range selectors {
object := typeableRole.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -9,9 +9,10 @@ var _ Typeable = new(typeableUser)
type typeableUser struct{}
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)
for _, selector := range selector {
for _, selector := range selectors {
object := typeableUser.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

View File

@@ -8,15 +8,15 @@ import (
"time"
"unicode/utf8"
"github.com/prometheus/alertmanager/config"
signozError "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/prometheus/alertmanager/config"
"github.com/SigNoz/signoz/pkg/valuer"
)
type AlertType string
@@ -40,12 +40,12 @@ const (
// PostableRule is used to create alerting rule from HTTP api
type PostableRule struct {
AlertName string `json:"alert,omitempty"`
AlertType AlertType `json:"alertType,omitempty"`
Description string `json:"description,omitempty"`
RuleType RuleType `json:"ruleType,omitempty"`
EvalWindow Duration `json:"evalWindow,omitempty"`
Frequency Duration `json:"frequency,omitempty"`
AlertName string `json:"alert,omitempty"`
AlertType AlertType `json:"alertType,omitempty"`
Description string `json:"description,omitempty"`
RuleType RuleType `json:"ruleType,omitempty"`
EvalWindow valuer.TextDuration `json:"evalWindow,omitempty"`
Frequency valuer.TextDuration `json:"frequency,omitempty"`
RuleCondition *RuleCondition `json:"condition,omitempty"`
Labels map[string]string `json:"labels,omitempty"`
@@ -71,13 +71,13 @@ type NotificationSettings struct {
Renotify Renotify `json:"renotify,omitempty"`
UsePolicy bool `json:"usePolicy,omitempty"`
// NewGroupEvalDelay is the grace period for new series to be excluded from alerts evaluation
NewGroupEvalDelay *Duration `json:"newGroupEvalDelay,omitempty"`
NewGroupEvalDelay valuer.TextDuration `json:"newGroupEvalDelay,omitzero"`
}
type Renotify struct {
Enabled bool `json:"enabled"`
ReNotifyInterval Duration `json:"interval,omitempty"`
AlertStates []model.AlertState `json:"alertStates,omitempty"`
Enabled bool `json:"enabled"`
ReNotifyInterval valuer.TextDuration `json:"interval,omitzero"`
AlertStates []model.AlertState `json:"alertStates,omitempty"`
}
func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanagertypes.NotificationConfig {
@@ -85,10 +85,10 @@ func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanager
var noDataRenotifyInterval time.Duration
if ns.Renotify.Enabled {
if slices.Contains(ns.Renotify.AlertStates, model.StateNoData) {
noDataRenotifyInterval = time.Duration(ns.Renotify.ReNotifyInterval)
noDataRenotifyInterval = ns.Renotify.ReNotifyInterval.Duration()
}
if slices.Contains(ns.Renotify.AlertStates, model.StateFiring) {
renotifyInterval = time.Duration(ns.Renotify.ReNotifyInterval)
renotifyInterval = ns.Renotify.ReNotifyInterval.Duration()
}
} else {
renotifyInterval = 8760 * time.Hour //1 year for no renotify substitute
@@ -190,12 +190,12 @@ func (r *PostableRule) processRuleDefaults() {
r.SchemaVersion = DefaultSchemaVersion
}
if r.EvalWindow == 0 {
r.EvalWindow = Duration(5 * time.Minute)
if r.EvalWindow.IsZero() {
r.EvalWindow = valuer.MustParseTextDuration("5m")
}
if r.Frequency == 0 {
r.Frequency = Duration(1 * time.Minute)
if r.Frequency.IsZero() {
r.Frequency = valuer.MustParseTextDuration("1m")
}
if r.RuleCondition != nil {
@@ -246,7 +246,7 @@ func (r *PostableRule) processRuleDefaults() {
r.NotificationSettings = &NotificationSettings{
Renotify: Renotify{
Enabled: true,
ReNotifyInterval: Duration(4 * time.Hour),
ReNotifyInterval: valuer.MustParseTextDuration("4h"),
AlertStates: []model.AlertState{model.StateFiring},
},
}
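
A small sketch of the defaulting behaviour in processRuleDefaults above, assuming the zero value of valuer.TextDuration reports IsZero(); the parse and conversion helpers are the ones used throughout this diff.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// An unset duration is what triggers the defaults in processRuleDefaults.
	var unset valuer.TextDuration
	fmt.Println(unset.IsZero()) // true

	// The defaults themselves, converted back to time.Duration wherever the
	// rule engine needs a plain duration.
	evalWindow := valuer.MustParseTextDuration("5m")
	frequency := valuer.MustParseTextDuration("1m")
	fmt.Println(evalWindow.Duration(), frequency.Duration()) // 5m0s 1m0s
}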

View File

@@ -171,10 +171,10 @@ func TestParseIntoRule(t *testing.T) {
kind: RuleDataKindJson,
expectError: false,
validate: func(t *testing.T, rule *PostableRule) {
if rule.EvalWindow != Duration(5*time.Minute) {
if rule.EvalWindow.Duration() != 5*time.Minute {
t.Errorf("Expected default eval window '5m', got '%v'", rule.EvalWindow)
}
if rule.Frequency != Duration(1*time.Minute) {
if rule.Frequency.Duration() != time.Minute {
t.Errorf("Expected default frequency '1m', got '%v'", rule.Frequency)
}
if rule.RuleCondition.CompositeQuery.BuilderQueries["A"].Expression != "A" {
@@ -327,10 +327,10 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
// Verify evaluation window matches rule settings
if window, ok := rule.Evaluation.Spec.(RollingWindow); ok {
if window.EvalWindow != rule.EvalWindow {
if !window.EvalWindow.Equal(rule.EvalWindow) {
t.Errorf("Expected Evaluation EvalWindow %v, got %v", rule.EvalWindow, window.EvalWindow)
}
if window.Frequency != rule.Frequency {
if !window.Frequency.Equal(rule.Frequency) {
t.Errorf("Expected Evaluation Frequency %v, got %v", rule.Frequency, window.Frequency)
}
} else {
@@ -457,10 +457,10 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
t.Fatal("Expected Evaluation to be populated")
}
if window, ok := rule.Evaluation.Spec.(RollingWindow); ok {
if window.EvalWindow != rule.EvalWindow {
if !window.EvalWindow.Equal(rule.EvalWindow) {
t.Errorf("Expected Evaluation EvalWindow to be overwritten to %v, got %v", rule.EvalWindow, window.EvalWindow)
}
if window.Frequency != rule.Frequency {
if !window.Frequency.Equal(rule.Frequency) {
t.Errorf("Expected Evaluation Frequency to be overwritten to %v, got %v", rule.Frequency, window.Frequency)
}
} else {
@@ -504,7 +504,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
t.Error("Expected Evaluation to be nil for v2")
}
if rule.EvalWindow != Duration(5*time.Minute) {
if rule.EvalWindow.Duration() != 5*time.Minute {
t.Error("Expected default EvalWindow to be applied")
}
if rule.RuleType != RuleTypeThreshold {

View File

@@ -19,36 +19,36 @@ var (
type Evaluation interface {
NextWindowFor(curr time.Time) (time.Time, time.Time)
GetFrequency() Duration
GetFrequency() valuer.TextDuration
}
type RollingWindow struct {
EvalWindow Duration `json:"evalWindow"`
Frequency Duration `json:"frequency"`
EvalWindow valuer.TextDuration `json:"evalWindow"`
Frequency valuer.TextDuration `json:"frequency"`
}
func (rollingWindow RollingWindow) Validate() error {
if rollingWindow.EvalWindow <= 0 {
if !rollingWindow.EvalWindow.IsPositive() {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "evalWindow must be greater than zero")
}
if rollingWindow.Frequency <= 0 {
if !rollingWindow.Frequency.IsPositive() {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero")
}
return nil
}
func (rollingWindow RollingWindow) NextWindowFor(curr time.Time) (time.Time, time.Time) {
return curr.Add(time.Duration(-rollingWindow.EvalWindow)), curr
return curr.Add(-rollingWindow.EvalWindow.Duration()), curr
}
func (rollingWindow RollingWindow) GetFrequency() Duration {
func (rollingWindow RollingWindow) GetFrequency() valuer.TextDuration {
return rollingWindow.Frequency
}
type CumulativeWindow struct {
Schedule CumulativeSchedule `json:"schedule"`
Frequency Duration `json:"frequency"`
Timezone string `json:"timezone"`
Schedule CumulativeSchedule `json:"schedule"`
Frequency valuer.TextDuration `json:"frequency"`
Timezone string `json:"timezone"`
}
type CumulativeSchedule struct {
@@ -79,7 +79,7 @@ func (cumulativeWindow CumulativeWindow) Validate() error {
if _, err := time.LoadLocation(cumulativeWindow.Timezone); err != nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "timezone is invalid")
}
if cumulativeWindow.Frequency <= 0 {
if !cumulativeWindow.Frequency.IsPositive() {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero")
}
return nil
@@ -150,8 +150,8 @@ func (cumulativeWindow CumulativeWindow) NextWindowFor(curr time.Time) (time.Tim
return windowStart.In(time.UTC), currInTZ.In(time.UTC)
}
func (cw CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time {
schedule := cw.Schedule
func (cumulativeWindow CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time {
schedule := cumulativeWindow.Schedule
switch schedule.Type {
case ScheduleTypeHourly:
@@ -220,7 +220,7 @@ func (cw CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Locatio
}
}
func (cumulativeWindow CumulativeWindow) GetFrequency() Duration {
func (cumulativeWindow CumulativeWindow) GetFrequency() valuer.TextDuration {
return cumulativeWindow.Frequency
}
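
A minimal sketch of the rolling spec in use, assuming RollingWindow decodes straight from its JSON tags (the envelope tests below exercise the same payload); the window arithmetic mirrors NextWindowFor above.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"time"

	"github.com/SigNoz/signoz/pkg/types/ruletypes"
)

func main() {
	// Durations stay human-readable strings on the wire.
	raw := []byte(`{"evalWindow":"5m","frequency":"1m"}`)

	var rw ruletypes.RollingWindow
	if err := json.Unmarshal(raw, &rw); err != nil {
		log.Fatal(err)
	}

	// A rolling window ends at the evaluation time and starts EvalWindow earlier.
	curr := time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC)
	start, end := rw.NextWindowFor(curr)
	fmt.Println(start, end) // 2023-12-01 12:25:00 +0000 UTC then 12:30:00 +0000 UTC
}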

View File

@@ -4,33 +4,35 @@ import (
"encoding/json"
"testing"
"time"
"github.com/SigNoz/signoz/pkg/valuer"
)
func TestRollingWindow_EvaluationTime(t *testing.T) {
tests := []struct {
name string
evalWindow Duration
evalWindow valuer.TextDuration
current time.Time
wantStart time.Time
wantEnd time.Time
}{
{
name: "5 minute rolling window",
evalWindow: Duration(5 * time.Minute),
evalWindow: valuer.MustParseTextDuration("5m"),
current: time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC),
wantStart: time.Date(2023, 12, 1, 12, 25, 0, 0, time.UTC),
wantEnd: time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC),
},
{
name: "1 hour rolling window",
evalWindow: Duration(1 * time.Hour),
evalWindow: valuer.MustParseTextDuration("1h"),
current: time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC),
wantStart: time.Date(2023, 12, 1, 14, 45, 30, 0, time.UTC),
wantEnd: time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC),
},
{
name: "30 second rolling window",
evalWindow: Duration(30 * time.Second),
evalWindow: valuer.MustParseTextDuration("30s"),
current: time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC),
wantStart: time.Date(2023, 12, 1, 12, 29, 45, 0, time.UTC),
wantEnd: time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC),
@@ -41,7 +43,7 @@ func TestRollingWindow_EvaluationTime(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
rw := &RollingWindow{
EvalWindow: tt.evalWindow,
Frequency: Duration(1 * time.Minute),
Frequency: valuer.MustParseTextDuration("1m"),
}
gotStart, gotEnd := rw.NextWindowFor(tt.current)
@@ -69,7 +71,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Type: ScheduleTypeHourly,
Minute: intPtr(15),
},
Frequency: Duration(5 * time.Minute),
Frequency: valuer.MustParseTextDuration("5m"),
Timezone: "UTC",
},
current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC),
@@ -83,7 +85,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Hour: intPtr(9),
Minute: intPtr(30),
},
Frequency: Duration(1 * time.Hour),
Frequency: valuer.MustParseTextDuration("1h"),
Timezone: "Asia/Kolkata",
},
current: time.Date(2025, 3, 15, 15, 30, 0, 0, time.UTC),
@@ -98,7 +100,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Hour: intPtr(14),
Minute: intPtr(0),
},
Frequency: Duration(24 * time.Hour),
Frequency: valuer.MustParseTextDuration("24h"),
Timezone: "America/New_York",
},
current: time.Date(2025, 3, 18, 19, 0, 0, 0, time.UTC), // Tuesday
@@ -113,7 +115,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Hour: intPtr(0),
Minute: intPtr(0),
},
Frequency: Duration(24 * time.Hour),
Frequency: valuer.MustParseTextDuration("24h"),
Timezone: "UTC",
},
current: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC),
@@ -125,7 +127,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
Schedule: CumulativeSchedule{
Type: ScheduleTypeHourly,
},
Frequency: Duration(5 * time.Minute),
Frequency: valuer.MustParseTextDuration("5m"),
Timezone: "UTC",
},
current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC),
@@ -755,8 +757,8 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
jsonInput: `{"kind":"rolling","spec":{"evalWindow":"5m","frequency":"1m"}}`,
wantKind: RollingEvaluation,
wantSpec: RollingWindow{
EvalWindow: Duration(5 * time.Minute),
Frequency: Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
},
},
{
@@ -768,7 +770,7 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
Type: ScheduleTypeHourly,
Minute: intPtr(30),
},
Frequency: Duration(2 * time.Minute),
Frequency: valuer.MustParseTextDuration("2m"),
Timezone: "UTC",
},
},
@@ -847,10 +849,10 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
t.Fatalf("Expected RollingWindow spec, got %T", envelope.Spec)
}
wantSpec := tt.wantSpec.(RollingWindow)
if gotSpec.EvalWindow != wantSpec.EvalWindow {
if !gotSpec.EvalWindow.Equal(wantSpec.EvalWindow) {
t.Errorf("RollingWindow.EvalWindow = %v, want %v", gotSpec.EvalWindow, wantSpec.EvalWindow)
}
if gotSpec.Frequency != wantSpec.Frequency {
if !gotSpec.Frequency.Equal(wantSpec.Frequency) {
t.Errorf("RollingWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency)
}
case CumulativeEvaluation:
@@ -866,7 +868,7 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
(gotSpec.Schedule.Minute != nil && wantSpec.Schedule.Minute != nil && *gotSpec.Schedule.Minute != *wantSpec.Schedule.Minute) {
t.Errorf("CumulativeWindow.Schedule.Minute = %v, want %v", gotSpec.Schedule.Minute, wantSpec.Schedule.Minute)
}
if gotSpec.Frequency != wantSpec.Frequency {
if !gotSpec.Frequency.Equal(wantSpec.Frequency) {
t.Errorf("CumulativeWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency)
}
if gotSpec.Timezone != wantSpec.Timezone {

View File

@@ -131,7 +131,7 @@ func (m *GettablePlannedMaintenance) checkDaily(currentTime time.Time, rec *Recu
if candidate.After(currentTime) {
candidate = candidate.AddDate(0, 0, -1)
}
return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
return currentTime.Sub(candidate) <= rec.Duration.Duration()
}
// checkWeekly finds the most recent allowed occurrence by rebasing the recurrences
@@ -160,7 +160,7 @@ func (m *GettablePlannedMaintenance) checkWeekly(currentTime time.Time, rec *Rec
if candidate.After(currentTime) {
candidate = candidate.AddDate(0, 0, -7)
}
if currentTime.Sub(candidate) <= time.Duration(rec.Duration) {
if currentTime.Sub(candidate) <= rec.Duration.Duration() {
return true
}
}
@@ -198,7 +198,7 @@ func (m *GettablePlannedMaintenance) checkMonthly(currentTime time.Time, rec *Re
)
}
}
return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
return currentTime.Sub(candidate) <= rec.Duration.Duration()
}
func (m *GettablePlannedMaintenance) IsActive(now time.Time) bool {
@@ -255,7 +255,7 @@ func (m *GettablePlannedMaintenance) Validate() error {
if m.Schedule.Recurrence.RepeatType == "" {
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing repeat type in the payload")
}
if m.Schedule.Recurrence.Duration == 0 {
if m.Schedule.Recurrence.Duration.IsZero() {
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing duration in the payload")
}
if m.Schedule.Recurrence.EndTime != nil && m.Schedule.Recurrence.EndTime.Before(m.Schedule.Recurrence.StartTime) {

View File

@@ -3,6 +3,8 @@ package ruletypes
import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/valuer"
)
// Helper function to create a time pointer
@@ -25,7 +27,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "Europe/London",
Recurrence: &Recurrence{
StartTime: time.Date(2025, 3, 1, 0, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 24),
Duration: valuer.MustParseTextDuration("24h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday, RepeatOnTuesday, RepeatOnWednesday, RepeatOnThursday, RepeatOnFriday, RepeatOnSunday},
},
@@ -42,7 +44,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
Duration: Duration(time.Hour * 4), // Until Tuesday 02:00
Duration: valuer.MustParseTextDuration("4h"), // Until Tuesday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -59,7 +61,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
Duration: Duration(time.Hour * 4), // Until Tuesday 02:00
Duration: valuer.MustParseTextDuration("4h"), // Until Tuesday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -76,7 +78,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
Duration: Duration(time.Hour * 52), // Until Thursday 02:00
Duration: valuer.MustParseTextDuration("52h"), // Until Thursday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -93,7 +95,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 2, 22, 0, 0, 0, time.UTC), // Tuesday 22:00
Duration: Duration(time.Hour * 4), // Until Wednesday 02:00
Duration: valuer.MustParseTextDuration("4h"), // Until Wednesday 02:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnTuesday}, // Only Tuesday
},
@@ -110,7 +112,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 23, 0, 0, 0, time.UTC), // 23:00
Duration: Duration(time.Hour * 2), // Until 01:00 next day
Duration: valuer.MustParseTextDuration("2h"), // Until 01:00 next day
RepeatType: RepeatTypeDaily,
},
},
@@ -126,7 +128,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -142,7 +144,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -158,7 +160,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 28, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 72), // 3 days
Duration: valuer.MustParseTextDuration("72h"), // 3 days
RepeatType: RepeatTypeMonthly,
},
},
@@ -174,7 +176,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 28, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 72), // 3 days
Duration: valuer.MustParseTextDuration("72h"), // 3 days
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnSunday},
},
@@ -191,7 +193,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 30, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 48), // 2 days, crosses to Feb 1
Duration: valuer.MustParseTextDuration("48h"), // 2 days, crosses to Feb 1
RepeatType: RepeatTypeMonthly,
},
},
@@ -207,7 +209,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "America/New_York", // UTC-5 or UTC-4 depending on DST
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 22, 0, 0, 0, time.FixedZone("America/New_York", -5*3600)),
Duration: Duration(time.Hour * 4),
Duration: valuer.MustParseTextDuration("4h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -223,7 +225,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -240,7 +242,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
EndTime: timePtr(time.Date(2024, 1, 10, 12, 0, 0, 0, time.UTC)),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -256,7 +258,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 3, 31, 22, 0, 0, 0, time.UTC), // March 31, 22:00
Duration: Duration(time.Hour * 6), // Until April 1, 04:00
Duration: valuer.MustParseTextDuration("6h"), // Until April 1, 04:00
RepeatType: RepeatTypeMonthly,
},
},
@@ -272,7 +274,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{}, // Empty - should apply to all days
},
@@ -289,7 +291,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
@@ -304,7 +306,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 23, 30, 0, 0, time.UTC),
Duration: Duration(time.Hour * 1), // Crosses to 00:30 next day
Duration: valuer.MustParseTextDuration("1h"), // Crosses to 00:30 next day
RepeatType: RepeatTypeDaily,
},
},
@@ -319,7 +321,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
@@ -334,7 +336,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 30, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 48), // 2 days duration
Duration: valuer.MustParseTextDuration("48h"), // 2 days duration
RepeatType: RepeatTypeMonthly,
},
},
@@ -349,7 +351,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 4, 1, 23, 0, 0, 0, time.UTC), // Monday 23:00
Duration: Duration(time.Hour * 2), // Until Tuesday 01:00
Duration: valuer.MustParseTextDuration("2h"), // Until Tuesday 01:00
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday}, // Only Monday
},
@@ -365,7 +367,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
@@ -380,7 +382,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 22, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 4), // Until 02:00 next day
Duration: valuer.MustParseTextDuration("4h"), // Until 02:00 next day
RepeatType: RepeatTypeDaily,
},
},
@@ -395,7 +397,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
@@ -446,7 +448,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "US/Eastern",
Recurrence: &Recurrence{
StartTime: time.Date(2025, 3, 29, 20, 0, 0, 0, time.FixedZone("US/Eastern", -4*3600)),
Duration: Duration(time.Hour * 24),
Duration: valuer.MustParseTextDuration("24h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnSunday, RepeatOnSaturday},
},
@@ -462,7 +464,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -477,7 +479,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -492,7 +494,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeDaily,
},
},
@@ -507,7 +509,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -523,7 +525,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -539,7 +541,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -555,7 +557,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -571,7 +573,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeWeekly,
RepeatOn: []RepeatOn{RepeatOnMonday},
},
@@ -587,7 +589,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
@@ -602,7 +604,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},
@@ -617,7 +619,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
Timezone: "UTC",
Recurrence: &Recurrence{
StartTime: time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
Duration: Duration(time.Hour * 2),
Duration: valuer.MustParseTextDuration("2h"),
RepeatType: RepeatTypeMonthly,
},
},

View File

@@ -5,7 +5,7 @@ import (
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
type RepeatType string
@@ -38,40 +38,12 @@ var RepeatOnAllMap = map[RepeatOn]time.Weekday{
RepeatOnSaturday: time.Saturday,
}
type Duration time.Duration
func (d Duration) MarshalJSON() ([]byte, error) {
return json.Marshal(time.Duration(d).String())
}
func (d *Duration) UnmarshalJSON(b []byte) error {
var v interface{}
if err := json.Unmarshal(b, &v); err != nil {
return err
}
switch value := v.(type) {
case float64:
*d = Duration(time.Duration(value))
return nil
case string:
tmp, err := time.ParseDuration(value)
if err != nil {
return err
}
*d = Duration(tmp)
return nil
default:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
}
type Recurrence struct {
StartTime time.Time `json:"startTime"`
EndTime *time.Time `json:"endTime,omitempty"`
Duration Duration `json:"duration"`
RepeatType RepeatType `json:"repeatType"`
RepeatOn []RepeatOn `json:"repeatOn"`
StartTime time.Time `json:"startTime"`
EndTime *time.Time `json:"endTime,omitempty"`
Duration valuer.TextDuration `json:"duration"`
RepeatType RepeatType `json:"repeatType"`
RepeatOn []RepeatOn `json:"repeatOn"`
}
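The switch to valuer.TextDuration is what lets a maintenance window keep the exact duration text a client sent. Below is a minimal in-package sketch of the effect; the field values are made up, and only the behavior of the "duration" field is asserted:

package ruletypes

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// Sketch: the duration text supplied by the client survives a marshal round-trip.
func ExampleRecurrence_durationFormat() {
	rec := Recurrence{
		StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
		Duration:   valuer.MustParseTextDuration("90m"),
		RepeatType: RepeatTypeDaily,
	}
	b, err := json.Marshal(rec)
	if err != nil {
		panic(err)
	}
	// The "duration" field is emitted as "90m", not normalized to "1h30m0s".
	fmt.Println(string(b))
}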
func (r *Recurrence) Scan(src interface{}) error {

View File

@@ -266,3 +266,110 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
type GettableFieldKeys struct {
Keys map[string][]*TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
type PostableFieldKeysParams struct {
Signal Signal `query:"signal"`
Source Source `query:"source"`
Limit int `query:"limit"`
StartUnixMilli int64 `query:"startUnixMilli"`
EndUnixMilli int64 `query:"endUnixMilli"`
FieldContext FieldContext `query:"fieldContext"`
FieldDataType FieldDataType `query:"fieldDataType"`
MetricName string `query:"metricName"`
SearchText string `query:"searchText"`
}
type GettableFieldValues struct {
Values *TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
type PostableFieldValueParams struct {
PostableFieldKeysParams
Name string `query:"name"`
ExistingQuery string `query:"existingQuery"`
}
func NewFieldKeySelectorFromPostableFieldKeysParams(params PostableFieldKeysParams) *FieldKeySelector {
var req FieldKeySelector
if params.StartUnixMilli != 0 {
req.StartUnixMilli = params.StartUnixMilli
// Round down to the nearest 6 hours (21600000 milliseconds)
req.StartUnixMilli -= req.StartUnixMilli % 21600000
}
if params.EndUnixMilli != 0 {
req.EndUnixMilli = params.EndUnixMilli
}
req.Signal = params.Signal
req.Source = params.Source
req.FieldContext = params.FieldContext
req.FieldDataType = params.FieldDataType
req.SelectorMatchType = FieldSelectorMatchTypeFuzzy
if params.Limit != 0 {
req.Limit = params.Limit
} else {
req.Limit = 1000
}
if params.MetricName != "" {
req.MetricContext = &MetricContext{
MetricName: params.MetricName,
}
}
req.Name = params.SearchText
if params.SearchText != "" && params.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.SearchText)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, req.Signal) {
req.Name = parsedFieldKey.Name
req.FieldContext = parsedFieldKey.FieldContext
}
}
}
return &req
}
func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValueParams) *FieldValueSelector {
keySelector := NewFieldKeySelectorFromPostableFieldKeysParams(params.PostableFieldKeysParams)
fieldValueSelector := &FieldValueSelector{
FieldKeySelector: keySelector,
}
fieldValueSelector.Name = params.Name
if params.Name != "" && fieldValueSelector.FieldContext == FieldContextUnspecified {
parsedFieldKey := GetFieldKeyFromKeyText(params.Name)
if parsedFieldKey.FieldContext != FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, fieldValueSelector.Signal) {
fieldValueSelector.Name = parsedFieldKey.Name
fieldValueSelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
fieldValueSelector.ExistingQuery = params.ExistingQuery
fieldValueSelector.Value = params.SearchText
if params.Limit != 0 {
fieldValueSelector.Limit = params.Limit
} else {
fieldValueSelector.Limit = 50
}
return fieldValueSelector
}
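A small sketch of how the query parameters above map onto a selector, mainly to make the 6-hour flooring and the default limit concrete. It assumes the sketch lives in the same package as these types (the package name used here is an assumption) and the timestamp is arbitrary:

package telemetrytypes // assumption: package name of the file above

import "fmt"

func ExampleNewFieldKeySelectorFromPostableFieldKeysParams() {
	params := PostableFieldKeysParams{
		Signal:         SignalLogs,
		StartUnixMilli: 1717171717000, // arbitrary timestamp in ms
		EndUnixMilli:   1717175317000,
		SearchText:     "service.name",
	}
	sel := NewFieldKeySelectorFromPostableFieldKeysParams(params)
	fmt.Println(sel.StartUnixMilli) // 1717156800000: floored to the previous 6h (21600000 ms) boundary
	fmt.Println(sel.Limit)          // 1000: default applied when no limit is supplied
}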

View File

@@ -154,3 +154,21 @@ func (f FieldContext) TagType() string {
}
return ""
}
func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
if ctx == FieldContextResource ||
ctx == FieldContextAttribute ||
ctx == FieldContextScope {
return true
}
switch signal.StringValue() {
case SignalLogs.StringValue():
return ctx == FieldContextLog || ctx == FieldContextBody
case SignalTraces.StringValue():
return ctx == FieldContextSpan || ctx == FieldContextEvent || ctx == FieldContextTrace
case SignalMetrics.StringValue():
return ctx == FieldContextMetric
}
return true
}

View File

@@ -107,3 +107,13 @@ func (enum *Email) UnmarshalText(text []byte) error {
func (enum Email) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
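// UnmarshalParam implements Gin's BindUnmarshaler so the email can be bound directly from query and path parameters.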
func (enum *Email) UnmarshalParam(param string) error {
email, err := NewEmail(param)
if err != nil {
return err
}
*enum = email
return nil
}

View File

@@ -77,3 +77,10 @@ func (enum *String) UnmarshalText(text []byte) error {
func (enum String) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
// Implement Gin's BindUnmarshaler interface
// See https://github.com/SigNoz/signoz/pull/10219 description for additional details
func (enum *String) UnmarshalParam(param string) error {
*enum = NewString(param)
return nil
}
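Since several valuer types now implement this interface, here is a self-contained sketch of what Gin does with it when binding query parameters. It is hedged: signalParam and the URL are made up, the sketch uses Gin's default `form` tag (the `query` struct tags used elsewhere in this change are presumably routed through the project's own binding layer), and BindUnmarshaler support requires a reasonably recent Gin release.

package main

import (
	"fmt"
	"net/http/httptest"

	"github.com/gin-gonic/gin/binding"
)

// signalParam is a made-up type standing in for the valuer types above; it
// implements binding.BindUnmarshaler so Gin can set it from a raw parameter.
type signalParam struct{ value string }

func (s *signalParam) UnmarshalParam(param string) error {
	s.value = param // real valuer types validate and normalize here
	return nil
}

type fieldKeysRequest struct {
	Signal signalParam `form:"signal"`
}

func main() {
	req := httptest.NewRequest("GET", "/fields/keys?signal=logs", nil)

	var r fieldKeysRequest
	if err := binding.Query.Bind(req, &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Signal.value) // logs
}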

163 pkg/valuer/text_duration.go Normal file
View File

@@ -0,0 +1,163 @@
package valuer
import (
"database/sql/driver"
"encoding/json"
"time"
"github.com/SigNoz/signoz/pkg/errors"
)
var _ Valuer = (*TextDuration)(nil)
// TextDuration preserves the human-readable duration text as provided by the input.
// It keeps the raw text so serialization does not normalize values like
// "90m" into "1h30m0s".
type TextDuration struct {
text string
value time.Duration
}
// ParseTextDuration parses a human-readable duration string.
// This preserves the raw text so that it can be serialized back to JSON.
func ParseTextDuration(s string) (TextDuration, error) {
d, err := time.ParseDuration(s)
if err != nil {
return TextDuration{}, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse duration text")
}
return TextDuration{text: s, value: d}, nil
}
// MustParseTextDuration parses a human-readable duration string, preserving
// the raw text. It panics if parsing fails.
func MustParseTextDuration(s string) TextDuration {
d, err := ParseTextDuration(s)
if err != nil {
panic(err)
}
return d
}
// Duration returns the underlying [time.Duration] value.
func (d TextDuration) Duration() time.Duration {
return d.value
}
// IsZero implements [Valuer].
// It returns whether the parsed duration is zero.
func (d TextDuration) IsZero() bool {
return d.value == 0
}
// IsPositive reports whether the duration is greater than zero.
func (d TextDuration) IsPositive() bool {
return d.value > 0
}
// String implements the [fmt.Stringer] interface.
func (d TextDuration) String() string {
if len(d.text) > 0 {
return d.text
}
return d.value.String()
}
// StringValue implements [Valuer].
func (d TextDuration) StringValue() string {
return d.String()
}
// MarshalJSON implements the [encoding/json.Marshaler] interface.
// It serializes the duration value in a human-readable format (1h30m0s).
// If the original text is available, it is returned as-is. Example: 90m is not normalized to 1h30m0s.
func (d TextDuration) MarshalJSON() ([]byte, error) {
return json.Marshal(d.String())
}
// UnmarshalJSON implements the [encoding/json.Unmarshaler] interface.
// It accepts string or numeric (nanosecond) durations; for strings, the original text is preserved.
func (d *TextDuration) UnmarshalJSON(b []byte) error {
var v any
if err := json.Unmarshal(b, &v); err != nil {
return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
switch value := v.(type) {
case float64:
d.value = time.Duration(value)
d.text = ""
return nil
case string:
tmp, err := time.ParseDuration(value)
if err != nil {
return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
d.value = tmp
d.text = value
return nil
default:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
}
}
// MarshalText implements [encoding.TextMarshaler].
func (d TextDuration) MarshalText() ([]byte, error) {
return []byte(d.String()), nil
}
// UnmarshalText implements [encoding.TextUnmarshaler].
func (d *TextDuration) UnmarshalText(text []byte) error {
s := string(text)
tmp, err := time.ParseDuration(s)
if err != nil {
return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse duration text")
}
d.value = tmp
d.text = s
return nil
}
// Value implements [driver.Valuer] by returning the textual representation of the duration.
func (d TextDuration) Value() (driver.Value, error) {
return d.String(), nil
}
// Scan implements [database/sql.Scanner] to read the duration from the database.
func (d *TextDuration) Scan(value any) error {
if value == nil {
d.value = 0
d.text = ""
return nil
}
switch v := value.(type) {
case int64:
d.value = time.Duration(v)
d.text = ""
return nil
case []byte:
return d.UnmarshalText(v)
case string:
return d.UnmarshalText([]byte(v))
default:
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
"cannot scan type %T into TextDuration", value)
}
}
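// UnmarshalParam implements Gin's BindUnmarshaler by parsing the raw query or path parameter as a duration.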
func (d *TextDuration) UnmarshalParam(param string) error {
return d.UnmarshalText([]byte(param))
}
// Equal reports whether two TextDuration values represent the same underlying duration.
//
// Note that their String representations can still differ.
func (d TextDuration) Equal(d2 TextDuration) bool {
return d.value == d2.value
}
// Milliseconds returns the duration as an integer millisecond count.
func (d TextDuration) Milliseconds() int64 {
return d.value.Milliseconds()
}

View File

@@ -0,0 +1,144 @@
package valuer
import (
"encoding/json"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTextDuration(t *testing.T) {
cases := []struct {
name string
input string
error bool
duration time.Duration
string string
}{
{
name: "ParseTextDuration(10s)",
input: "10s",
duration: 10 * time.Second,
string: "10s",
},
{
name: "ParseTextDuration(90m)",
input: "90m",
duration: 90 * time.Minute,
string: "90m",
},
{
name: "ParseTextDuration(1h30m)",
input: "1h30m",
duration: 90 * time.Minute,
string: "1h30m",
},
{
name: "Invalid duration",
input: "invalid",
error: true,
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
var err error
d, err := ParseTextDuration(tc.input)
if tc.error {
assert.Error(t, err)
return
}
assert.NoError(t, err)
assert.Equal(t, tc.duration, d.Duration())
assert.Equal(t, tc.string, d.String())
})
}
}
func TestTextDuration_MustParsePanics(t *testing.T) {
assert.Panics(t, func() {
MustParseTextDuration("not-a-duration")
})
}
func TestTextDuration_JSON(t *testing.T) {
t.Run("RoundTrip", func(t *testing.T) {
parsed, err := ParseTextDuration("90m")
require.NoError(t, err)
data, err := json.Marshal(parsed)
require.NoError(t, err)
assert.Equal(t, `"90m"`, string(data))
var decoded TextDuration
require.NoError(t, json.Unmarshal([]byte(`"2h"`), &decoded))
assert.Equal(t, 2*time.Hour, decoded.Duration())
assert.Equal(t, "2h", decoded.String())
})
t.Run("Numeric", func(t *testing.T) {
var decoded TextDuration
require.NoError(t, json.Unmarshal([]byte(`1000000000`), &decoded))
assert.Equal(t, time.Second, decoded.Duration())
assert.Equal(t, "1s", decoded.String())
})
t.Run("Invalid", func(t *testing.T) {
var decoded TextDuration
assert.Error(t, json.Unmarshal([]byte(`true`), &decoded))
assert.Error(t, json.Unmarshal([]byte(`"nope"`), &decoded))
})
}
func TestTextDurationTextMarshaling(t *testing.T) {
parsed, err := ParseTextDuration("45s")
require.NoError(t, err)
data, err := parsed.MarshalText()
require.NoError(t, err)
assert.Equal(t, "45s", string(data))
var decoded TextDuration
require.NoError(t, decoded.UnmarshalText([]byte("2m")))
assert.Equal(t, 2*time.Minute, decoded.Duration())
assert.Equal(t, "2m", decoded.String())
assert.Error(t, decoded.UnmarshalText([]byte("invalid")))
}
func TestTextDurationValueAndScan(t *testing.T) {
parsed, err := ParseTextDuration("2s")
require.NoError(t, err)
val, err := parsed.Value()
require.NoError(t, err)
assert.Equal(t, "2s", val)
var scanned TextDuration
err = scanned.Scan(nil)
require.NoError(t, err)
assert.True(t, scanned.IsZero())
assert.Equal(t, "0s", scanned.String())
err = scanned.Scan([]byte("3s"))
require.NoError(t, err)
assert.Equal(t, 3*time.Second, scanned.Duration())
assert.Equal(t, "3s", scanned.String())
err = scanned.Scan(true)
assert.Error(t, err)
}
func TestTextDurationUnmarshalParam(t *testing.T) {
var decoded TextDuration
require.NoError(t, decoded.UnmarshalParam("2m"))
assert.Equal(t, 2*time.Minute, decoded.Duration())
assert.Equal(t, "2m", decoded.String())
assert.Error(t, decoded.UnmarshalParam("invalid"))
}

View File

@@ -140,3 +140,13 @@ func (enum *UUID) UnmarshalText(text []byte) error {
func (enum UUID) MarshalText() (text []byte, err error) {
return []byte(enum.StringValue()), nil
}
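// UnmarshalParam implements Gin's BindUnmarshaler so the UUID can be bound directly from query and path parameters.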
func (enum *UUID) UnmarshalParam(param string) error {
uuid, err := NewUUID(param)
if err != nil {
return err
}
*enum = uuid
return nil
}

View File

@@ -8,6 +8,7 @@ import (
"fmt"
"github.com/SigNoz/signoz/pkg/errors"
ginbinding "github.com/gin-gonic/gin/binding"
)
var (
@@ -42,4 +43,7 @@ type Valuer interface {
// Implement encoding.TextMarshaler to allow the value to be marshalled into a string
encoding.TextMarshaler
// Implement Gin's BindUnmarshaler interface
ginbinding.BindUnmarshaler
}

View File

@@ -1,6 +1,7 @@
from http import HTTPStatus
from typing import Callable, List
import pytest
import requests
from sqlalchemy import sql
from wiremock.resources.mappings import Mapping
@@ -67,40 +68,6 @@ def test_create_and_get_public_dashboard(
assert response.json()["status"] == "success"
assert response.json()["data"]["timeRangeEnabled"] is True
assert response.json()["data"]["defaultTimeRange"] == "10s"
public_path = response.json()["data"]["publicPath"]
assert public_path.startswith("/public/dashboard/")
public_dashboard_id = public_path.split("/public/dashboard/")[-1]
row = None
with signoz.sqlstore.conn.connect() as conn:
# verify the role creation
result = conn.execute(
sql.text("SELECT * FROM role WHERE name = :role"),
{"role": "signoz-anonymous"},
)
row = result.mappings().fetchone()
assert row is not None
assert row["name"] == "signoz-anonymous"
# verify the tuple creation for role
tuple_object_id = f"organization/{row["org_id"]}/role/signoz-anonymous"
tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
)
tuple_row = tuple_result.fetchone()
assert tuple_row is not None
# verify the tuple creation for public-dashboard
tuple_object_id = (
f"organization/{row["org_id"]}/public-dashboard/{public_dashboard_id}"
)
tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
)
tuple_row = tuple_result.fetchone()
assert tuple_row is not None
def test_public_dashboard_widget_query_range(
@@ -224,3 +191,59 @@ def test_public_dashboard_widget_query_range(
timeout=2,
)
assert resp.status_code == HTTPStatus.BAD_REQUEST
def test_anonymous_role_has_public_dashboard_permission(
request: pytest.FixtureRequest,
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""
Verify that the signoz-anonymous role has the public-dashboard/* permission.
"""
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Get the roles to find the org_id
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/roles"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
roles = response.json()["data"]
anonymous_role = next(
(role for role in roles if role["name"] == "signoz-anonymous"), None
)
assert anonymous_role is not None
org_id = anonymous_role["orgId"]
# Verify the tuple exists in the database that grants
# signoz-anonymous role read access to public-dashboard/*
with signoz.sqlstore.conn.connect() as conn:
tuple_object_id = f"organization/{org_id}/public-dashboard/*"
tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
)
tuple_row = tuple_result.mappings().fetchone()
assert tuple_row is not None
assert tuple_row["object_type"] == "metaresource"
assert tuple_row["relation"] == "read"
if request.config.getoption("--sqlstore-provider") == "sqlite":
assert tuple_row["user_object_type"] == "role"
assert (
tuple_row["user_object_id"]
== f"organization/{org_id}/role/signoz-anonymous"
)
assert tuple_row["user_relation"] == "assignee"
else:
assert (
tuple_row["_user"]
== f"role:organization/{org_id}/role/signoz-anonymous#assignee"
)

Some files were not shown because too many files have changed in this diff.