Compare commits

...

18 Commits

Author SHA1 Message Date
srikanthccv
561e301094 chore: update test 2026-02-09 16:38:45 +05:30
srikanthccv
b78ab925a9 Merge branch 'update-js-ci' of github.com:SigNoz/signoz into update-js-ci 2026-02-09 16:30:05 +05:30
srikanthccv
5595a2af5b chore: update types 2026-02-09 16:29:36 +05:30
Srikanth Chekuri
a1dba654fb Merge branch 'main' into update-js-ci 2026-02-09 16:14:03 +05:30
srikanthccv
dac824d2c8 chore: regenerate 2026-02-09 16:12:30 +05:30
Ashwin Bhatkal
128497f27a chore: folder name change + CODEOWNER update (#10246)
* chore: folder name change + CODEOWNER update

* chore: revert multi select file change
2026-02-09 09:40:26 +00:00
Jatinderjit Singh
9e466b56b2 chore: preserve the original duration format (#10149) 2026-02-09 09:24:58 +00:00
Vikrant Gupta
4ad0baa2a2 feat(authz): add support for wildcard selector (#10208)
* feat(authz): remove unnecessary dependency injection for role setter

* feat(authz): deprecate role module

* feat(authz): deprecate role module

* feat(authz): split between server and sql actions

* feat(authz): add bootstrap for managed role transactions

* feat(authz): update and add integration tests

* feat(authz): match names for factory and migration

* feat(authz): fix integration tests

* feat(authz): reduce calls on organisation creation
2026-02-09 14:37:44 +05:30
srikanthccv
a5cbad73d1 chore: move to goci only 2026-02-09 14:15:51 +05:30
srikanthccv
cf52c1a9d2 chore: add openapi check for jsci 2026-02-09 14:09:45 +05:30
Srikanth Chekuri
24b588bfba chore: move fields api to openapi spec (#10219) 2026-02-09 13:43:36 +05:30
Abhi kumar
e5867cc2ad chore: updated chart theme colors (#10233)
* chore: updated chart theme colors

* fix: fixed failing tests
2026-02-09 12:25:36 +05:30
Srikanth Chekuri
b420ca494e chore: remove inline enum and implement jsonschema.Enum for metric types (#10238)
2026-02-09 07:02:32 +05:30
Piyush Singariya
e4693ce64c fix: improve qbtoexpr test suite (#10217)
* fix: improve qbtoexpr test suite

* fix: assert eq order

---------

Co-authored-by: Nityananda Gohain <nityanandagohain@gmail.com>
2026-02-06 15:15:56 +00:00
Ashwin Bhatkal
ca9b3a910a chore: separate out query and custom variable (#10221)
* chore: separate out query and custom variable

* chore: resolve own comments
2026-02-06 19:06:30 +05:30
Karan Balani
9dc7d2389a fix: minor changes for gateway and forgot password apis (#10204)
* fix: make size and count included in json if zero

* fix: make forgot password api fields required

* fix: openapi spec

* fix: error message casing for frontend

* chore: fix openapi spec

* fix: openapi specs
2026-02-06 18:00:33 +05:30
Abhi kumar
da5860297f feat: added new time series panel (#10207)
* feat: added new time series panel

* fix: pr review comments

* fix: pr review comments

* chore: memoized data creation in chartmanager
2026-02-06 16:35:30 +05:30
Ashwin Bhatkal
54fca5ba44 chore: separate out textbox variable into its own component (#10206)
* chore: initialise fetch store

* chore: small refactor

* chore: separate out textbox component into its own component
2026-02-06 16:19:49 +05:30
127 changed files with 5508 additions and 2718 deletions

.github/CODEOWNERS
View File

@@ -133,5 +133,8 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
## Dashboard Libs + Components
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend

View File

@@ -93,3 +93,13 @@ jobs:
run: |
go run cmd/enterprise/*.go generate openapi
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
- name: node-install
uses: actions/setup-node@v5
with:
node-version: "22"
- name: install-frontend
run: cd frontend && yarn install
- name: generate-api-clients
run: |
cd frontend && yarn generate:api
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)

View File

@@ -18,8 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -78,18 +76,15 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -14,7 +14,6 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -29,8 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -118,18 +115,15 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)
if err != nil {

View File

@@ -607,6 +607,178 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/fields/keys:
get:
deprecated: false
description: This endpoint returns field keys
operationId: GetFieldsKeys
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field keys
tags:
- fields
/api/v1/fields/values:
get:
deprecated: false
description: This endpoint returns field values
operationId: GetFieldsValues
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- in: query
name: metricName
schema:
type: string
- in: query
name: searchText
schema:
type: string
- in: query
name: name
schema:
type: string
- in: query
name: existingQuery
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field values
tags:
- fields
/api/v1/getResetPasswordToken/{id}:
get:
deprecated: false
@@ -2226,6 +2398,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"422":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unprocessable Entity
"500":
content:
application/json:
@@ -3922,19 +4100,9 @@ components:
isMonotonic:
type: boolean
temporality:
enum:
- delta
- cumulative
- unspecified
type: string
$ref: '#/components/schemas/MetrictypesTemporality'
type:
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
$ref: '#/components/schemas/MetrictypesType'
unit:
type: string
required:
@@ -3957,13 +4125,7 @@ components:
minimum: 0
type: integer
type:
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
$ref: '#/components/schemas/MetrictypesType'
unit:
type: string
required:
@@ -4024,6 +4186,11 @@ components:
- percentage
- totalValue
type: object
MetricsexplorertypesTreemapMode:
enum:
- timeseries
- samples
type: string
MetricsexplorertypesTreemapRequest:
properties:
end:
@@ -4034,10 +4201,7 @@ components:
limit:
type: integer
mode:
enum:
- timeseries
- samples
type: string
$ref: '#/components/schemas/MetricsexplorertypesTreemapMode'
start:
format: int64
type: integer
@@ -4072,19 +4236,9 @@ components:
metricName:
type: string
temporality:
enum:
- delta
- cumulative
- unspecified
type: string
$ref: '#/components/schemas/MetrictypesTemporality'
type:
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
$ref: '#/components/schemas/MetrictypesType'
unit:
type: string
required:
@@ -4095,6 +4249,20 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesTemporality:
enum:
- delta
- cumulative
- unspecified
type: string
MetrictypesType:
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
PreferencetypesPreference:
properties:
allowedScopes:
@@ -4187,6 +4355,8 @@ components:
type: string
unit:
type: string
required:
- name
type: object
Querybuildertypesv5QueryData:
properties:
@@ -4248,6 +4418,68 @@ components:
format: date-time
type: string
type: object
TelemetrytypesGettableFieldKeys:
properties:
complete:
type: boolean
keys:
additionalProperties:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
nullable: true
type: object
required:
- keys
- complete
type: object
TelemetrytypesGettableFieldValues:
properties:
complete:
type: boolean
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
required:
- values
- complete
type: object
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
fieldDataType:
type: string
name:
type: string
signal:
type: string
unit:
type: string
required:
- name
type: object
TelemetrytypesTelemetryFieldValues:
properties:
boolValues:
items:
type: boolean
type: array
numberValues:
items:
format: double
type: number
type: array
relatedValues:
items:
type: string
type: array
stringValues:
items:
type: string
type: array
type: object
TypesChangePasswordRequest:
properties:
newPassword:
@@ -4376,6 +4608,9 @@ components:
type: string
orgId:
type: string
required:
- orgId
- email
type: object
TypesPostableInvite:
properties:
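The spec above adds two read-only endpoints, GET /api/v1/fields/keys and GET /api/v1/fields/values. A minimal sketch of querying the keys endpoint from Go follows; the base URL, the SIGNOZ-API-KEY header name, and the exact response envelope are assumptions inferred from the spec, not code taken from this change.

// Hedged sketch: list field keys for the logs signal via the new endpoint.
// Query parameters and the data/status envelope follow the spec above; the
// header name and base URL are assumed.
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

type fieldKey struct {
	Name          string `json:"name"`
	Signal        string `json:"signal,omitempty"`
	FieldContext  string `json:"fieldContext,omitempty"`
	FieldDataType string `json:"fieldDataType,omitempty"`
}

type fieldKeysResponse struct {
	Status string `json:"status"`
	Data   struct {
		Complete bool                  `json:"complete"`
		Keys     map[string][]fieldKey `json:"keys"`
	} `json:"data"`
}

func main() {
	q := url.Values{}
	q.Set("signal", "logs")
	q.Set("searchText", "http")
	q.Set("limit", "50")

	req, err := http.NewRequest(http.MethodGet, "http://localhost:8080/api/v1/fields/keys?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // assumed header name

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out fieldKeysResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	for name, keys := range out.Data.Keys {
		fmt.Printf("%s: %d keys (complete=%v)\n", name, len(keys), out.Data.Complete)
	}
}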

View File

@@ -2,12 +2,18 @@ package openfgaauthz
import (
"context"
"slices"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -15,84 +21,320 @@ import (
type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store roletypes.Store
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
openfgaServer: openfgaServer,
licensing: licensing,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
registry: registry,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.pkgAuthzService.Start(ctx)
return provider.openfgaServer.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.pkgAuthzService.Stop(ctx)
return provider.openfgaServer.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.pkgAuthzService.Check(ctx, tuple)
return provider.openfgaServer.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
tuples := make([]*openfgav1.TupleKey, 0)
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
if err != nil {
return err
}
tuples = append(tuples, grantTuples...)
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
if err != nil {
return err
}
tuples = append(tuples, managedRoleTuples...)
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range provider.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, provider.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range provider.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = provider.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return provider.store.Delete(ctx, orgID, id)
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := []*openfgav1.TupleKey{}
// Grant the admin role to the user
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
adminTuple, err := authtypes.TypeableRole.Tuples(
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, adminTuple...)
// Grant the admin role to the anonymous user
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
anonymousTuple, err := authtypes.TypeableRole.Tuples(
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, anonymousTuple...)
return tuples, nil
}
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
transactionsByRole := make(map[string][]*authtypes.Transaction)
for _, register := range provider.registry {
for roleName, txns := range register.MustGetManagedRoleTransactions() {
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
}
}
tuples := make([]*openfgav1.TupleKey, 0)
for roleName, transactions := range transactionsByRole {
for _, txn := range transactions {
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
txnTuples, err := typeable.Tuples(
authtypes.MustNewSubject(
authtypes.TypeableRole,
roleName,
orgID,
&authtypes.RelationAssignee,
),
txn.Relation,
[]authtypes.Selector{txn.Object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, txnTuples...)
}
}
return tuples, nil
}
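The refactor above folds role management into the authz provider and introduces CreateManagedUserRoleTransactions, which writes the SigNoz admin/anonymous grants plus every registered module's managed-role tuples in one call. A hypothetical bootstrap sketch follows; NewProviderFactory and CreateManagedUserRoleTransactions come from this diff, while the call site, parameter names, and the assumption that the method is reachable on the returned authz.AuthZ value are illustrative.

// Hypothetical wiring sketch, not code from this change.
// (Imports elided; the packages are the same ones used in the enterprise
// server wiring diffs earlier on this page.)
func bootstrapAuthZ(
	ctx context.Context,
	settings factory.ProviderSettings,
	cfg authz.Config,
	sqlStore sqlstore.SQLStore,
	lic licensing.Licensing,
	dashboardModule dashboard.Module, // passed as an authz.RegisterTypeable, as in the enterprise wiring above
	orgID, userID valuer.UUID,
) (authz.AuthZ, error) {
	providerFactory := openfgaauthz.NewProviderFactory(
		sqlStore,
		openfgaschema.NewSchema().Get(ctx),
		lic,
		dashboardModule,
	)

	service, err := providerFactory.New(ctx, settings, cfg)
	if err != nil {
		return nil, err
	}

	// Grants the SigNoz admin role to the user, the anonymous role to the
	// anonymous subject, and writes each module's managed wildcard
	// transactions (e.g. read on public dashboards) in a single Write.
	// Assumes the method is exposed on the returned authz.AuthZ.
	if err := service.CreateManagedUserRoleTransactions(ctx, orgID, userID); err != nil {
		return nil, err
	}
	return service, nil
}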

View File

@@ -0,0 +1,83 @@
package openfgaserver
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type Server struct {
pkgAuthzService authz.AuthZ
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return server.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/types"
@@ -26,13 +25,11 @@ type module struct {
pkgDashboardModule dashboard.Module
store dashboardtypes.Store
settings factory.ScopedProviderSettings
roleSetter role.Setter
granter role.Granter
querier querier.Querier
licensing licensing.Licensing
}
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
@@ -40,8 +37,6 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
pkgDashboardModule: pkgDashboardModule,
store: store,
settings: scopedProviderSettings,
roleSetter: roleSetter,
granter: granter,
querier: querier,
licensing: licensing,
}
@@ -61,29 +56,6 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
if err != nil {
return err
}
additionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
if err != nil {
return err
}
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
if err != nil {
return err
@@ -128,6 +100,7 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
return []authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
}, storableDashboard.OrgID, nil
}
@@ -190,29 +163,6 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
@@ -250,10 +200,6 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
return module.pkgDashboardModule.List(ctx, orgID)
}
@@ -266,34 +212,27 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
}
return nil
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{
Type: authtypes.TypeMetaResource,
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
),
},
},
}
}
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
return module.store.DeletePublic(ctx, dashboardID.StringValue())
}
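Combined with the wildcard transaction above, GetPublicDashboardSelectorsAndOrg now returns both the dashboard's own selector and the wildcard selector, so an anonymous read check passes if either tuple exists. A rough sketch of such a check follows; the selector construction and the CheckWithTupleCreationWithoutClaims signature are taken from the diffs above, while the surrounding handler wiring is assumed.

// Illustrative only; imports elided (context, authz, authtypes, dashboardtypes, valuer).
func canViewPublicDashboard(ctx context.Context, authzService authz.AuthZ, orgID, publicDashboardID valuer.UUID) error {
	selectors := []authtypes.Selector{
		authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboardID.StringValue()),
		authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
	}

	// Passes if either the dashboard-specific grant or the managed "*" grant
	// held by the anonymous role matches.
	return authzService.CheckWithTupleCreationWithoutClaims(
		ctx,
		orgID,
		authtypes.RelationRead,
		dashboardtypes.TypeableMetaResourcePublicDashboard,
		selectors,
		nil, // role selectors
	)
}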

View File

@@ -1,165 +0,0 @@
package implrole
import (
"context"
"slices"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
licensing licensing.Licensing
registry []role.RegisterTypeable
}
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return &setter{
store: store,
authz: authz,
licensing: licensing,
registry: registry,
}
}
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range setter.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, setter.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range setter.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := setter.
authz.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return setter.store.Delete(ctx, orgID, id)
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
@@ -56,7 +55,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

View File

@@ -211,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
r.Use(otelmux.Middleware(
"apiserver",
@@ -237,7 +237,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,6 +63,8 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}
var _ baserules.Rule = (*AnomalyRule)(nil)
func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -490,7 +492,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -553,7 +555,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
EvalWindow: r.EvalWindow(),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

View File

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
evalTime := baseTime.Add(5 * time.Minute)
target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,8 +213,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}
// newTask returns an appropriate group for
// rule type
// newTask returns an appropriate group for the rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
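The rule and task diffs above replace the old ruletypes.Duration wrapper with a text-duration value: windows and frequencies are parsed from strings like "5m" and converted back with Duration() only where a time.Duration is needed. A tiny sketch of that pattern, assuming MustParseTextDuration returns the same type whose Duration() accessor appears in the task diff:

// Minimal sketch of the new duration handling: parse from text, convert to
// time.Duration only at the point of use (e.g. task scheduling).
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	evalWindow := valuer.MustParseTextDuration("5m") // as in the anomaly rule tests above
	frequency := valuer.MustParseTextDuration("1m")

	fmt.Println(evalWindow.Duration())    // 5m0s, a plain time.Duration
	fmt.Println(frequency.Duration() * 3) // arithmetic happens on the time.Duration value
}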

View File

@@ -0,0 +1,222 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
QueryClient,
QueryFunction,
QueryKey,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetFieldsKeys200,
GetFieldsKeysParams,
GetFieldsValues200,
GetFieldsValuesParams,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint returns field keys
* @summary Get field keys
*/
export const getFieldsKeys = (
params?: GetFieldsKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetFieldsKeys200>({
url: `/api/v1/fields/keys`,
method: 'GET',
params,
signal,
});
};
export const getGetFieldsKeysQueryKey = (params?: GetFieldsKeysParams) => {
return ['getFieldsKeys', ...(params ? [params] : [])] as const;
};
export const getGetFieldsKeysQueryOptions = <
TData = Awaited<ReturnType<typeof getFieldsKeys>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsKeys>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetFieldsKeysQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getFieldsKeys>>> = ({
signal,
}) => getFieldsKeys(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getFieldsKeys>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetFieldsKeysQueryResult = NonNullable<
Awaited<ReturnType<typeof getFieldsKeys>>
>;
export type GetFieldsKeysQueryError = RenderErrorResponseDTO;
/**
* @summary Get field keys
*/
export function useGetFieldsKeys<
TData = Awaited<ReturnType<typeof getFieldsKeys>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsKeys>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetFieldsKeysQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get field keys
*/
export const invalidateGetFieldsKeys = async (
queryClient: QueryClient,
params?: GetFieldsKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetFieldsKeysQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint returns field values
* @summary Get field values
*/
export const getFieldsValues = (
params?: GetFieldsValuesParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetFieldsValues200>({
url: `/api/v1/fields/values`,
method: 'GET',
params,
signal,
});
};
export const getGetFieldsValuesQueryKey = (params?: GetFieldsValuesParams) => {
return ['getFieldsValues', ...(params ? [params] : [])] as const;
};
export const getGetFieldsValuesQueryOptions = <
TData = Awaited<ReturnType<typeof getFieldsValues>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsValuesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsValues>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetFieldsValuesQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getFieldsValues>>> = ({
signal,
}) => getFieldsValues(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getFieldsValues>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetFieldsValuesQueryResult = NonNullable<
Awaited<ReturnType<typeof getFieldsValues>>
>;
export type GetFieldsValuesQueryError = RenderErrorResponseDTO;
/**
* @summary Get field values
*/
export function useGetFieldsValues<
TData = Awaited<ReturnType<typeof getFieldsValues>>,
TError = RenderErrorResponseDTO
>(
params?: GetFieldsValuesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getFieldsValues>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetFieldsValuesQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get field values
*/
export const invalidateGetFieldsValues = async (
queryClient: QueryClient,
params?: GetFieldsValuesParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetFieldsValuesQueryKey(params) },
options,
);
return queryClient;
};

View File

@@ -762,18 +762,6 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
totalTimeSeries: number;
}
export enum MetricsexplorertypesMetricMetadataDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesMetricMetadataDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesMetricMetadataDTO {
/**
* @type string
@@ -783,29 +771,14 @@ export interface MetricsexplorertypesMetricMetadataDTO {
* @type boolean
*/
isMonotonic: boolean;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesMetricMetadataDTOType;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @type string
*/
unit: string;
}
export enum MetricsexplorertypesStatDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesStatDTO {
/**
* @type string
@@ -825,11 +798,7 @@ export interface MetricsexplorertypesStatDTO {
* @minimum 0
*/
timeseries: number;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesStatDTOType;
type: MetrictypesTypeDTO;
/**
* @type string
*/
@@ -889,7 +858,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
totalValue: number;
}
export enum MetricsexplorertypesTreemapRequestDTOMode {
export enum MetricsexplorertypesTreemapModeDTO {
timeseries = 'timeseries',
samples = 'samples',
}
@@ -904,11 +873,7 @@ export interface MetricsexplorertypesTreemapRequestDTO {
* @type integer
*/
limit: number;
/**
* @enum timeseries,samples
* @type string
*/
mode: MetricsexplorertypesTreemapRequestDTOMode;
mode: MetricsexplorertypesTreemapModeDTO;
/**
* @type integer
* @format int64
@@ -929,18 +894,6 @@ export interface MetricsexplorertypesTreemapResponseDTO {
timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
/**
* @type string
@@ -954,22 +907,26 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
* @type string
*/
metricName: string;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @type string
*/
unit: string;
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetrictypesTypeDTO {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
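
The two shared enums above replace the per-DTO copies removed earlier in this diff. A small sketch of how a caller can reference them; the field values are illustrative, and Partial is used because only the fields visible in this diff are shown.

const update: Partial<MetricsexplorertypesUpdateMetricMetadataRequestDTO> = {
  metricName: 'http_server_duration',
  temporality: MetrictypesTemporalityDTO.cumulative,
  type: MetrictypesTypeDTO.histogram,
  unit: 'ms',
};
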
export interface PreferencetypesPreferenceDTO {
/**
* @type array
@@ -1092,7 +1049,7 @@ export interface Querybuildertypesv5OrderByKeyDTO {
/**
* @type string
*/
name?: string;
name: string;
/**
* @type string
*/
@@ -1184,6 +1141,79 @@ export interface RoletypesRoleDTO {
updatedAt?: Date;
}
/**
* @nullable
*/
export type TelemetrytypesGettableFieldKeysDTOKeys = {
[key: string]: TelemetrytypesTelemetryFieldKeyDTO[];
} | null;
export interface TelemetrytypesGettableFieldKeysDTO {
/**
* @type boolean
*/
complete: boolean;
/**
* @type object
* @nullable true
*/
keys: TelemetrytypesGettableFieldKeysDTOKeys;
}
export interface TelemetrytypesGettableFieldValuesDTO {
/**
* @type boolean
*/
complete: boolean;
values: TelemetrytypesTelemetryFieldValuesDTO;
}
export interface TelemetrytypesTelemetryFieldKeyDTO {
/**
* @type string
*/
description?: string;
/**
* @type string
*/
fieldContext?: string;
/**
* @type string
*/
fieldDataType?: string;
/**
* @type string
*/
name: string;
/**
* @type string
*/
signal?: string;
/**
* @type string
*/
unit?: string;
}
export interface TelemetrytypesTelemetryFieldValuesDTO {
/**
* @type array
*/
boolValues?: boolean[];
/**
* @type array
*/
numberValues?: number[];
/**
* @type array
*/
relatedValues?: string[];
/**
* @type array
*/
stringValues?: string[];
}
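
The values DTO splits results by primitive type; a hypothetical helper (not part of the generated client) can flatten it into a single option list for UI dropdowns.

// Hypothetical helper, not part of the generated client.
function flattenFieldValues(
  values: TelemetrytypesTelemetryFieldValuesDTO,
): (string | number | boolean)[] {
  // relatedValues is intentionally left out of the flat list in this sketch.
  return [
    ...(values.stringValues ?? []),
    ...(values.numberValues ?? []),
    ...(values.boolValues ?? []),
  ];
}
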
export interface TypesChangePasswordRequestDTO {
/**
* @type string
@@ -1388,7 +1418,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
email?: string;
email: string;
/**
* @type string
*/
@@ -1396,7 +1426,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
orgId?: string;
orgId: string;
}
export interface TypesPostableInviteDTO {
@@ -1631,6 +1661,132 @@ export type DeleteAuthDomainPathParameters = {
export type UpdateAuthDomainPathParameters = {
id: string;
};
export type GetFieldsKeysParams = {
/**
* @type string
* @description undefined
*/
signal?: string;
/**
* @type string
* @description undefined
*/
source?: string;
/**
* @type integer
* @description undefined
*/
limit?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
startUnixMilli?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
endUnixMilli?: number;
/**
* @type string
* @description undefined
*/
fieldContext?: string;
/**
* @type string
* @description undefined
*/
fieldDataType?: string;
/**
* @type string
* @description undefined
*/
metricName?: string;
/**
* @type string
* @description undefined
*/
searchText?: string;
};
export type GetFieldsKeys200 = {
data?: TelemetrytypesGettableFieldKeysDTO;
/**
* @type string
*/
status?: string;
};
export type GetFieldsValuesParams = {
/**
* @type string
* @description undefined
*/
signal?: string;
/**
* @type string
* @description undefined
*/
source?: string;
/**
* @type integer
* @description undefined
*/
limit?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
startUnixMilli?: number;
/**
* @type integer
* @format int64
* @description undefined
*/
endUnixMilli?: number;
/**
* @type string
* @description undefined
*/
fieldContext?: string;
/**
* @type string
* @description undefined
*/
fieldDataType?: string;
/**
* @type string
* @description undefined
*/
metricName?: string;
/**
* @type string
* @description undefined
*/
searchText?: string;
/**
* @type string
* @description undefined
*/
name?: string;
/**
* @type string
* @description undefined
*/
existingQuery?: string;
};
export type GetFieldsValues200 = {
data?: TelemetrytypesGettableFieldValuesDTO;
/**
* @type string
*/
status?: string;
};
export type GetResetPasswordTokenPathParameters = {
id: string;
};

View File

@@ -34,159 +34,230 @@ const themeColors = {
cyan: '#00FFFF',
},
chartcolors: {
radicalRed: '#FF1A66',
// Blues (3)
dodgerBlue: '#2F80ED',
mediumOrchid: '#BB6BD9',
seaBuckthorn: '#F2994A',
seaGreen: '#219653',
turquoiseBlue: '#56CCF2',
festivalOrange: '#F2C94C',
silver: '#BDBDBD',
outrageousOrange: '#FF6633',
roseBud: '#FFB399',
canary: '#FFFF99',
deepSkyBlue: '#00B3E6',
goldTips: '#E6B333',
royalBlue: '#3366E6',
avocado: '#999966',
mintGreen: '#99FF99',
chestnut: '#B34D4D',
lima: '#80B300',
olive: '#809900',
beautyBush: '#E6B3B3',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRose: '#FF99E6',
electricLime: '#CCFF1A',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
turquoise: '#33FFCC',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
yellow: '#FFFF00',
purple: '#800080',
cyan: '#00FFFF',
magenta: '#FF00FF',
orange: '#FFA500',
pink: '#FFC0CB',
brown: '#A52A2A',
teal: '#008080',
lime: '#00FF00',
maroon: '#800000',
navy: '#000080',
aquamarine: '#7FFFD4',
darkSeaGreen: '#8FBC8F',
gray: '#808080',
skyBlue: '#87CEEB',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peru: '#CD853F',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrown: '#BC8F8F',
darkSlateGray: '#2F4F4F',
mediumAquamarine: '#66CDAA',
lavender: '#E6E6FA',
thistle: '#D8BFD8',
salmon: '#FA8072',
darkSalmon: '#E9967A',
paleVioletRed: '#DB7093',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
lawnGreen: '#7CFC00',
// Teals / Cyans (3)
turquoise: '#00CEC9',
lagoon: '#1ABC9C',
cyanBright: '#22A6F2',
// Greens (3)
emeraldGreen: '#27AE60',
mediumSeaGreen: '#3CB371',
lightCoral: '#F08080',
gold: '#FFD700',
sandyBrown: '#F4A460',
darkKhaki: '#BDB76B',
cornflowerBlue: '#6495ED',
mediumVioletRed: '#C71585',
paleGreen: '#98FB98',
limeGreen: '#A3E635',
// Yellows / Golds (3)
festivalYellow: '#F2C94C',
sunflower: '#FFD93D',
warmAmber: '#FFCA28',
// Oranges (3)
festivalOrange: '#F2994A',
coralOrange: '#E17055',
pumpkin: '#FF7F50',
// Reds (3)
radicalRed: '#FF1A66',
crimsonRed: '#EB5757',
fireRed: '#E10600',
// Pinks (3)
hotPink: '#E84393',
rosePink: '#FD79A8',
blush: '#FF7EB6',
// Purples / Violets (3)
mediumPurple: '#BB6BD9',
royalPurple: '#9B51E0',
orchid: '#DA77F2',
// Accent / Neon / Unique Colors (3)
neonViolet: '#C77DFF',
electricPurple: '#6C5CE7',
arcticBlue: '#48DBFB',
// Extended palette — systematic variations to reach 100 colors
blue1: '#1F63E0',
blue2: '#3A7AED',
blue3: '#5A9DF5',
cyan1: '#00B0AA',
cyan2: '#33D6C2',
cyan3: '#66E9DA',
green1: '#1E8449',
green2: '#2ECC71',
green3: '#58D68D',
yellow1: '#F1C40F',
yellow2: '#F7DC6F',
yellow3: '#F9E79F',
orange1: '#D35400',
orange2: '#E67E22',
orange3: '#F5B041',
red1: '#C0392B',
red2: '#E74C3C',
red3: '#EC7063',
pink1: '#D81B60',
pink2: '#E91E63',
pink3: '#F06292',
purple1: '#8E44AD',
purple2: '#9B59B6',
purple3: '#BB8FCE',
teal1: '#009688',
teal2: '#1ABC9C',
teal3: '#48C9B0',
lime1: '#A3E635',
lime2: '#B9F18D',
lime3: '#D4FFB0',
gold1: '#F39C12',
gold2: '#F1C40F',
gold3: '#F7DC6F',
coral1: '#E67E22',
coral2: '#F39C12',
coral3: '#F5B041',
crimson1: '#C0392B',
crimson2: '#E74C3C',
crimson3: '#EC7063',
violet1: '#8E44AD',
violet2: '#9B59B6',
violet3: '#BB8FCE',
aqua1: '#00BFFF',
aqua2: '#1E90FF',
aqua3: '#63B8FF',
forest1: '#27AE60',
forest2: '#2ECC71',
forest3: '#58D68D',
blush1: '#FF6F91',
blush2: '#FF85A2',
blush3: '#FFA0B3',
lavender1: '#9B59B6',
lavender2: '#AF7AC5',
lavender3: '#C39BD3',
tomato1: '#E74C3C',
tomato2: '#EC7063',
tomato3: '#F1948A',
salmon1: '#FF6B6B',
salmon2: '#FF8787',
salmon3: '#FFA1A1',
mustard1: '#F1C40F',
mustard2: '#F7DC6F',
mustard3: '#F9E79F',
teal4: '#1ABC9C',
teal5: '#48C9B0',
teal6: '#76D7C4',
magenta1: '#D6336C',
magenta2: '#E84393',
magenta3: '#F06292',
violet4: '#7D3C98',
violet5: '#8E44AD',
violet6: '#9B59B6',
green4: '#229954',
green5: '#27AE60',
green6: '#52BE80',
blue4: '#2874A6',
blue5: '#2E86C1',
blue6: '#3498DB',
red4: '#C0392B',
red5: '#E74C3C',
red6: '#EC7063',
orange4: '#D35400',
orange5: '#E67E22',
orange6: '#EB984E',
pink4: '#C2185B',
pink5: '#D81B60',
pink6: '#E91E63',
gold4: '#B7950B',
gold5: '#F1C40F',
gold6: '#F4D03F',
},
lightModeColor: {
radicalRed: '#FF1A66',
dodgerBlueDark: '#0C6EED',
steelgrey: '#2f4b7c',
steelpurple: '#665191',
steelindigo: '#a05195',
steelpink: '#d45087',
steelcoral: '#f95d6a',
steelorange: '#ff7c43',
steelgold: '#ffa600',
steelrust: '#de425b',
steelgreen: '#41967e',
mediumOrchidDark: '#C326FD',
seaBuckthornDark: '#E66E05',
seaGreen: '#219653',
turquoiseBlueDark: '#0099CC',
silverDark: '#757575',
outrageousOrangeDark: '#F9521A',
roseBudDark: '#EB6437',
deepSkyBlueDark: '#0595BD',
royalBlue: '#3366E6',
avocadoDark: '#8E8E29',
mintGreenDark: '#00C700',
chestnut: '#B34D4D',
limaDark: '#6E9900',
olive: '#809900',
beautyBushDark: '#E25555',
danube: '#6680B3',
oliveDrab: '#66991A',
lavenderRoseDark: '#F024BD',
electricLimeDark: '#84A800',
robin: '#3F5ECC',
harleyOrange: '#E6331A',
gladeGreen: '#66994D',
hemlock: '#66664D',
vidaLoca: '#4D8000',
rust: '#B33300',
red: '#FF0000', // Adding more colors, we need to get better colors from design team
blue: '#0000FF',
green: '#00FF00',
purple: '#800080',
magentaDark: '#EB00EB',
pinkDark: '#FF3D5E',
brown: '#A52A2A',
teal: '#008080',
limeDark: '#07A207',
maroon: '#800000',
navy: '#000080',
gray: '#808080',
skyBlueDark: '#0CA7E4',
indigo: '#4B0082',
slateGray: '#708090',
chocolate: '#D2691E',
tomato: '#FF6347',
steelBlue: '#4682B4',
peruDark: '#D16E0A',
darkOliveGreen: '#556B2F',
indianRed: '#CD5C5C',
mediumSlateBlue: '#7B68EE',
rosyBrownDark: '#CB4848',
darkSlateGray: '#2F4F4F',
fuchsia: '#FF0AFF',
salmonDark: '#FF432E',
darkSalmonDark: '#D26541',
paleVioletRedDark: '#E83089',
mediumPurple: '#9370DB',
darkOrchid: '#9932CC',
mediumSeaGreenDark: '#109E50',
lightCoralDark: '#F85959',
gold: '#FFD700',
sandyBrownDark: '#D97117',
darkKhakiDark: '#99900A',
cornflowerBlueDark: '#3371E6',
mediumVioletRed: '#C71585',
paleGreenDark: '#0D910D',
radicalRed: '#D81B60',
dodgerBlueDark: '#1E5BD9',
steelgrey: '#344B6B',
steelpurple: '#5E548E',
steelindigo: '#8E4A7C',
steelpink: '#B63A6F',
steelcoral: '#E14B5A',
steelorange: '#E76F2F',
steelgold: '#E09B00',
steelrust: '#C93A50',
steelgreen: '#2F7D69',
mediumOrchidDark: '#8E24AA',
seaBuckthornDark: '#C75A00',
seaGreen: '#1E7F5A',
turquoiseBlueDark: '#007EA7',
silverDark: '#5F5F5F',
outrageousOrangeDark: '#E64A19',
roseBudDark: '#D84315',
deepSkyBlueDark: '#0277BD',
royalBlue: '#2A4FDB',
avocadoDark: '#6B6B1E',
mintGreenDark: '#2E9E55',
chestnut: '#8B3A3A',
limaDark: '#5C7F00',
olive: '#6E7F00',
beautyBushDark: '#C93C3C',
danube: '#4F6FB3',
oliveDrab: '#4F7F1A',
lavenderRoseDark: '#B0178F',
electricLimeDark: '#6B8F00',
robin: '#2F4FCC',
harleyOrange: '#CC2E12',
gladeGreen: '#4F7F46',
hemlock: '#5C5C45',
vidaLoca: '#3D6B00',
rust: '#993300',
red: '#C62828',
blue: '#1A237E',
green: '#1B7F3A',
purple: '#6A1B9A',
magentaDark: '#B000B5',
pinkDark: '#C2185B',
brown: '#7A3A1E',
teal: '#006D6F',
limeDark: '#4C8C2B',
maroon: '#6D1B1B',
navy: '#0D1B5E',
gray: '#616161',
skyBlueDark: '#0288D1',
indigo: '#303F9F',
slateGray: '#556B7C',
chocolate: '#9C4A1A',
tomato: '#E53935',
steelBlue: '#3A6EA5',
peruDark: '#B35E00',
darkOliveGreen: '#445B1F',
indianRed: '#B04040',
mediumSlateBlue: '#5C6BC0',
rosyBrownDark: '#A94444',
darkSlateGray: '#2E4A4A',
fuchsia: '#C511C5',
salmonDark: '#E64A3C',
darkSalmonDark: '#C85A3A',
paleVioletRedDark: '#C2186A',
mediumPurple: '#7E57C2',
darkOrchid: '#7B1FA2',
mediumSeaGreenDark: '#2E8B57',
lightCoralDark: '#E57373',
gold: '#D4AF37',
sandyBrownDark: '#C76A15',
darkKhakiDark: '#8A7F00',
cornflowerBlueDark: '#355FCC',
mediumVioletRed: '#AD1457',
paleGreenDark: '#2E7D32',
},
errorColor: '#d32f2f',
royalGrey: '#888888',
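
A sketch of one way a palette this size is typically consumed, cycling by series index; the helper is illustrative and assumes the themeColors object defined above is in scope.

// Illustrative only; not part of this diff.
const chartPalette: string[] = Object.values(themeColors.chartcolors);

function colorForSeries(seriesIndex: number): string {
  // Cycle so any number of series gets a deterministic color.
  return chartPalette[seriesIndex % chartPalette.length];
}
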

View File

@@ -18,8 +18,8 @@ import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynam
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isEmpty, map } from 'lodash-es';
import {
ArrowLeft,

View File

@@ -0,0 +1,53 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';
type CustomVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate'
>;
function CustomVariableInput({
variableData,
onValueUpdate,
}: CustomVariableInputProps): JSX.Element {
const optionsData: (string | number | boolean)[] = useMemo(() => {
return sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as (string | number | boolean)[];
}, [variableData.customValue, variableData.sort]);
const {
value,
defaultValue,
enableSelectAll,
onChange,
onDropdownVisibleChange,
handleClear,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
});
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
/>
);
}
export default memo(CustomVariableInput);

View File

@@ -25,12 +25,6 @@
}
}
&.focused {
.variable-value {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-value {
display: flex;
min-width: 120px;
@@ -48,6 +42,11 @@
font-style: normal;
font-weight: 400;
line-height: 16px; /* 133.333% */
&:hover,
&:focus-within {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-select {
@@ -99,12 +98,6 @@
.lightMode {
.variable-item {
&.focused {
.variable-value {
border: 1px solid var(--bg-robin-400);
}
}
.variable-name {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
@@ -115,6 +108,11 @@
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
color: var(--bg-ink-400);
&:hover,
&:focus-within {
outline: 1px solid var(--bg-robin-400);
}
}
}
}
@@ -124,3 +122,9 @@
padding: 4px 12px;
font-size: 12px;
}
.dashboard-variables-selection-container {
display: flex;
flex-wrap: wrap;
gap: 12px;
}

View File

@@ -1,4 +1,4 @@
import { memo, useEffect } from 'react';
import { memo, useCallback, useEffect, useMemo } from 'react';
import { useSelector } from 'react-redux';
import { Row } from 'antd';
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
@@ -7,7 +7,6 @@ import {
useDashboardVariablesSelector,
} from 'hooks/dashboard/useDashboardVariables';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import { AppState } from 'store/reducers';
@@ -22,7 +21,6 @@ import './DashboardVariableSelection.styles.scss';
function DashboardVariableSelection(): JSX.Element | null {
const {
selectedDashboard,
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
@@ -52,34 +50,36 @@ function DashboardVariableSelection(): JSX.Element | null {
);
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
// also trigger when the global time changes
useEffect(
() => {
if (!isEmpty(dependencyData?.order)) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[JSON.stringify(dependencyData?.order), minTime, maxTime],
// Memoize the order key to avoid unnecessary triggers
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
// Trigger refetch when dependency order changes or global time changes
useEffect(() => {
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.
// This makes localStorage much lighter and more efficient.
const onValueUpdate = (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): void => {
if (id) {
const onValueUpdate = useCallback(
(
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
// eslint-disable-next-line sonarjs/cognitive-complexity
): void => {
// For dynamic variables, only store in localStorage when NOT allSelected
// This makes localStorage much lighter by avoiding storing all individual values
const variable = dashboardVariables?.[id] || dashboardVariables?.[name];
const isDynamic = variable?.type === 'DYNAMIC';
const variable = dashboardVariables[id] || dashboardVariables[name];
const isDynamic = variable.type === 'DYNAMIC';
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);
if (allSelected) {
@@ -88,41 +88,39 @@ function DashboardVariableSelection(): JSX.Element | null {
updateUrlVariable(name || id, value);
}
if (selectedDashboard) {
setSelectedDashboard((prev) => {
if (prev) {
const oldVariables = prev?.data.variables;
// this is added to handle case where we have two different
// schemas for variable response
if (oldVariables?.[id]) {
oldVariables[id] = {
...oldVariables[id],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
if (oldVariables?.[name]) {
oldVariables[name] = {
...oldVariables[name],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return {
...prev,
data: {
...prev?.data,
variables: {
...oldVariables,
},
},
setSelectedDashboard((prev) => {
if (prev) {
const oldVariables = { ...prev?.data.variables };
// this is added to handle case where we have two different
// schemas for variable response
if (oldVariables?.[id]) {
oldVariables[id] = {
...oldVariables[id],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return prev;
});
}
if (oldVariables?.[name]) {
oldVariables[name] = {
...oldVariables[name],
selectedValue: value,
allSelected,
haveCustomValuesSelected,
};
}
return {
...prev,
data: {
...prev?.data,
variables: {
...oldVariables,
},
},
};
}
return prev;
});
if (dependencyData) {
const updatedVariables: string[] = [];
@@ -138,11 +136,20 @@ function DashboardVariableSelection(): JSX.Element | null {
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
}
};
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
return (
<Row style={{ display: 'flex', gap: '12px' }}>
<Row className="dashboard-variables-selection-container">
{sortedVariablesArray.map((variable) => {
const key = `${variable.name}${variable.id}${variable.order}`;

View File

@@ -23,9 +23,9 @@ import { SelectItemStyle } from './styles';
import {
areArraysEqual,
getOptionsForDynamicVariable,
getSelectValue,
uniqueValues,
} from './util';
import { getSelectValue } from './VariableItem';
import './DashboardVariableSelection.styles.scss';

View File

@@ -0,0 +1,229 @@
import { memo, useCallback, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
interface QueryVariableInputProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const {
tempSelection,
setTempSelection,
value,
defaultValue,
enableSelectAll,
onChange,
onDropdownVisibleChange,
handleClear,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
});
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
valueNotInList = true;
}
// variableData.allSelected is added for the case where, on a change of options, we need to update
// the local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
};
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
],
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
error: string;
};
}) => {
const { details } = error;
if (details.error) {
let message = details.error;
if ((details.error ?? '').toString().includes('Syntax error:')) {
message =
'Please make sure query is valid and dependent variables are selected';
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
const handleRetry = useCallback((): void => {
setErrorMessage(null);
refetch();
}, [refetch]);
return (
<SelectVariableInput
variableId={variableData.id}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
// query variable specific, API related props
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
/>
);
}
export default memo(QueryVariableInput);

View File

@@ -0,0 +1,134 @@
import { memo, useMemo } from 'react';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Popover, Tooltip, Typography } from 'antd';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
import { SelectItemStyle } from './styles';
const errorIconStyle = { margin: '0 0.5rem' };
interface SelectVariableInputProps {
variableId: string;
options: (string | number | boolean)[];
value: string | string[] | undefined;
enableSelectAll: boolean;
isMultiSelect: boolean;
onChange: (value: string | string[]) => void;
onClear: () => void;
defaultValue?: string | string[];
onDropdownVisibleChange?: (visible: boolean) => void;
loading?: boolean;
errorMessage?: string | null;
onRetry?: () => void;
}
const MAX_TAG_DISPLAY_VALUES = 10;
function maxTagPlaceholder(
omittedValues: { label?: React.ReactNode; value?: string | number }[],
): JSX.Element {
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
const tooltipText =
valuesToShow.map(({ value: v }) => v ?? '').join(', ') +
(hasMore ? ` + ${omittedValues.length - MAX_TAG_DISPLAY_VALUES} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}
function SelectVariableInput({
variableId,
options,
value,
onChange,
onDropdownVisibleChange,
onClear,
loading,
errorMessage,
onRetry,
enableSelectAll,
isMultiSelect,
defaultValue,
}: SelectVariableInputProps): JSX.Element {
const selectOptions = useMemo(
() =>
options.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[options],
);
const commonProps = useMemo(
() => ({
// main props
key: variableId,
value,
defaultValue,
// setup props
placeholder: 'Select value',
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
// dynamic props
'data-testid': 'variable-select',
onChange,
loading,
options: selectOptions,
errorMessage,
onRetry,
}),
[
variableId,
defaultValue,
onChange,
loading,
selectOptions,
value,
errorMessage,
onRetry,
],
);
return (
<>
{isMultiSelect ? (
<CustomMultiSelect
{...commonProps}
placement="bottomLeft"
maxTagCount={2}
onDropdownVisibleChange={onDropdownVisibleChange}
maxTagPlaceholder={maxTagPlaceholder}
onClear={onClear}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
/>
) : (
<CustomSelect {...commonProps} />
)}
{errorMessage && (
<span style={errorIconStyle}>
<Popover placement="top" content={<Typography>{errorMessage}</Typography>}>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
)}
</>
);
}
export default memo(SelectVariableInput);

View File

@@ -0,0 +1,85 @@
import { memo, useCallback, useRef, useState } from 'react';
import { Input, InputRef } from 'antd';
import { VariableItemProps } from './VariableItem';
type TextboxVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate'
>;
function TextboxVariableInput({
variableData,
onValueUpdate,
}: TextboxVariableInputProps): JSX.Element {
const handleChange = useCallback(
(inputValue: string | string[]): void => {
if (inputValue === variableData.selectedValue) {
return;
}
if (variableData.name) {
onValueUpdate(variableData.name, variableData.id, inputValue, false);
}
},
[
onValueUpdate,
variableData.id,
variableData.name,
variableData.selectedValue,
],
);
const textboxInputRef = useRef<InputRef>(null);
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const handleInputOnChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
setTextboxInputValue(event.target.value);
},
[setTextboxInputValue],
);
const handleInputOnBlur = useCallback(
(event: React.FocusEvent<HTMLInputElement>): void => {
const value = event.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
handleChange(variableData.defaultValue.toString());
} else {
handleChange(value);
}
},
[handleChange, variableData.defaultValue],
);
const handleInputOnKeyDown = useCallback(
(event: React.KeyboardEvent<HTMLInputElement>): void => {
if (event.key === 'Enter') {
textboxInputRef.current?.blur();
}
},
[],
);
return (
<Input
key={variableData.id}
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
value={textboxInputValue}
title={textboxInputValue}
onChange={handleInputOnChange}
onBlur={handleInputOnBlur}
onKeyDown={handleInputOnKeyDown}
/>
);
}
export default memo(TextboxVariableInput);

View File

@@ -1,35 +1,16 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable jsx-a11y/click-events-have-key-events */
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react/jsx-props-no-spreading */
/* eslint-disable no-nested-ternary */
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { orange } from '@ant-design/colors';
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE, variablePropsToPayloadVariables } from '../utils';
import { SelectItemStyle } from './styles';
import { areArraysEqual, checkAPIInvocation } from './util';
import CustomVariableInput from './CustomVariableInput';
import QueryVariableInput from './QueryVariableInput';
import TextboxVariableInput from './TextboxVariableInput';
import './DashboardVariableSelection.styles.scss';
interface VariableItemProps {
export interface VariableItemProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
@@ -43,488 +24,49 @@ interface VariableItemProps {
dependencyData: IDependencyData | null;
}
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] | undefined => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString();
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString();
};
// eslint-disable-next-line sonarjs/cognitive-complexity
function VariableItem({
variableData,
existingVariables,
onValueUpdate,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
// Local state for textbox input to ensure smooth editing experience
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
const textboxInputRef = useRef<InputRef>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
// variableData.name is present as the top element or next in the queue - variablesToGetUpdated
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
};
const [errorMessage, setErrorMessage] = useState<null | string>(null);
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
if (variablesRes && variableData.type === 'QUERY') {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
/* eslint-disable no-useless-escape */
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
const isUsed = newOptionsData.includes(val);
if (!isUsed) {
valueNotInList = true;
}
});
} else if (isString(variableData.selectedValue)) {
const isUsed = newOptionsData.includes(variableData.selectedValue);
if (!isUsed) {
valueNotInList = true;
}
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.type === 'QUERY' &&
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData && variableData?.name && variableData?.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
console.error(e);
}
} else if (variableData.type === 'CUSTOM') {
const optionsData = sortValues(
commaValuesParser(variableData.customValue || ''),
variableData.sort,
) as never;
setOptionsData(optionsData);
}
};
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
],
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
onError: (error: {
details: {
error: string;
};
}) => {
const { details } = error;
if (details.error) {
let message = details.error;
if ((details.error ?? '').toString().includes('Syntax error:')) {
message =
'Please make sure query is valid and dependent variables are selected';
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
},
},
);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass null to avoid storing values
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
onValueUpdate,
optionsData,
variableData.showALLOption,
],
);
// Add a handler for tracking temporary selection changes
const handleTempChange = (inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
};
// Handle dropdown visibility changes
const handleDropdownVisibleChange = (visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
};
// do not debounce the above function as we do not need debounce in select variables
const debouncedHandleChange = debounce(handleChange, 500);
const { selectedValue } = variableData;
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
// Apply default value on first render if no selection exists
// eslint-disable-next-line sonarjs/cognitive-complexity
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
useEffect(() => {
// Fetch options for CUSTOM Type
if (variableData.type === 'CUSTOM') {
getOptions(null);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableData.type, variableData.customValue]);
const { name, description, type: variableType } = variableData;
return (
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
<div className="variable-item">
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
{variableData.description && (
<Tooltip title={variableData.description}>
${name}
{description && (
<Tooltip title={description}>
<InfoCircleOutlined className="info-icon" />
</Tooltip>
)}
</Typography.Text>
<div className="variable-value">
{variableData.type === 'TEXTBOX' ? (
<Input
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
value={textboxInputValue}
title={textboxInputValue}
onChange={(e): void => {
setTextboxInputValue(e.target.value);
}}
onFocus={(): void => {
setIsTextboxFocused(true);
}}
onBlur={(e): void => {
setIsTextboxFocused(false);
const value = e.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
}}
onKeyDown={(e): void => {
if (e.key === 'Enter') {
const value = textboxInputValue.trim();
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
textboxInputRef.current?.blur();
}
}}
{variableType === 'TEXTBOX' && (
<TextboxVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
/>
) : (
optionsData &&
(variableData.multiSelect ? (
<CustomMultiSelect
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
options={optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
}))}
defaultValue={variableData.defaultValue || selectValue}
onChange={handleTempChange}
bordered={false}
placeholder="Select value"
placement="bottomLeft"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
maxTagCount={2}
getPopupContainer={popupContainer}
value={tempSelection || selectValue}
onDropdownVisibleChange={handleDropdownVisibleChange}
errorMessage={errorMessage}
// eslint-disable-next-line react/no-unstable-nested-components
maxTagPlaceholder={(omittedValues): JSX.Element => {
const maxDisplayValues = 10;
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
const hasMore = omittedValues.length > maxDisplayValues;
const tooltipText =
valuesToShow.map(({ value }) => value).join(', ') +
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}}
onClear={(): void => {
handleChange([]);
}}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={selectValue !== ALL_SELECT_VALUE && selectValue !== 'ALL'}
onRetry={(): void => {
setErrorMessage(null);
refetch();
}}
/>
) : (
<CustomSelect
key={
selectValue && Array.isArray(selectValue)
? selectValue.join(' ')
: selectValue || variableData.id
}
defaultValue={variableData.defaultValue || selectValue}
onChange={handleChange}
bordered={false}
placeholder="Select value"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
getPopupContainer={popupContainer}
options={optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
}))}
value={selectValue}
errorMessage={errorMessage}
onRetry={(): void => {
setErrorMessage(null);
refetch();
}}
/>
))
)}
{variableData.type !== 'TEXTBOX' && errorMessage && (
<span style={{ margin: '0 0.5rem' }}>
<Popover
placement="top"
content={<Typography>{errorMessage}</Typography>}
>
<WarningOutlined style={{ color: orange[5] }} />
</Popover>
</span>
{variableType === 'CUSTOM' && (
<CustomVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
/>
)}
{variableType === 'QUERY' && (
<QueryVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
)}
</div>
</div>

View File

@@ -0,0 +1,201 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { isEmpty } from 'lodash-es';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { areArraysEqual, getSelectValue } from './util';
interface UseDashboardVariableSelectHelperParams {
variableData: IDashboardVariable;
optionsData: (string | number | boolean)[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
}
interface UseDashboardVariableSelectHelperReturn {
// State
tempSelection: string | string[] | undefined;
setTempSelection: React.Dispatch<
React.SetStateAction<string | string[] | undefined>
>;
value: string | string[] | undefined;
defaultValue: string | string[] | undefined;
// Derived values
enableSelectAll: boolean;
// Handlers
onChange: (value: string | string[]) => void;
onDropdownVisibleChange: (visible: boolean) => void;
handleClear: () => void;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
export function useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
const { selectedValue } = variableData;
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? 'ALL'
: selectedValueStringified;
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass optionsData as the value and set allSelected to true
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
variableData.showALLOption,
onValueUpdate,
optionsData,
],
);
const handleTempChange = useCallback(
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(value);
},
[variableData.multiSelect],
);
// Apply default value on first render if no selection exists
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
// Apply default values when needed
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
// Handle dropdown visibility changes
const onDropdownVisibleChange = useCallback(
(visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
},
[variableData, tempSelection, handleChange],
);
const handleClear = useCallback((): void => {
handleChange([]);
}, [handleChange]);
const value = variableData.multiSelect
? tempSelection || selectValue
: selectValue;
const defaultValue = variableData.defaultValue || selectValue;
const onChange = useMemo(() => {
return variableData.multiSelect ? handleTempChange : handleChange;
}, [variableData.multiSelect, handleTempChange, handleChange]);
return {
tempSelection,
setTempSelection,
enableSelectAll,
onDropdownVisibleChange,
handleClear,
value,
defaultValue,
onChange,
};
}

View File

@@ -381,3 +381,16 @@ export const uniqueValues = (values: string[] | string): string[] | string => {
return values;
};
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
): string | string[] | undefined => {
if (Array.isArray(selectedValue)) {
if (!variableData.multiSelect && selectedValue.length === 1) {
return selectedValue[0]?.toString();
}
return selectedValue.map((item) => item.toString());
}
return selectedValue?.toString();
};
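
A few illustrative calls showing how getSelectValue normalizes the stored selection; the variable objects are cast for brevity and the values are made up. An undefined selectedValue passes straight through as undefined.

// Single-select with a one-element array collapses to a plain string.
getSelectValue(['frontend'], { multiSelect: false } as IDashboardVariable); // 'frontend'
// Multi-select keeps an array, stringifying each entry.
getSelectValue(['frontend', 'backend'], { multiSelect: true } as IDashboardVariable); // ['frontend', 'backend']
// Scalar values are stringified.
getSelectValue('frontend', {} as IDashboardVariable); // 'frontend'
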

View File

@@ -0,0 +1,21 @@
.chart-manager-container {
width: 100%;
max-height: calc(40% - 40px);
display: flex;
flex-direction: column;
gap: 16px;
.chart-manager-header {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;
.chart-manager-actions-container {
display: flex;
justify-content: flex-end;
align-items: center;
gap: 8px;
}
}
}

View File

@@ -0,0 +1,147 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { Button, Input } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
import { useNotifications } from 'hooks/useNotifications';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import './ChartManager.styles.scss';
interface ChartManagerProps {
config: UPlotConfigBuilder;
alignedData: uPlot.AlignedData;
yAxisUnit?: string;
onCancel?: () => void;
}
/**
* ChartManager provides a tabular view to manage the visibility of
* individual series on a uPlot chart.
*
* It syncs with the legend state coming from the plot context and
* allows users to:
* - filter series by label
* - toggle individual series on/off
* - persist the visibility configuration to local storage.
*
* @param config - `UPlotConfigBuilder` instance used to derive chart options.
* @param alignedData - uPlot aligned data used to build the initial table dataset.
* @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
* @param onCancel - Optional callback invoked when the user cancels the dialog.
*/
export default function ChartManager({
config,
alignedData,
yAxisUnit,
onCancel,
}: ChartManagerProps): JSX.Element {
const { notifications } = useNotifications();
const { legendItemsMap } = useLegendsSync({
config,
subscribeToFocusChange: false,
});
const {
onToggleSeriesOnOff,
onToggleSeriesVisibility,
syncSeriesVisibilityToLocalStorage,
} = usePlotContext();
const { isDashboardLocked } = useDashboard();
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
const graphVisibilityState = useMemo(
() =>
Object.entries(legendItemsMap).reduce<boolean[]>((acc, [key, item]) => {
acc[Number(key)] = item.show;
return acc;
}, []),
[legendItemsMap],
);
useEffect(() => {
setTableDataSet(
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
}, [alignedData, config]);
const filterHandler = useCallback(
(event: React.ChangeEvent<HTMLInputElement>): void => {
const value = event.target.value.toString().toLowerCase();
const updatedDataSet = tableDataSet.map((item) => {
if (item.label?.toLocaleLowerCase().includes(value)) {
return { ...item, show: true };
}
return { ...item, show: false };
});
setTableDataSet(updatedDataSet);
},
[tableDataSet],
);
const dataSource = useMemo(
() =>
tableDataSet.filter(
(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
),
[tableDataSet],
);
const columns = useMemo(
() =>
getGraphManagerTableColumns({
tableDataSet,
checkBoxOnChangeHandler: (_e, index) => {
onToggleSeriesOnOff(index);
},
graphVisibilityState,
labelClickedHandler: onToggleSeriesVisibility,
yAxisUnit,
isGraphDisabled: isDashboardLocked,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
);
const handleSave = useCallback((): void => {
syncSeriesVisibilityToLocalStorage();
notifications.success({
message: 'The updated graphs & legends are saved',
});
if (onCancel) {
onCancel();
}
}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);
return (
<div className="chart-manager-container">
<div className="chart-manager-header">
<Input onChange={filterHandler} placeholder="Filter Series" />
<div className="chart-manager-actions-container">
<Button type="default" onClick={onCancel}>
Cancel
</Button>
<Button type="primary" onClick={handleSave}>
Save
</Button>
</div>
</div>
<div className="chart-manager-table-container">
<ResizeTable
columns={columns}
dataSource={dataSource}
virtual
rowKey="index"
scroll={{ y: 200 }}
pagination={false}
/>
</div>
</div>
);
}
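As a point of reference, a minimal sketch of mounting ChartManager next to the chart it controls; the builder, data, unit, and close handler here are placeholders, and the real wiring appears in TimeSeriesPanel further below:

// Sketch only: props are placeholders, not the actual dashboard wiring.
function FullViewSeriesManager({
  builder,
  data,
}: {
  builder: UPlotConfigBuilder;
  data: uPlot.AlignedData;
}): JSX.Element {
  return (
    <ChartManager
      config={builder}
      alignedData={data}
      yAxisUnit="ms" // placeholder unit
      onCancel={(): void => undefined} // in the dashboard this closes the full-view modal
    />
  );
}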

View File

@@ -0,0 +1,120 @@
import { useCallback } from 'react';
import { UseQueryResult } from 'react-query';
import {
getTimeRangeFromStepInterval,
isApmMetric,
} from 'container/PanelWrapper/utils';
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
import {
PopoverPosition,
useCoordinates,
} from 'periscope/components/ContextMenu';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
interface UseTimeSeriesContextMenuParams {
widget: Widgets;
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
}
export const usePanelContextMenu = ({
widget,
queryResponse,
}: UseTimeSeriesContextMenuParams): {
coordinates: { x: number; y: number } | null;
popoverPosition: PopoverPosition | null;
onClose: () => void;
menuItemsConfig: {
header?: string | React.ReactNode;
items?: React.ReactNode;
};
clickHandlerWithContextMenu: (...args: any[]) => void;
} => {
const {
coordinates,
popoverPosition,
clickedData,
onClose,
subMenu,
onClick,
setSubMenu,
} = useCoordinates();
const { menuItemsConfig } = useGraphContextMenu({
widgetId: widget.id || '',
query: widget.query,
graphData: clickedData,
onClose,
coordinates,
subMenu,
setSubMenu,
contextLinks: widget.contextLinks,
panelType: widget.panelTypes,
queryRange: queryResponse,
});
const clickHandlerWithContextMenu = useCallback(
(...args: any[]) => {
const [
xValue,
_yvalue,
_mouseX,
_mouseY,
metric,
queryData,
absoluteMouseX,
absoluteMouseY,
axesData,
focusedSeries,
] = args;
const data = getUplotClickData({
metric,
queryData,
absoluteMouseX,
absoluteMouseY,
focusedSeries,
});
let timeRange;
if (axesData && queryData?.queryName) {
const compositeQuery = (queryResponse?.data?.params as any)?.compositeQuery;
if (compositeQuery?.queries) {
const specificQuery = compositeQuery.queries.find(
(query: any) => query.spec?.name === queryData.queryName,
);
const stepInterval = specificQuery?.spec?.stepInterval || 60;
timeRange = getTimeRangeFromStepInterval(
stepInterval,
metric?.clickedTimestamp || xValue,
specificQuery?.spec?.signal === DataSource.METRICS &&
isApmMetric(specificQuery?.spec?.aggregations[0]?.metricName),
);
}
}
if (data && data?.record?.queryName) {
onClick(data.coord, { ...data.record, label: data.label, timeRange });
}
},
[onClick, queryResponse],
);
return {
coordinates,
popoverPosition,
onClose,
menuItemsConfig,
clickHandlerWithContextMenu,
};
};

View File

@@ -0,0 +1,4 @@
.panel-container {
height: 100%;
width: 100%;
}

View File

@@ -0,0 +1,176 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { LineInterpolation } from 'lib/uPlotV2/config/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { getTimeRange } from 'utils/getTimeRange';
import { prepareChartData, prepareUPlotConfig } from '../TimeSeriesPanel/utils';
import '../Panel.styles.scss';
function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
const {
panelMode,
queryResponse,
widget,
onDragSelect,
isFullViewMode,
onToggleModelHandler,
} = props;
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const containerDimensions = useResizeObserver(graphRef);
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);
setMinTimeScale(startTime);
setMaxTimeScale(endTime);
}, [queryResponse]);
const {
coordinates,
popoverPosition,
onClose,
menuItemsConfig,
clickHandlerWithContextMenu,
} = usePanelContextMenu({
widget,
queryResponse,
});
const chartData = useMemo(() => {
if (!queryResponse?.data?.payload) {
return [];
}
return prepareChartData(queryResponse?.data?.payload);
}, [queryResponse?.data?.payload]);
const config = useMemo(() => {
const tzDate = (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
return prepareUPlotConfig({
widgetId: widget.id || '',
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
tzDate,
minTimeScale: minTimeScale,
maxTimeScale: maxTimeScale,
isLogScale: widget?.isLogScale ?? false,
thresholds: {
scaleKey: 'y',
thresholds: (widget.thresholds || []).map((threshold) => ({
thresholdValue: threshold.thresholdValue ?? 0,
thresholdColor: threshold.thresholdColor,
thresholdUnit: threshold.thresholdUnit,
thresholdLabel: threshold.thresholdLabel,
})),
yAxisUnit: widget.yAxisUnit,
},
yAxisUnit: widget.yAxisUnit || '',
softMin: widget.softMin === undefined ? null : widget.softMin,
softMax: widget.softMax === undefined ? null : widget.softMax,
spanGaps: false,
colorMapping: widget.customLegendColors ?? {},
lineInterpolation: LineInterpolation.Spline,
isDarkMode,
onClick: clickHandlerWithContextMenu,
onDragSelect,
currentQuery: widget.query,
panelMode,
});
}, [
widget.id,
maxTimeScale,
minTimeScale,
timezone.value,
widget.customLegendColors,
widget.isLogScale,
widget.softMax,
widget.softMin,
isDarkMode,
queryResponse?.data?.payload,
widget.query,
widget.thresholds,
widget.yAxisUnit,
panelMode,
clickHandlerWithContextMenu,
onDragSelect,
]);
const layoutChildren = useMemo(() => {
if (!isFullViewMode) {
return null;
}
return (
<ChartManager
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
onCancel={onToggleModelHandler}
/>
);
}, [
isFullViewMode,
config,
chartData,
widget.yAxisUnit,
onToggleModelHandler,
]);
return (
<div className="panel-container" ref={graphRef}>
{containerDimensions.width > 0 && containerDimensions.height > 0 && (
<TimeSeries
config={config}
legendConfig={{
position: widget?.legendPosition ?? LegendPosition.BOTTOM,
}}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}
layoutChildren={layoutChildren}
>
<ContextMenu
coordinates={coordinates}
popoverPosition={popoverPosition}
title={menuItemsConfig.header as string}
items={menuItemsConfig.items}
onClose={onClose}
/>
</TimeSeries>
)}
</div>
);
}
export default TimeSeriesPanel;

View File

@@ -0,0 +1,170 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
fillMissingXAxisTimestamps,
getXAxisTimestamps,
} from 'container/DashboardContainer/visualization/panels/utils';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import onClickPlugin, {
OnClickPluginOpts,
} from 'lib/uPlotLib/plugins/onClickPlugin';
import {
DistributionType,
DrawStyle,
LineInterpolation,
LineStyle,
SelectionPreferencesSource,
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { PanelMode } from '../types';
export const prepareChartData = (
apiResponse: MetricRangePayloadProps,
): uPlot.AlignedData => {
const seriesList = apiResponse?.data?.result || [];
const timestampArr = getXAxisTimestamps(seriesList);
const yAxisValuesArr = fillMissingXAxisTimestamps(timestampArr, seriesList);
return [timestampArr, ...yAxisValuesArr];
};
export const prepareUPlotConfig = ({
widgetId,
apiResponse,
tzDate,
minTimeScale,
maxTimeScale,
isLogScale,
thresholds,
softMin,
softMax,
spanGaps,
colorMapping,
lineInterpolation,
isDarkMode,
currentQuery,
onDragSelect,
onClick,
yAxisUnit,
panelMode,
}: {
widgetId: string;
apiResponse: MetricRangePayloadProps;
tzDate: uPlot.LocalDateFromUnix;
minTimeScale: number | undefined;
maxTimeScale: number | undefined;
isLogScale: boolean;
softMin: number | null;
softMax: number | null;
spanGaps: boolean;
colorMapping: Record<string, string>;
lineInterpolation: LineInterpolation;
isDarkMode: boolean;
thresholds: ThresholdsDrawHookOptions;
currentQuery: Query;
yAxisUnit: string;
onDragSelect: (startTime: number, endTime: number) => void;
onClick?: OnClickPluginOpts['onClick'];
panelMode: PanelMode;
}): UPlotConfigBuilder => {
const builder = new UPlotConfigBuilder({
onDragSelect,
widgetId,
tzDate,
shouldSaveSelectionPreference: panelMode === PanelMode.DASHBOARD_VIEW,
selectionPreferencesSource: [
PanelMode.DASHBOARD_VIEW,
PanelMode.STANDALONE_VIEW,
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
});
// X scale time axis
builder.addScale({
scaleKey: 'x',
time: true,
min: minTimeScale,
max: maxTimeScale,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
// Y scale value axis, driven primarily by softMin/softMax and data
builder.addScale({
scaleKey: 'y',
time: false,
min: undefined,
max: undefined,
softMin: softMin ?? undefined,
softMax: softMax ?? undefined,
thresholds,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
builder.addThresholds(thresholds);
if (typeof onClick === 'function') {
builder.addPlugin(
onClickPlugin({
onClick,
apiResponse,
}),
);
}
builder.addAxis({
scaleKey: 'x',
show: true,
side: 2,
isDarkMode,
isLogScale: false,
panelType: PANEL_TYPES.TIME_SERIES,
});
builder.addAxis({
scaleKey: 'y',
show: true,
side: 3,
isDarkMode,
isLogScale: false,
yAxisUnit,
panelType: PANEL_TYPES.TIME_SERIES,
});
apiResponse.data?.result?.forEach((series) => {
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
series.legend || '',
);
const label = currentQuery
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Line,
label: label,
colorMapping,
spanGaps,
lineStyle: LineStyle.Solid,
lineInterpolation,
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
});
});
return builder;
};

View File

@@ -0,0 +1,54 @@
import { normalizePlotValue } from 'lib/uPlotV2/utils/dataUtils';
import { QueryData } from 'types/api/widgets/getQuery';
export function getXAxisTimestamps(seriesList: QueryData[]): number[] {
const timestamps = new Set<number>();
seriesList.forEach((series: { values?: [number, string][] }) => {
if (series?.values) {
series.values.forEach((value) => {
timestamps.add(value[0]);
});
}
});
const timestampsArr = Array.from(timestamps);
timestampsArr.sort((a, b) => a - b);
return timestampsArr;
}
export function fillMissingXAxisTimestamps(
timestampArr: number[],
data: Array<{ values?: [number, string][] }>,
): (number | null)[][] {
// Ensure we work with a sorted, deduplicated list of x-axis timestamps
const canonicalTimestamps = Array.from(new Set(timestampArr)).sort(
(a, b) => a - b,
);
return data.map(({ values }) =>
buildSeriesYValues(canonicalTimestamps, values),
);
}
function buildSeriesYValues(
timestamps: number[],
values?: [number, string][],
): (number | null)[] {
if (!values?.length) {
return [];
}
const valueByTimestamp = new Map<number, number | null>();
for (let i = 0; i < values.length; i++) {
const [timestamp, rawValue] = values[i];
valueByTimestamp.set(timestamp, normalizePlotValue(rawValue));
}
return timestamps.map((timestamp) => {
const value = valueByTimestamp.get(timestamp);
return value !== undefined ? value : null;
});
}
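A short worked example of how these helpers align two series onto one x-axis (assuming normalizePlotValue parses numeric strings into numbers, which matches how it is used here):

// Two series sampled at partially overlapping timestamps; values arrive as strings.
const seriesA = { values: [[100, '1'], [200, '2']] as [number, string][] };
const seriesB = { values: [[200, '5'], [300, '6']] as [number, string][] };

const timestamps = getXAxisTimestamps([seriesA, seriesB] as QueryData[]);
// -> [100, 200, 300] (union of timestamps, deduplicated and sorted)

const yValues = fillMissingXAxisTimestamps(timestamps, [seriesA, seriesB]);
// -> [[1, 2, null], [null, 5, 6]] (missing samples become null)

// prepareChartData above then assembles [timestamps, ...yValues] into uPlot's AlignedData.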

View File

@@ -33,8 +33,8 @@ import { useChartMutable } from 'hooks/useChartMutable';
import useComponentPermission from 'hooks/useComponentPermission';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import GetMinMax from 'lib/getMinMax';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';

View File

@@ -8,8 +8,8 @@ import { populateMultipleResults } from 'container/NewWidget/LeftContainer/Widge
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';

View File

@@ -6,7 +6,7 @@ import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { AppState } from 'store/reducers';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

View File

@@ -27,8 +27,8 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';

View File

@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { ServicesList } from 'types/api/metrics/getService';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';

View File

@@ -13,7 +13,7 @@ import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useNotifications } from 'hooks/useNotifications';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';

View File

@@ -3,8 +3,8 @@ import { useSelector } from 'react-redux';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

View File

@@ -14,7 +14,7 @@ describe('Get Series Data', () => {
expect(seriesData.length).toBe(5);
expect(seriesData[1].label).toBe('firstLegend');
expect(seriesData[1].show).toBe(true);
expect(seriesData[1].fill).toBe('#C71585');
expect(seriesData[1].fill).toBe('#FF6F91');
expect(seriesData[1].width).toBe(2);
});

View File

@@ -24,6 +24,14 @@
opacity: 1;
}
.legend-empty-state {
font-size: 12px;
color: var(--bg-vanilla-400);
text-align: center;
padding: 2rem 0;
}
.legend-virtuoso-container {
height: 100%;
width: 100%;

View File

@@ -81,6 +81,13 @@ export default function Legend({
[focusedSeriesIndex, position],
);
const isEmptyState = useMemo(() => {
if (position !== LegendPosition.RIGHT || !legendSearchQuery.trim()) {
return false;
}
return visibleLegendItems.length === 0;
}, [position, legendSearchQuery, visibleLegendItems]);
return (
<div
ref={legendContainerRef}
@@ -103,15 +110,21 @@ export default function Legend({
/>
</div>
)}
<VirtuosoGrid
className={cx(
'legend-virtuoso-container',
`legend-virtuoso-container-${position.toLowerCase()}`,
{ 'legend-virtuoso-container-single-row': isSingleRow },
)}
data={visibleLegendItems}
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
/>
{isEmptyState ? (
<div className="legend-empty-state">
No series found matching &quot;{legendSearchQuery}&quot;
</div>
) : (
<VirtuosoGrid
className={cx(
'legend-virtuoso-container',
`legend-virtuoso-container-${position.toLowerCase()}`,
{ 'legend-virtuoso-container-single-row': isSingleRow },
)}
data={visibleLegendItems}
itemContent={(_, item): JSX.Element => renderLegendItem(item)}
/>
)}
</div>
);
}

View File

@@ -1,7 +1,7 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { commaValuesParser } from '../../lib/dashbaordVariables/customCommaValuesParser';
import { commaValuesParser } from '../../lib/dashboardVariables/customCommaValuesParser';
interface UrlVariables {
[key: string]: any;

View File

@@ -4,6 +4,7 @@ import {
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -62,9 +63,30 @@ export function buildDependencyData(
};
}
/**
* Initialize the variable fetch store with the computed dependency data
*/
function initializeFetchStore(
sortedVariablesArray: IDashboardVariable[],
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
@@ -75,15 +97,22 @@ export function computeDerivedValues(
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
}
/**
* Update derived values in the store state (for use with immer)
* Also initializes the variable fetch store with the new dependency data
*/
export function updateDerivedValues(
draft: IDashboardVariablesStoreState,
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
}

View File

@@ -0,0 +1,57 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import createStore from './store';
// Fetch state for each variable
export type VariableFetchState =
| 'idle' // stable state - initial or complete
| 'loading' // actively fetching data (first time)
| 'revalidating' // refetching existing data
| 'waiting' // blocked on parent dependencies
| 'error';
export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
lastUpdated: Record<string, number>;
}
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
initialState,
);
// ============== Actions ==============
/**
* Initialize the store with dependency graphs and set initial states
*/
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
variableFetchStore.update((draft) => {
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving any state that already exists
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
});
}
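A minimal usage sketch, assuming VariableGraph maps a variable name to an array of variable names (the two variables below are hypothetical):

// Dashboard with two variables where `endpoint` depends on `service`.
initializeVariableFetchStore(
  ['service', 'endpoint'],
  { service: ['endpoint'], endpoint: [] }, // children that depend on each variable
  { service: [], endpoint: ['service'] }, // parents each variable depends on
);
// Afterwards, states is { service: 'idle', endpoint: 'idle' } unless a variable
// already had a state, which is preserved.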

View File

@@ -1,127 +0,0 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}
render.Success(w, http.StatusOK, response)
}
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}
bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()
fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}
render.Success(w, http.StatusOK, response)
}

View File

@@ -1,162 +0,0 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error
signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}
sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}
if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}
var startUnixMilli, endUnixMilli int64
if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}
// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}
// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}
metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}
name := r.URL.Query().Get("searchText")
if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}
req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}
return &req, nil
}
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}
switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}

View File

@@ -0,0 +1,50 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}
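For reference, a hedged sketch of calling these endpoints from a client. The paths and method come from the routes above; the query parameter names follow the request parsing that this change replaces and are assumed to carry over to the new OpenAPI types, while the response shapes of GettableFieldKeys/GettableFieldValues and any auth headers are deployment-specific assumptions:

// Sketch only: auth omitted, responses treated as opaque JSON.
const params = new URLSearchParams({
  signal: 'logs',
  searchText: 'http.method',
  limit: '50',
  // startUnixMilli is rounded down server-side to the nearest 6 hours.
  startUnixMilli: String(Date.now() - 6 * 60 * 60 * 1000),
  endUnixMilli: String(Date.now()),
});

const keysResponse = await fetch(`/api/v1/fields/keys?${params.toString()}`);
const keys = await keysResponse.json();

// Values for a specific key reuse the same selector plus `name`.
params.set('name', 'http.method');
const valuesResponse = await fetch(`/api/v1/fields/values?${params.toString()}`);
const values = await valuesResponse.json();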

View File

@@ -13,11 +13,11 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -42,8 +42,8 @@ type provider struct {
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
roleGetter role.Getter
roleHandler role.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
}
func NewFactory(
@@ -61,11 +61,31 @@ func NewFactory(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
fieldsHandler,
authzHandler,
)
})
}
@@ -87,8 +107,8 @@ func newProvider(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -109,11 +129,11 @@ func newProvider(
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
if err := provider.AddToRouter(router); err != nil {
return nil, err
@@ -175,6 +195,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -10,7 +10,7 @@ import (
)
func (provider *provider) addRoleRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
ID: "CreateRole",
Tags: []string{"role"},
Summary: "Create role",
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
ID: "ListRoles",
Tags: []string{"role"},
Summary: "List roles",
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
Summary: "Get role",
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
ID: "DeleteRole",
Tags: []string{"role"},
Summary: "Delete role",

View File

@@ -325,7 +325,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusBadRequest},
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnprocessableEntity},
Deprecated: false,
SecuritySchemes: []handler.OpenAPISecurityScheme{},
})).Methods(http.MethodPost).GetError(); err != nil {

View File

@@ -2,9 +2,11 @@ package authz
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -29,4 +31,76 @@ type AuthZ interface {
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in the authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in the authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstraps the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
// Bootstraps managed role transactions and user assignments.
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -1,4 +1,4 @@
package implrole
package sqlauthzstore
import (
"context"
@@ -14,7 +14,7 @@ type store struct {
sqlstore sqlstore.SQLStore
}
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
return &store{sqlstore: sqlstore}
}

View File

@@ -2,35 +2,24 @@ package openfgaauthz
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type provider struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
server *openfgaserver.Server
store roletypes.Store
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
@@ -40,301 +29,194 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &provider{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
server: server,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := provider.getOrCreateModel(ctx, storeId)
if err != nil {
return err
}
provider.mtx.Lock()
provider.modelID = modelID
provider.storeID = storeId
provider.mtx.Unlock()
<-provider.stopChan
return nil
return provider.server.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
provider.openfgaServer.Close()
close(provider.stopChan)
return nil
return provider.server.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := provider.getStoreIDandModelID()
checkResponse, err := provider.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
return provider.server.Check(ctx, tupleReq)
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := provider.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := provider.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return provider.server.BatchCheck(ctx, tupleReq)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := provider.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
return provider.server.Write(ctx, additions, deletions)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := provider.getStoreIDandModelID()
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
return provider.server.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, nil, tuples)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
return err
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
return nil
}
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
}
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := provider.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
}
// the openfga language package doesn't provide an equality check for authorization models
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize new models since the model extracted from the schema doesn't have an id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (provider *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) getStoreIDandModelID() (string, string) {
provider.mtx.RLock()
defer provider.mtx.RUnlock()
storeID := provider.storeID
modelID := provider.modelID
return storeID, modelID
}
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return nil
}
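Grant and Revoke above translate role assignments into OpenFGA tuple writes and deletes. A minimal usage sketch, assuming only the packages already imported by the files in this diff; the helper name grantThenRevoke is illustrative and not part of the change:
// grantThenRevoke shows the intended call sequence against the provider above.
// Assumes imports: context, authz, authtypes, roletypes, valuer.
func grantThenRevoke(ctx context.Context, svc authz.AuthZ, orgID valuer.UUID, userID valuer.UUID) error {
	// Build the subject string for the user within the organization.
	subject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
	// Grant and Revoke are idempotent tuple writes, so retries are safe.
	if err := svc.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, subject); err != nil {
		return err
	}
	return svc.Revoke(ctx, orgID, roletypes.SigNozAdminRoleName, subject)
}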

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"context"

View File

@@ -0,0 +1,334 @@
package openfgaserver
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type Server struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &Server{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (server *Server) Start(ctx context.Context) error {
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := server.getOrCreateModel(ctx, storeID)
if err != nil {
return err
}
server.mtx.Lock()
server.modelID = modelID
server.storeID = storeID
server.mtx.Unlock()
<-server.stopChan
return nil
}
func (server *Server) Stop(ctx context.Context) error {
server.openfgaServer.Close()
close(server.stopChan)
return nil
}
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is a map keyed by the correlation ID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := server.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := server.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := server.getStoreIDandModelID()
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := server.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
// the openfga language package doesn't provide an equality check for authorization models
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize new models since the model extracted from the schema doesn't have an id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (server *Server) getStoreIDandModelID() (string, string) {
server.mtx.RLock()
defer server.mtx.RUnlock()
storeID := server.storeID
modelID := server.modelID
return storeID, modelID
}
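A rough lifecycle sketch for the Server above (illustrative, error handling abbreviated): Start resolves or creates the OpenFGA store and authorization model, publishes their IDs under the RWMutex, and then blocks on stopChan, so callers typically run it in a goroutine and unblock it with Stop.
srv, err := NewOpenfgaServer(ctx, providerSettings, authz.Config{}, sqlStore, moduleFiles)
if err != nil {
	return err
}
// Start blocks until Stop closes stopChan; Check, BatchCheck and Write read the
// store and model IDs that Start publishes via getStoreIDandModelID.
go func() { _ = srv.Start(ctx) }()
defer func() { _ = srv.Stop(ctx) }()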

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {
expectedModel := `module base
type user`
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)
storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())

View File

@@ -1,4 +1,4 @@
package openfgaauthz
package openfgaserver
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -1,12 +1,12 @@
package implrole
package signozauthzapi
import (
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,12 +14,11 @@ import (
)
type handler struct {
setter role.Setter
getter role.Getter
authz authz.AuthZ
}
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
return &handler{setter: setter, getter: getter}
func NewHandler(authz authz.AuthZ) authz.Handler {
return &handler{authz: authz}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -36,7 +35,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
@@ -64,7 +63,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
if err != nil {
render.Error(rw, err)
return
@@ -103,7 +102,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
return
}
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
if err != nil {
render.Error(rw, err)
return
@@ -114,7 +113,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
resources := handler.setter.GetResources(ctx)
resources := handler.authz.GetResources(ctx)
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
@@ -134,7 +133,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
return
}
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
if err != nil {
render.Error(rw, err)
return
@@ -163,7 +162,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -175,7 +174,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
if err != nil {
render.Error(rw, err)
return
@@ -210,7 +209,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -222,7 +221,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
if err != nil {
render.Error(rw, err)
return
@@ -245,7 +244,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return

View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -24,15 +23,14 @@ type AuthZ struct {
logger *slog.Logger
orgGetter organization.Getter
authzService authz.AuthZ
roleGetter role.Getter
}
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {

View File

@@ -4,7 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {
statsreporter.StatsCollector
role.RegisterTypeable
authz.RegisterTypeable
}
type Handler interface {

View File

@@ -206,6 +206,10 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return nil
}
// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")

View File

@@ -0,0 +1,11 @@
package fields
import "net/http"
type Handler interface {
// Gets the field keys for the given field key selector
GetFieldsKeys(http.ResponseWriter, *http.Request)
// Gets the field values for the given field value selector
GetFieldsValues(http.ResponseWriter, *http.Request)
}
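A hypothetical no-op implementation of this interface, useful as a stand-in while wiring routes in tests; the type name is illustrative and not part of this change.
type noopFieldsHandler struct{}

func (noopFieldsHandler) GetFieldsKeys(rw http.ResponseWriter, _ *http.Request)   { rw.WriteHeader(http.StatusOK) }
func (noopFieldsHandler) GetFieldsValues(rw http.ResponseWriter, _ *http.Request) { rw.WriteHeader(http.StatusOK) }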

View File

@@ -0,0 +1,79 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type handler struct {
telemetryMetadataStore telemetrytypes.MetadataStore
}
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
return &handler{
telemetryMetadataStore: telemetryMetadataStore,
}
}
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldKeysParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
Keys: keys,
Complete: complete,
})
}
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
var params telemetrytypes.PostableFieldValueParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(rw, err)
return
}
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if we fail to get related values for some reason
relatedValues = []string{}
}
values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
Values: values,
Complete: allComplete && relatedComplete,
})
}

View File

@@ -1,63 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type getter struct {
store roletypes.Store
}
func NewGetter(store roletypes.Store) role.Getter {
return &getter{store: store}
}
func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := getter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}

View File

@@ -1,83 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type granter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
return &granter{store: store, authz: authz}
}
func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, tuples, nil)
}
func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := granter.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = granter.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, nil, tuples)
}
func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
return nil
}

View File

@@ -1,53 +0,0 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
return &setter{store: store, authz: authz}
}
func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,85 +0,0 @@
package role
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Setter interface {
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type Getter interface {
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}
type Granter interface {
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -8,11 +8,11 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -32,13 +32,13 @@ type Module struct {
emailing emailing.Emailing
settings factory.ScopedProviderSettings
orgSetter organization.Setter
granter role.Granter
authz authz.AuthZ
analytics analytics.Analytics
config user.Config
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -47,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
settings: settings,
orgSetter: orgSetter,
analytics: analytics,
granter: granter,
authz: authz,
config: config,
}
}
@@ -172,7 +172,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
@@ -238,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
if user.Role != existingUser.Role {
err = m.granter.ModifyGrant(ctx,
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
@@ -301,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
}
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -369,7 +369,7 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID
func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error {
if !module.config.Password.Reset.AllowSelf {
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "users are not allowed to reset their password themselves, please contact an admin to reset your password")
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
}
user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)
@@ -504,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
}
managedRoles := roletypes.NewManagedRoles(organization.ID)
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
if err != nil {
return nil, err
}
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
if err != nil {
return err
}

View File

@@ -25,7 +25,6 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -69,7 +68,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
"go.uber.org/zap"
@@ -104,10 +103,11 @@ type APIHandler struct {
querierV2 interfaces.Querier
queryBuilder *queryBuilder.QueryBuilder
// temporalityMap is a map of metric name to temporality
// to avoid fetching temporality for the same metric multiple times
// querying the v4 table on low cardinal temporality column
// should be fast but we can still avoid the query if we have the data in memory
// temporalityMap is a map of metric name to temporality to avoid fetching
// temporality for the same metric multiple times.
//
// Querying the v4 table on a low cardinal temporality column should be
// fast, but we can still avoid the query if we have the data in memory.
temporalityMap map[string]map[v3.Temporality]bool
temporalityMux sync.Mutex
@@ -145,8 +145,6 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -177,8 +175,6 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -243,7 +239,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -399,13 +394,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
@@ -1023,7 +1011,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
// the query range is calculated based on the rule's evalWindow and evalDelay
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
if rule.AlertType == ruletypes.AlertTypeLogs {
if rule.Version != "v5" {
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
@@ -1230,12 +1218,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
dashboard := new(dashboardtypes.Dashboard)
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
if apiErr != nil {
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
return
}
dashboard = cloudintegrationDashboard
dashboard = cloudIntegrationDashboard
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
if apiErr != nil {
@@ -1564,13 +1552,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
}
response_data := &model.QueryData{
responseData := &model.QueryData{
ResultType: res.Value.Type(),
Result: res.Value,
Stats: qs,
}
aH.Respond(w, response_data)
aH.Respond(w, responseData)
}
@@ -2652,12 +2640,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2705,12 +2693,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2759,12 +2747,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2813,12 +2801,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2870,12 +2858,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
@@ -2981,12 +2969,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3035,12 +3023,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3089,12 +3077,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3149,12 +3137,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
@@ -4138,11 +4126,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
aH.Respond(w, payload)
}
// listLogsPipelines lists logs piplines for latest version
// listLogsPipelines lists logs pipelines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
*logparsingpipeline.PipelinesResponse, error,
) {
// get lateset agent config
// get latest agent config
latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
@@ -4439,7 +4427,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4456,7 +4444,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, errQuriesByName)
RespondError(w, apiErr, errQueriesByName)
return
}
// get the fields if any logs query is present
@@ -4467,7 +4455,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4512,11 +4500,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQuriesByName {
for name, err := range errQueriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4792,7 +4780,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
var result []*v3.Result
var errQuriesByName map[string]error
var errQueriesByName map[string]error
var spanKeys map[string]v3.AttributeKey
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
hasLogsQuery := false
@@ -4822,7 +4810,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4845,11 +4833,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
}
}
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
if err != nil {
queryErrors := map[string]string{}
for name, err := range errQuriesByName {
for name, err := range errQueriesByName {
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
}
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4866,7 +4854,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
RespondError(w, apiErrObj, errQueriesByName)
return
}
aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")

View File

@@ -17,7 +17,6 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -209,13 +207,12 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
api.RegisterRoutes(r, am)
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -5,10 +5,10 @@ import (
"os"
"regexp"
"strconv"
"time"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/valuer"
)
const (
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
func GetEvalDelay() time.Duration {
func GetEvalDelay() valuer.TextDuration {
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
if err != nil {
return 0
return valuer.TextDuration{}
}
return evalDelayDuration
}
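A minimal consumption sketch for the new return type, based only on the valuer API visible in this changeset (ParseTextDuration and the Duration() accessor); the variable names are illustrative.
end := time.Now()
delay := GetEvalDelay()              // valuer.TextDuration parsed from RULES_EVAL_DELAY, default "2m"
start := end.Add(-delay.Duration())  // Duration() converts back to time.Duration for time arithmetic
_ = start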

View File

@@ -71,7 +71,18 @@ func Parse(filters *v3.FilterSet) (string, error) {
// accustom log filters like `body.log.message EXISTS` into EXPR language
// where User is attempting to check for keys present in JSON log body
if strings.HasPrefix(v.Key.Key, "body.") {
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
// if body is a string and is a valid JSON, then check if the key exists in the JSON
filter = fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s %s %s)`, exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
// if body is a map, then check if the key exists in the map
operator := v3.FilterOperatorNotEqual
if v.Operator == v3.FilterOperatorNotExists {
operator = v3.FilterOperatorEqual
}
nilCheckFilter := fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])
// join the two filters with OR
filter = fmt.Sprintf(`(%s or (type(body) == "map" && (%s)))`, filter, nilCheckFilter)
} else if typ := getTypeName(v.Key.Type); typ != "" {
filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
} else {
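For reference, with the new logic above a body.log.message exists filter should roughly expand to the expression below (derived from the format strings in this hunk, not copied from the updated tests): the first branch handles a JSON string body, the second handles a map body, and the two are OR-ed together.
(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))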

View File

@@ -3,203 +3,538 @@ package queryBuilderToExpr
import (
"testing"
signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
. "github.com/smartystreets/goconvey/convey"
"github.com/expr-lang/expr/vm"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
"github.com/stretchr/testify/assert"
)
var testCases = []struct {
Name string
Query *v3.FilterSet
Expr string
ExpectError bool
}{
{
Name: "equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
}},
Expr: `attributes["key"] == "checkbody"`,
},
{
Name: "not equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
}},
Expr: `attributes["key"] != "checkbody"`,
},
{
Name: "less than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
}},
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
},
{
Name: "greater than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
}},
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
},
{
Name: "less than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
},
{
Name: "greater than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
}},
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
},
// case sensitive
{
Name: "body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
Expr: `body != nil && lower(body) contains lower("checkbody")`,
},
{
Name: "body.log.message exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `"log.message" in fromJSON(body)`,
},
{
Name: "body.log.message not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `"log.message" not in fromJSON(body)`,
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
}},
Expr: `body != nil && body matches "[0-1]+regex$"`,
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
}},
Expr: `body != nil && body not matches "[0-1]+regex$"`,
},
{
Name: "regex with escape characters",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
}},
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
},
{
Name: "invalid regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
}},
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
ExpectError: true,
},
{
Name: "in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{1, 2, 3, 4}, Operator: "in"},
}},
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
},
{
Name: "not in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"1", "2"}, Operator: "nin"},
}},
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
},
{
Name: "exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
Expr: `"key" in attributes`,
},
{
Name: "not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `"key" not in attributes`,
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `trace_id == nil`,
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `trace_id != nil`,
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `span_id == nil`,
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `span_id != nil`,
},
{
Name: "Multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
},
{
Name: "incorrect multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
ExpectError: true,
},
}
func TestParseExpression(t *testing.T) {
var testCases = []struct {
Name string
Query *v3.FilterSet
Expr string
ExpectError bool
}{
{
Name: "equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
}},
Expr: `attributes["key"] == "checkbody"`,
},
{
Name: "not equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
}},
Expr: `attributes["key"] != "checkbody"`,
},
{
Name: "less than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
}},
Expr: `attributes["key"] != nil && attributes["key"] < 10`,
},
{
Name: "greater than",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
}},
Expr: `attributes["key"] != nil && attributes["key"] > 10`,
},
{
Name: "less than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
},
{
Name: "greater than equal",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
}},
Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
},
// body matching: contains/ncontains are case-insensitive (lower() on both sides); regex is case-sensitive
{
Name: "body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
Expr: `body != nil && lower(body) contains lower("checkbody")`,
},
{
Name: "body.log.message exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))`,
},
{
Name: "body.log.message not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" not in fromJSON(body)) or (type(body) == "map" && (body.log.message == nil)))`,
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
Expr: `body != nil && lower(body) not contains lower("checkbody")`,
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
}},
Expr: `body != nil && body matches "[0-1]+regex$"`,
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
}},
Expr: `body != nil && body not matches "[0-1]+regex$"`,
},
{
Name: "regex with escape characters",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
}},
Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
},
{
Name: "invalid regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
}},
Expr: `body != nil && lower(body) not matches "[0-9]++"`,
ExpectError: true,
},
{
Name: "in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{1, 2, 3, 4}, Operator: "in"},
}},
Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
},
{
Name: "not in",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"1", "2"}, Operator: "nin"},
}},
Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
},
{
Name: "exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
Expr: `"key" in attributes`,
},
{
Name: "not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `"key" not in attributes`,
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `trace_id == nil`,
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `trace_id != nil`,
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
Expr: `span_id == nil`,
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
Expr: `span_id != nil`,
},
{
Name: "Multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
},
{
Name: "incorrect multi filter",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
ExpectError: true,
},
}
for _, tt := range testCases {
t.Run(tt.Name, func(t *testing.T) {
x, err := Parse(tt.Query)
if tt.ExpectError {
assert.Error(t, err)
} else {
assert.NoError(t, err)
assert.Equal(t, tt.Expr, x)
}
})
}
}
func TestParse(t *testing.T) {
for _, tt := range testCases {
Convey(tt.Name, t, func() {
t.Run(tt.Name, func(t *testing.T) {
x, err := Parse(tt.Query)
if tt.ExpectError {
So(err, ShouldNotBeNil)
assert.Error(t, err)
} else {
So(err, ShouldBeNil)
So(x, ShouldEqual, tt.Expr)
assert.NoError(t, err)
assert.Equal(t, tt.Expr, x)
}
})
})
}
}
type EntryComposite struct {
ID int
*entry.Entry
}
// makeEntry creates an EntryComposite for tests. Pass nil for traceID/spanID to mean "not set".
func makeEntry(id int, body any, attributes, resource map[string]any, traceID, spanID []byte) EntryComposite {
e := entry.New()
e.Body = body
if attributes != nil {
e.Attributes = attributes
} else {
e.Attributes = make(map[string]any)
}
if resource != nil {
e.Resource = resource
} else {
e.Resource = make(map[string]any)
}
if traceID != nil {
e.TraceID = traceID
}
if spanID != nil {
e.SpanID = spanID
}
return EntryComposite{ID: id, Entry: e}
}
func TestExpressionVSEntry(t *testing.T) {
// Dataset: entries with varied bodies (JSON strings, plain text, and maps), attributes, trace_id, and span_id for filter testing.
// IDs 0..12: JSON string bodies (body.msg / body.log lookups work). IDs 13..17: plain text log bodies. IDs 18..20: map (non-string) bodies.
dataset := []EntryComposite{
// JSON body entries (0-12)
makeEntry(0, `{"msg":"hello world"}`, map[string]any{"level": "info"}, map[string]any{"env": "prod", "host": "node-0"}, nil, nil),
makeEntry(1, `{"msg":"error occurred", "missing": "value"}`, map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-1"}, []byte("trace1"), []byte("span1")),
makeEntry(2, `{"msg":"checkbody substring"}`, map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-2"}, []byte("trace2"), nil),
makeEntry(3, `{"msg":"no match here"}`, map[string]any{"level": "debug"}, map[string]any{"env": "staging", "host": "node-3"}, nil, []byte("span3")),
makeEntry(4, `{"msg":"101regex suffix"}`, map[string]any{"code": "200", "count": int64(5)}, map[string]any{"env": "prod", "host": "node-4"}, nil, nil),
makeEntry(5, `{"msg":"plain text only"}`, map[string]any{"code": "404", "count": int64(10)}, map[string]any{"env": "prod", "host": "node-5"}, []byte("trace5"), []byte("span5")),
makeEntry(6, `{"log":{"message":"user login"}}`, map[string]any{"service": "auth"}, map[string]any{"env": "dev", "host": "node-6"}, nil, nil),
makeEntry(7, `{"log":{"message":"user logout"}}`, map[string]any{"service": "auth", "user_id": "u1"}, map[string]any{"env": "dev", "host": "node-7"}, []byte("trace7"), nil),
makeEntry(8, `{"event":"click"}`, map[string]any{"service": "api"}, map[string]any{"env": "dev", "host": "node-8"}, nil, nil),
makeEntry(9, `{"msg":"checkbody"}`, map[string]any{"tag": "exact", "num": int64(9)}, map[string]any{"env": "prod", "host": "node-9"}, nil, nil),
makeEntry(10, `{"msg":"CHECKBODY case"}`, map[string]any{"tag": "case", "num": int64(10)}, map[string]any{"env": "staging", "host": "node-10"}, nil, nil),
makeEntry(11, `{"msg":"foo"}`, map[string]any{"status": "active", "score": int64(100)}, map[string]any{"env": "prod", "host": "node-11"}, nil, nil),
makeEntry(12, `{"msg":"bar"}`, map[string]any{"status": "inactive", "score": int64(50)}, map[string]any{"env": "staging", "host": "node-12"}, []byte("trace12"), []byte("span12")),
// Plain text log body entries (13-17)
makeEntry(13, "Server started on port 8080", map[string]any{"component": "server"}, map[string]any{"env": "prod", "host": "node-13"}, nil, nil),
makeEntry(14, "Connection refused to 10.0.0.1:5432", map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-14"}, nil, nil),
makeEntry(15, "User login failed for admin", map[string]any{"service": "auth", "level": "warn"}, map[string]any{"env": "dev", "host": "node-15"}, []byte("trace15"), nil),
makeEntry(16, "checkbody in text log", map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-16"}, nil, nil),
makeEntry(17, "WARN: disk full on /var", map[string]any{"level": "warn"}, map[string]any{"env": "prod", "host": "node-17"}, nil, []byte("span17")),
// Body as map (not string) entries (18-20)
makeEntry(18, map[string]any{"msg": "checkbody substring", "level": "info"}, map[string]any{"source": "map"}, map[string]any{"env": "prod", "host": "node-18"}, nil, nil),
makeEntry(19, map[string]any{"log": map[string]any{"message": "nested value in map body"}, "missing": true}, map[string]any{"source": "map"}, map[string]any{"env": "staging", "host": "node-19"}, []byte("trace19"), nil),
makeEntry(20, map[string]any{"event": "deploy", "version": "1.2.0"}, map[string]any{"source": "map", "level": "info"}, map[string]any{"env": "dev", "host": "node-20"}, nil, []byte("span20")),
}
var testCases = []struct {
Name string
Query *v3.FilterSet
ExpectedMatches []int
}{
{
Name: "resource equal (env)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{0, 1, 4, 5, 9, 11, 13, 14, 17, 18},
},
{
Name: "resource not equal (env)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "!="},
}},
ExpectedMatches: []int{2, 3, 6, 7, 8, 10, 12, 15, 16, 19, 20},
},
{
Name: "attribute less than (numeric)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: "<"},
}},
ExpectedMatches: []int{4},
},
{
Name: "attribute greater than (numeric)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: ">"},
}},
ExpectedMatches: []int{5},
},
{
Name: "body contains (case insensitive)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2, 9, 10, 16},
},
{
Name: "body ncontains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
}},
ExpectedMatches: []int{0, 1, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 17},
},
{
Name: "body.msg (case insensitive)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: false}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2, 9, 10, 18},
},
{
Name: "body regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
}},
ExpectedMatches: []int{4},
},
{
Name: "body not regex",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "nregex"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
},
// body.log.message exists/nexists: expr checks "log.message" in fromJSON(body); nested key
// semantics depend on signoz stanza helper. Omitted here to avoid coupling to env shape.
{
Name: "body top-level key exists (body.msg)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 18},
},
{
Name: "body top-level key not exists (body.missing)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body.missing", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 18, 20},
},
{
Name: "attribute exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
}},
ExpectedMatches: []int{6, 7, 8, 15},
},
{
Name: "attribute not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20},
},
{
Name: "trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{1, 2, 5, 7, 12, 15, 19},
},
{
Name: "trace_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 3, 4, 6, 8, 9, 10, 11, 13, 14, 16, 17, 18, 20},
},
{
Name: "span_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{1, 3, 5, 12, 17, 20},
},
{
Name: "span_id not exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
}},
ExpectedMatches: []int{0, 2, 4, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19},
},
{
Name: "in (attribute in list)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"info", "error"}, Operator: "in"},
}},
ExpectedMatches: []int{0, 1, 2, 14, 16, 20},
},
{
Name: "not in (attribute not in list)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"error", "warn"}, Operator: "nin"},
}},
ExpectedMatches: []int{0, 2, 3, 16, 20},
},
{
Name: "multi filter AND",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
}},
ExpectedMatches: []int{2, 16},
},
{
Name: "multi filter AND (two attributes)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
}},
ExpectedMatches: []int{6, 7},
},
// Multi-filter variations: body + attribute, three conditions, trace/span + attribute
{
Name: "multi filter AND body contains + attribute",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "Connection", Operator: "contains"},
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{14},
},
{
Name: "multi filter AND body contains + service",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "login", Operator: "contains"},
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
}},
ExpectedMatches: []int{6, 15},
},
{
Name: "multi filter AND env + level (prod error)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "error", Operator: "="},
}},
ExpectedMatches: []int{1, 14},
},
{
Name: "multi filter AND three conditions (staging + checkbody + info)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
}},
ExpectedMatches: []int{2, 16},
},
{
Name: "multi filter AND trace_id exists + body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
}},
ExpectedMatches: []int{2},
},
{
Name: "multi filter AND span_id nexists + service auth",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
}},
ExpectedMatches: []int{6, 7, 15},
},
{
Name: "multi filter AND body regex + attribute",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: "="},
}},
ExpectedMatches: []int{4},
},
{
Name: "multi filter AND no trace_id + no span_id + env prod",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
}},
ExpectedMatches: []int{0, 4, 9, 11, 13, 14, 18},
},
{
Name: "multi filter AND level warn + body contains",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "warn", Operator: "="},
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "disk", Operator: "contains"},
}},
ExpectedMatches: []int{17},
},
{
Name: "no matches (attribute value not present)",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "never", Operator: "="},
}},
ExpectedMatches: []int{},
},
{
Name: "attribute equal and trace_id exists",
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "404", Operator: "="},
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
}},
ExpectedMatches: []int{5},
},
}
for _, tt := range testCases {
t.Run(tt.Name, func(t *testing.T) {
expression, err := Parse(tt.Query)
assert.NoError(t, err)
compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
assert.NoError(t, err)
matchedIDs := []int{}
for _, d := range dataset {
env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
matches, err := vm.Run(compiled, env)
signozstanzahelper.PutExprEnv(env)
if err != nil {
// Eval error (e.g. fromJSON on non-JSON body) => treat as no match
continue
}
if matches != nil && matches.(bool) {
matchedIDs = append(matchedIDs, d.ID)
}
}
assert.Equal(t, tt.ExpectedMatches, matchedIDs, "query %q", tt.Name)
})
}
}
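
The loop above is the full evaluation path for log pipeline filters. Condensed into a single illustrative helper, it is only a sketch built from the same calls the test already uses (Parse, signozstanzahelper.ExprCompileBool / GetExprEnv / PutExprEnv, and vm.Run); the name matchesEntry is hypothetical and not part of the package:

// matchesEntry is a hypothetical helper mirroring the loop in TestExpressionVSEntry:
// it converts a FilterSet to an expr string, compiles it once, and evaluates it
// against a single stanza entry.
func matchesEntry(q *v3.FilterSet, e *entry.Entry) (bool, error) {
	exprStr, err := Parse(q)
	if err != nil {
		return false, err
	}
	compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(exprStr)
	if err != nil {
		return false, err
	}
	env := signozstanzahelper.GetExprEnv(e, hasBodyFieldRef)
	defer signozstanzahelper.PutExprEnv(env)
	out, err := vm.Run(compiled, env)
	if err != nil {
		// Evaluation errors (e.g. fromJSON on a non-JSON body) are treated as
		// "no match", matching the behaviour of the test loop above.
		return false, nil
	}
	matched, ok := out.(bool)
	return ok && matched, nil
}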

View File

@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we lookback from the current time, we look at data for the last
// evalWindow duration
evalWindow time.Duration
evalWindow valuer.TextDuration
// holdDuration is the duration for which the alert waits before firing
holdDuration time.Duration
holdDuration valuer.TextDuration
// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
evalDelay time.Duration
evalDelay valuer.TextDuration
// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation
// newGroupEvalDelay is the grace period for new alert groups
newGroupEvalDelay *time.Duration
newGroupEvalDelay valuer.TextDuration
queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}
func WithEvalDelay(dur time.Duration) RuleOption {
func WithEvalDelay(dur valuer.TextDuration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
evalWindow: time.Duration(p.EvalWindow),
evalWindow: p.EvalWindow,
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,13 +176,12 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}
// Store newGroupEvalDelay and groupBy keys from NotificationSettings
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
baseRule.newGroupEvalDelay = &newGroupEvalDelay
if p.NotificationSettings != nil {
baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
}
if baseRule.evalWindow == 0 {
baseRule.evalWindow = 5 * time.Minute
if baseRule.evalWindow.IsZero() {
baseRule.evalWindow = valuer.MustParseTextDuration("5m")
}
for _, opt := range opts {
@@ -245,15 +244,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}
func (r *BaseRule) EvalDelay() time.Duration {
func (r *BaseRule) EvalDelay() valuer.TextDuration {
return r.evalDelay
}
func (r *BaseRule) EvalWindow() time.Duration {
func (r *BaseRule) EvalWindow() valuer.TextDuration {
return r.evalWindow
}
func (r *BaseRule) HoldDuration() time.Duration {
func (r *BaseRule) HoldDuration() valuer.TextDuration {
return r.holdDuration
}
@@ -281,7 +280,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()
if r.evalDelay > 0 {
if r.evalDelay.IsPositive() {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
@@ -552,7 +551,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q
// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
return r.newGroupEvalDelay.IsPositive()
}
// isFilterNewSeriesSupported checks if the query is supported for new series filtering
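
To make the time.Duration to valuer.TextDuration migration above concrete: evalWindow is the lookback from the evaluation timestamp, and evalDelay, when positive, shifts the whole window into the past (per the Timestamps hunk). The sketch below is illustrative only and assumes just the TextDuration accessors that appear in this diff (Duration, IsPositive, Milliseconds); the window function name is made up for the example:

// window mirrors the start/end computation in BaseRule.Timestamps (illustrative only).
func window(ts time.Time, evalWindow, evalDelay valuer.TextDuration) (startMs, endMs int64) {
	startMs = ts.Add(-evalWindow.Duration()).UnixMilli()
	endMs = ts.UnixMilli()
	if evalDelay.IsPositive() {
		// shift both edges back so the rule evaluates slightly older, fully-ingested data
		startMs -= evalDelay.Milliseconds()
		endMs -= evalDelay.Milliseconds()
	}
	return startMs, endMs
}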

View File

@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay *time.Duration
newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,7 +159,8 @@ type filterNewSeriesTestCase struct {
func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
defaultDelay := 2 * time.Minute
defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultGroupByFields := []string{"service_name", "env"}
logger := instrumentationtest.New().Logger()
@@ -202,7 +203,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -234,7 +235,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -261,7 +262,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -295,7 +296,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -325,7 +326,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -361,7 +362,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -390,7 +391,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -420,7 +421,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -454,7 +455,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -488,7 +489,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -532,7 +533,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -572,7 +573,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -604,7 +605,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -639,7 +640,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: &defaultDelay,
newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -697,20 +698,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
time.Duration(time.Second),
time.Second,
nil,
readerCache,
options,
)
// Set newGroupEvalDelay in NotificationSettings if provided
if tt.newGroupEvalDelay != nil {
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: func() *ruletypes.Duration {
d := ruletypes.Duration(*tt.newGroupEvalDelay)
return &d
}(),
}
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: tt.newGroupEvalDelay,
}
// Create BaseRule using NewBaseRule

View File

@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}
const taskNamesuffix = "webAppEditor"
const taskNameSuffix = "webAppEditor"
func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache
EvalDelay time.Duration
EvalDelay valuer.TextDuration
PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, tr)
// create ch rule task for evalution
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create ch rule task for evaluation
task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
rules = append(rules, pr)
// create promql rule task for evalution
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create promql rule task for evaluation
task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}
// Stop the rule manager's rule evaluation cycles.
func (m *Manager) Stop(ctx context.Context) {
func (m *Manager) Stop(_ context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()
@@ -336,7 +336,7 @@ func (m *Manager) Stop(ctx context.Context) {
zap.L().Info("Rule manager stopped")
}
// EditRuleDefinition writes the rule definition to the
// EditRule writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}
// If there is an another task with the same identifier, raise an error
// If there is another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,7 +678,8 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}
// RuleTasks returns the list of manager's rule tasks.
// RuleTasksWithoutLock returns the list of manager's rule tasks without
// acquiring a lock on the manager.
func (m *Manager) RuleTasksWithoutLock() []Task {
rgs := make([]Task, 0, len(m.tasks))
@@ -889,7 +890,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has not task, start one
// rule has no task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}

View File

@@ -9,6 +9,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -40,8 +41,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -99,8 +100,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",

View File

@@ -28,6 +28,8 @@ type PromRule struct {
prometheus prometheus.Prometheus
}
var _ Rule = (*PromRule)(nil)
func NewPromRule(
id string,
orgID valuer.UUID,
@@ -332,7 +334,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -396,7 +398,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: ruletypes.Duration(r.evalWindow),
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}
// newPromRuleTask holds rules that have promql condition
// and evalutes the rule at a given frequency
// NewPromRuleTask holds rules that have promql condition
// and evaluates the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))
if time.Now() == time.Now().Add(frequency) {
if frequency == 0 {
frequency = DefaultFrequency
}

View File

@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)
evalWindow := 5 * time.Minute
evalWindow := valuer.MustParseTextDuration("5m")
lookBackDelta := time.Minute
postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -19,9 +20,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
EvalDelay() time.Duration
EvalWindow() time.Duration
HoldDuration() time.Duration
EvalDelay() valuer.TextDuration
EvalWindow() valuer.TextDuration
HoldDuration() valuer.TextDuration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint
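
Since the Rule interface now returns valuer.TextDuration for all three durations, callers that need a plain time.Duration convert via Duration(). A tiny illustrative sketch (describe is a hypothetical function, assuming only the accessors shown in the hunk above plus the standard fmt package):

// describe is a hypothetical caller of the updated Rule interface.
func describe(r Rule) string {
	return fmt.Sprintf("evalWindow=%s evalDelay=%s holdDuration=%s",
		r.EvalWindow().Duration(), r.EvalDelay().Duration(), r.HoldDuration().Duration())
}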

View File

@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {
if time.Now() == time.Now().Add(frequency) {
if frequency == 0 {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,6 +78,7 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }
// Interval returns the group's interval.
// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }
func (g *RuleTask) Pause(b bool) {

View File

@@ -61,6 +61,8 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}
var _ Rule = (*ThresholdRule)(nil)
func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -746,7 +748,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -812,7 +814,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: ruletypes.Duration(r.evalWindow),
EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

View File

@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()
for idx, c := range cases {
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}
logger := instrumentationtest.New().Logger()
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
require.NoError(t, err)
now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,


@@ -1,6 +1,8 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
@@ -11,14 +13,14 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -28,6 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -43,10 +46,21 @@ type Handlers struct {
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Role role.Handler
Fields fields.Handler
AuthzHandler authz.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -60,6 +74,7 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
}
}


@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -41,13 +40,9 @@ func TestNewHandlers(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)


@@ -25,7 +25,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -67,9 +66,6 @@ type Modules struct {
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
RoleSetter role.Setter
RoleGetter role.Getter
Granter role.Granter
}
func NewModules(
@@ -89,13 +85,10 @@ func NewModules(
queryParser queryparser.QueryParser,
config Config,
dashboard dashboard.Module,
roleSetter role.Setter,
roleGetter role.Getter,
granter role.Granter,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, granter, analytics, config.User)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -117,8 +110,5 @@ func NewModules(
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
RoleSetter: roleSetter,
RoleGetter: roleGetter,
Granter: granter,
}
}


@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -41,10 +40,7 @@ func TestNewModules(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {


@@ -15,11 +15,11 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -50,8 +50,8 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ gateway.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err


@@ -166,6 +166,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddAuthzIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
)
}
@@ -247,8 +248,8 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.GatewayHandler,
modules.RoleGetter,
handlers.Role,
handlers.Fields,
handlers.AuthzHandler,
),
)
}


@@ -21,8 +21,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
@@ -89,10 +87,9 @@ func New(
sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]],
telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]],
authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Setter, role.Granter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
roleSetterCallback func(sqlstore.SQLStore, authz.AuthZ, licensing.Licensing, []role.RegisterTypeable) role.Setter,
) (*SigNoz, error) {
// Initialize instrumentation
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -284,11 +281,24 @@ func New(
// Initialize user getter
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
// Initialize the role getter
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
// Initialize query parser (needed for dashboard module)
queryParser := queryparser.New(providerSettings)
// Initialize dashboard module (needed for authz registry)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, queryParser, querier, licensing)
// Initialize authz
authzProviderFactory := authzCallback(ctx, sqlstore)
authzProviderFactory := authzCallback(ctx, sqlstore, licensing, dashboard)
authz, err := authzProviderFactory.New(ctx, providerSettings, authz.Config{})
if err != nil {
return nil, err
@@ -318,9 +328,6 @@ func New(
return nil, err
}
// Initialize query parser
queryParser := queryparser.New(providerSettings)
// Initialize ruler from the available ruler provider factories
ruler, err := factory.NewProviderFromNamedMap(
ctx,
@@ -333,16 +340,6 @@ func New(
return nil, err
}
licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
licensing, err := licensingProviderFactory.New(
ctx,
providerSettings,
licenseConfig,
)
if err != nil {
return nil, err
}
gatewayFactory := gatewayProviderFactory(licensing)
gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
if err != nil {
@@ -390,13 +387,10 @@ func New(
}
// Initialize all modules
roleSetter := roleSetterCallback(sqlstore, authz, licensing, nil)
granter := implrole.NewGranter(implrole.NewStore(sqlstore), authz)
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(


@@ -0,0 +1,154 @@
package sqlmigration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/oklog/ulid/v2"
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect"
"github.com/uptrace/bun/migrate"
)
type addAnonymousPublicDashboardTransaction struct {
sqlstore sqlstore.SQLStore
}
func NewAddAnonymousPublicDashboardTransactionFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_public_dashboard_txn"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newAddAnonymousPublicDashboardTransaction(ctx, ps, c, sqlstore)
})
}
func newAddAnonymousPublicDashboardTransaction(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore) (SQLMigration, error) {
return &addAnonymousPublicDashboardTransaction{
sqlstore: sqlstore,
}, nil
}
func (migration *addAnonymousPublicDashboardTransaction) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
var storeID string
err = tx.QueryRowContext(ctx, `SELECT id FROM store WHERE name = ? LIMIT 1`, "signoz").Scan(&storeID)
if err != nil {
return err
}
// fetch all the orgs for which we need to insert the anonymous public dashboard transaction tuple.
orgIDs := []string{}
rows, err := tx.QueryContext(ctx, `SELECT id FROM organizations`)
if err != nil {
return err
}
defer rows.Close()
for rows.Next() {
var orgID string
if err := rows.Scan(&orgID); err != nil {
return err
}
orgIDs = append(orgIDs, orgID)
}
for _, orgID := range orgIDs {
// per openfga's implementation, a write uses the same ulid for the tuple row and its changelog entry.
// ref: https://github.com/openfga/openfga/blob/main/pkg/storage/sqlite/sqlite.go#L467
entropy := ulid.DefaultEntropy()
now := time.Now().UTC()
tupleID := ulid.MustNew(ulid.Timestamp(now), entropy).String()
// Add wildcard (*) transaction for signoz-anonymous role to read all public-dashboards
// This grants the signoz-anonymous role read access to all public dashboards in the organization
if migration.sqlstore.BunDB().Dialect().Name() == dialect.PG {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, _user, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, _user) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, _user, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
)
if err != nil {
return err
}
} else {
result, err := tx.ExecContext(ctx, `
INSERT INTO tuple (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, user_type, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected == 0 {
continue
}
_, err = tx.ExecContext(ctx, `
INSERT INTO changelog (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, operation, ulid, inserted_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT (store, ulid, object_type) DO NOTHING`,
storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
)
if err != nil {
return err
}
}
}
err = tx.Commit()
if err != nil {
return err
}
return nil
}
func (migration *addAnonymousPublicDashboardTransaction) Down(context.Context, *bun.DB) error {
return nil
}
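For orientation, this is the relationship each organization ends up with after the migration, spelled out as plain strings. It is illustrative only: the helper and the placeholder values are assumptions, and the real code uses roletypes.SigNozAnonymousRoleName and actual organization ids rather than the literals below.

package main

import "fmt"

// anonymousPublicDashboardTuple mirrors the strings written by the migration:
// a wildcard public-dashboard object, readable by assignees of the anonymous role.
func anonymousPublicDashboardTuple(orgID, anonymousRoleName string) (objectType, objectID, relation, user string) {
	objectType = "metaresource"
	objectID = "organization/" + orgID + "/public-dashboard/*"
	relation = "read"
	user = "role:organization/" + orgID + "/role/" + anonymousRoleName + "#assignee"
	return objectType, objectID, relation, user
}

func main() {
	// "example-org-id" and "anonymous-role" are placeholders.
	fmt.Println(anonymousPublicDashboardTuple("example-org-id", "anonymous-role"))
}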


@@ -20,15 +20,19 @@ var (
)
var (
typeUserSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeRoleSelectorRegex = regexp.MustCompile(`^[a-z-]{1,50}$`)
typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
// metaresources selectors are used to select either all or none
// metaresources selectors are used to select either all or none until we introduce some hierarchy here.
typeMetaResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)
var (
WildCardSelectorString = "*"
)
type SelectorCallbackWithClaimsFn func(*http.Request, Claims) ([]Selector, error)
type SelectorCallbackWithoutClaimsFn func(*http.Request, []*types.Organization) ([]Selector, valuer.UUID, error)
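The selector patterns above now accept the wildcard selector alongside UUIDs and role names. A quick standalone check of that behaviour using the expressions copied from the diff; the UUID below is a placeholder.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	typeUserSelectorRegex := regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
	typeRoleSelectorRegex := regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)

	fmt.Println(typeUserSelectorRegex.MatchString("*"))                                    // true
	fmt.Println(typeUserSelectorRegex.MatchString("2b9cbc23-01b9-4a4e-9a4a-1d9c1f9a2b3c")) // true (placeholder UUID)
	fmt.Println(typeRoleSelectorRegex.MatchString("signoz-anonymous"))                     // true
	fmt.Println(typeRoleSelectorRegex.MatchString("*"))                                    // true
}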

Some files were not shown because too many files have changed in this diff.