Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-09 11:12:21 +00:00)

Compare commits (1 commit)
| Author | SHA1 | Date |
|---|---|---|
| | d09ed1dca4 | |
.github/CODEOWNERS (vendored, 7 changes)
@@ -133,8 +133,5 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend

## Dashboard Libs + Components
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend
## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend

@@ -18,6 +18,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -76,15 +78,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

@@ -14,6 +14,7 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -28,6 +29,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -115,15 +118,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)

if err != nil {

@@ -607,178 +607,6 @@ paths:
|
||||
summary: Update auth domain
|
||||
tags:
|
||||
- authdomains
|
||||
/api/v1/fields/keys:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint returns field keys
|
||||
operationId: GetFieldsKeys
|
||||
parameters:
|
||||
- in: query
|
||||
name: signal
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: source
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: limit
|
||||
schema:
|
||||
type: integer
|
||||
- in: query
|
||||
name: startUnixMilli
|
||||
schema:
|
||||
format: int64
|
||||
type: integer
|
||||
- in: query
|
||||
name: endUnixMilli
|
||||
schema:
|
||||
format: int64
|
||||
type: integer
|
||||
- in: query
|
||||
name: fieldContext
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: fieldDataType
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: metricName
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: searchText
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Get field keys
|
||||
tags:
|
||||
- fields
|
||||
/api/v1/fields/values:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint returns field values
|
||||
operationId: GetFieldsValues
|
||||
parameters:
|
||||
- in: query
|
||||
name: signal
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: source
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: limit
|
||||
schema:
|
||||
type: integer
|
||||
- in: query
|
||||
name: startUnixMilli
|
||||
schema:
|
||||
format: int64
|
||||
type: integer
|
||||
- in: query
|
||||
name: endUnixMilli
|
||||
schema:
|
||||
format: int64
|
||||
type: integer
|
||||
- in: query
|
||||
name: fieldContext
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: fieldDataType
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: metricName
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: searchText
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: name
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
name: existingQuery
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Get field values
|
||||
tags:
|
||||
- fields
|
||||
/api/v1/getResetPasswordToken/{id}:
|
||||
get:
|
||||
deprecated: false
|
||||
@@ -4100,9 +3928,19 @@ components:
|
||||
isMonotonic:
|
||||
type: boolean
|
||||
temporality:
|
||||
$ref: '#/components/schemas/MetrictypesTemporality'
|
||||
enum:
|
||||
- delta
|
||||
- cumulative
|
||||
- unspecified
|
||||
type: string
|
||||
type:
|
||||
$ref: '#/components/schemas/MetrictypesType'
|
||||
enum:
|
||||
- gauge
|
||||
- sum
|
||||
- histogram
|
||||
- summary
|
||||
- exponentialhistogram
|
||||
type: string
|
||||
unit:
|
||||
type: string
|
||||
required:
|
||||
@@ -4125,7 +3963,13 @@ components:
|
||||
minimum: 0
|
||||
type: integer
|
||||
type:
|
||||
$ref: '#/components/schemas/MetrictypesType'
|
||||
enum:
|
||||
- gauge
|
||||
- sum
|
||||
- histogram
|
||||
- summary
|
||||
- exponentialhistogram
|
||||
type: string
|
||||
unit:
|
||||
type: string
|
||||
required:
|
||||
@@ -4186,11 +4030,6 @@ components:
|
||||
- percentage
|
||||
- totalValue
|
||||
type: object
|
||||
MetricsexplorertypesTreemapMode:
|
||||
enum:
|
||||
- timeseries
|
||||
- samples
|
||||
type: string
|
||||
MetricsexplorertypesTreemapRequest:
|
||||
properties:
|
||||
end:
|
||||
@@ -4201,7 +4040,10 @@ components:
|
||||
limit:
|
||||
type: integer
|
||||
mode:
|
||||
$ref: '#/components/schemas/MetricsexplorertypesTreemapMode'
|
||||
enum:
|
||||
- timeseries
|
||||
- samples
|
||||
type: string
|
||||
start:
|
||||
format: int64
|
||||
type: integer
|
||||
@@ -4236,9 +4078,19 @@ components:
|
||||
metricName:
|
||||
type: string
|
||||
temporality:
|
||||
$ref: '#/components/schemas/MetrictypesTemporality'
|
||||
enum:
|
||||
- delta
|
||||
- cumulative
|
||||
- unspecified
|
||||
type: string
|
||||
type:
|
||||
$ref: '#/components/schemas/MetrictypesType'
|
||||
enum:
|
||||
- gauge
|
||||
- sum
|
||||
- histogram
|
||||
- summary
|
||||
- exponentialhistogram
|
||||
type: string
|
||||
unit:
|
||||
type: string
|
||||
required:
|
||||
@@ -4249,20 +4101,6 @@ components:
|
||||
- temporality
|
||||
- isMonotonic
|
||||
type: object
|
||||
MetrictypesTemporality:
|
||||
enum:
|
||||
- delta
|
||||
- cumulative
|
||||
- unspecified
|
||||
type: string
|
||||
MetrictypesType:
|
||||
enum:
|
||||
- gauge
|
||||
- sum
|
||||
- histogram
|
||||
- summary
|
||||
- exponentialhistogram
|
||||
type: string
|
||||
PreferencetypesPreference:
|
||||
properties:
|
||||
allowedScopes:
|
||||
@@ -4416,60 +4254,6 @@ components:
|
||||
format: date-time
|
||||
type: string
|
||||
type: object
|
||||
TelemetrytypesGettableFieldKeys:
|
||||
properties:
|
||||
complete:
|
||||
type: boolean
|
||||
keys:
|
||||
additionalProperties:
|
||||
items:
|
||||
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
|
||||
type: array
|
||||
nullable: true
|
||||
type: object
|
||||
type: object
|
||||
TelemetrytypesGettableFieldValues:
|
||||
properties:
|
||||
complete:
|
||||
type: boolean
|
||||
values:
|
||||
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
|
||||
type: object
|
||||
TelemetrytypesTelemetryFieldKey:
|
||||
properties:
|
||||
description:
|
||||
type: string
|
||||
fieldContext:
|
||||
type: string
|
||||
fieldDataType:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
signal:
|
||||
type: string
|
||||
unit:
|
||||
type: string
|
||||
type: object
|
||||
TelemetrytypesTelemetryFieldValues:
|
||||
properties:
|
||||
boolValues:
|
||||
items:
|
||||
type: boolean
|
||||
type: array
|
||||
numberValues:
|
||||
items:
|
||||
format: double
|
||||
type: number
|
||||
type: array
|
||||
relatedValues:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
stringValues:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
type: object
|
||||
TypesChangePasswordRequest:
|
||||
properties:
|
||||
newPassword:
|
||||
|
||||
@@ -2,18 +2,12 @@ package openfgaauthz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
|
||||
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
||||
@@ -21,224 +15,50 @@ import (
|
||||
|
||||
type provider struct {
|
||||
pkgAuthzService authz.AuthZ
|
||||
openfgaServer *openfgaserver.Server
|
||||
licensing licensing.Licensing
|
||||
store roletypes.Store
|
||||
registry []authz.RegisterTypeable
|
||||
}
|
||||
|
||||
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
|
||||
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
|
||||
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
|
||||
})
|
||||
}
|
||||
|
||||
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
|
||||
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
|
||||
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
|
||||
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &provider{
|
||||
pkgAuthzService: pkgAuthzService,
|
||||
openfgaServer: openfgaServer,
|
||||
licensing: licensing,
|
||||
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
|
||||
registry: registry,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Start(ctx context.Context) error {
|
||||
return provider.openfgaServer.Start(ctx)
|
||||
return provider.pkgAuthzService.Start(ctx)
|
||||
}
|
||||
|
||||
func (provider *provider) Stop(ctx context.Context) error {
|
||||
return provider.openfgaServer.Stop(ctx)
|
||||
return provider.pkgAuthzService.Stop(ctx)
|
||||
}
|
||||
|
||||
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
|
||||
return provider.openfgaServer.Check(ctx, tuple)
|
||||
return provider.pkgAuthzService.Check(ctx, tuple)
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
|
||||
}
|
||||
|
||||
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return provider.openfgaServer.BatchCheck(ctx, tuples)
|
||||
}
|
||||
|
||||
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
return provider.openfgaServer.Write(ctx, additions, deletions)
|
||||
}
|
||||
|
||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
|
||||
return provider.pkgAuthzService.Get(ctx, orgID, id)
|
||||
}
|
||||
|
||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
|
||||
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
|
||||
}
|
||||
|
||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
|
||||
return provider.pkgAuthzService.List(ctx, orgID)
|
||||
}
|
||||
|
||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
|
||||
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
|
||||
}
|
||||
|
||||
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
|
||||
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
|
||||
}
|
||||
|
||||
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
|
||||
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
|
||||
}
|
||||
|
||||
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
|
||||
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
|
||||
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
|
||||
tuples := make([]*openfgav1.TupleKey, 0)
|
||||
|
||||
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tuples = append(tuples, grantTuples...)
|
||||
|
||||
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tuples = append(tuples, managedRoleTuples...)
|
||||
|
||||
return provider.Write(ctx, tuples, nil)
|
||||
}
|
||||
|
||||
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
}
|
||||
|
||||
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
|
||||
if err != nil {
|
||||
if !errors.Ast(err, errors.TypeNotFound) {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if existingRole != nil {
|
||||
return roletypes.NewRoleFromStorableRole(existingRole), nil
|
||||
}
|
||||
|
||||
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return role, nil
|
||||
}
|
||||
|
||||
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
|
||||
typeables := make([]authtypes.Typeable, 0)
|
||||
for _, register := range provider.registry {
|
||||
typeables = append(typeables, register.MustGetTypeables()...)
|
||||
}
|
||||
// role module cannot self register itself!
|
||||
typeables = append(typeables, provider.MustGetTypeables()...)
|
||||
|
||||
resources := make([]*authtypes.Resource, 0)
|
||||
for _, typeable := range typeables {
|
||||
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
|
||||
}
|
||||
|
||||
return resources
|
||||
}
|
||||
|
||||
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
|
||||
storableRole, err := provider.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
objects := make([]*authtypes.Object, 0)
|
||||
for _, resource := range provider.GetResources(ctx) {
|
||||
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
|
||||
resourceObjects, err := provider.
|
||||
ListObjects(
|
||||
ctx,
|
||||
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
|
||||
relation,
|
||||
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
objects = append(objects, resourceObjects...)
|
||||
}
|
||||
}
|
||||
|
||||
return objects, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
|
||||
}
|
||||
|
||||
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.Write(ctx, additionTuples, deletionTuples)
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -246,95 +66,33 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
|
||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
storableRole, err := provider.store.Get(ctx, orgID, id)
|
||||
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role := roletypes.NewRoleFromStorableRole(storableRole)
|
||||
err = role.CanEditDelete()
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return provider.store.Delete(ctx, orgID, id)
|
||||
}
|
||||
|
||||
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
|
||||
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
|
||||
}
|
||||
|
||||
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
|
||||
tuples := []*openfgav1.TupleKey{}
|
||||
|
||||
// Grant the admin role to the user
|
||||
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
|
||||
adminTuple, err := authtypes.TypeableRole.Tuples(
|
||||
adminSubject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
|
||||
},
|
||||
orgID,
|
||||
)
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
tuples = append(tuples, adminTuple...)
|
||||
|
||||
// Grant the admin role to the anonymous user
|
||||
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
anonymousTuple, err := authtypes.TypeableRole.Tuples(
|
||||
anonymousSubject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
|
||||
},
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tuples = append(tuples, anonymousTuple...)
|
||||
|
||||
return tuples, nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
|
||||
transactionsByRole := make(map[string][]*authtypes.Transaction)
|
||||
for _, register := range provider.registry {
|
||||
for roleName, txns := range register.MustGetManagedRoleTransactions() {
|
||||
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
|
||||
}
|
||||
}
|
||||
|
||||
tuples := make([]*openfgav1.TupleKey, 0)
|
||||
for roleName, transactions := range transactionsByRole {
|
||||
for _, txn := range transactions {
|
||||
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
|
||||
txnTuples, err := typeable.Tuples(
|
||||
authtypes.MustNewSubject(
|
||||
authtypes.TypeableRole,
|
||||
roleName,
|
||||
orgID,
|
||||
&authtypes.RelationAssignee,
|
||||
),
|
||||
txn.Relation,
|
||||
[]authtypes.Selector{txn.Object.Selector},
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tuples = append(tuples, txnTuples...)
|
||||
}
|
||||
}
|
||||
|
||||
return tuples, nil
|
||||
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
|
||||
}
|
||||
|
||||
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
return provider.pkgAuthzService.Write(ctx, additions, deletions)
|
||||
}
|
||||
|
||||
@@ -1,83 +0,0 @@
|
||||
package openfgaserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
)
|
||||
|
||||
type Server struct {
|
||||
pkgAuthzService authz.AuthZ
|
||||
}
|
||||
|
||||
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
|
||||
|
||||
return &Server{
|
||||
pkgAuthzService: pkgAuthzService,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (server *Server) Start(ctx context.Context) error {
|
||||
return server.pkgAuthzService.Start(ctx)
|
||||
}
|
||||
|
||||
func (server *Server) Stop(ctx context.Context) error {
|
||||
return server.pkgAuthzService.Stop(ctx)
|
||||
}
|
||||
|
||||
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.Check(ctx, tuple)
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.BatchCheck(ctx, tuples)
|
||||
}
|
||||
|
||||
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.Write(ctx, additions, deletions)
|
||||
}
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
@@ -25,11 +26,13 @@ type module struct {
|
||||
pkgDashboardModule dashboard.Module
|
||||
store dashboardtypes.Store
|
||||
settings factory.ScopedProviderSettings
|
||||
roleSetter role.Setter
|
||||
granter role.Granter
|
||||
querier querier.Querier
|
||||
licensing licensing.Licensing
|
||||
}
|
||||
|
||||
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
|
||||
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
|
||||
|
||||
@@ -37,6 +40,8 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
|
||||
pkgDashboardModule: pkgDashboardModule,
|
||||
store: store,
|
||||
settings: scopedProviderSettings,
|
||||
roleSetter: roleSetter,
|
||||
granter: granter,
|
||||
querier: querier,
|
||||
licensing: licensing,
|
||||
}
|
||||
@@ -56,6 +61,29 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
|
||||
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
|
||||
}
|
||||
|
||||
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
additionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -100,7 +128,6 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
|
||||
|
||||
return []authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
|
||||
}, storableDashboard.OrgID, nil
|
||||
}
|
||||
|
||||
@@ -163,6 +190,29 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -200,6 +250,10 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
|
||||
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
|
||||
}
|
||||
|
||||
func (module *module) MustGetTypeables() []authtypes.Typeable {
|
||||
return module.pkgDashboardModule.MustGetTypeables()
|
||||
}
|
||||
|
||||
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.List(ctx, orgID)
|
||||
}
|
||||
@@ -212,27 +266,34 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
|
||||
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
|
||||
}
|
||||
|
||||
func (module *module) MustGetTypeables() []authtypes.Typeable {
|
||||
return module.pkgDashboardModule.MustGetTypeables()
|
||||
}
|
||||
|
||||
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
|
||||
return map[string][]*authtypes.Transaction{
|
||||
roletypes.SigNozAnonymousRoleName: {
|
||||
{
|
||||
Relation: authtypes.RelationRead,
|
||||
Object: *authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Type: authtypes.TypeMetaResource,
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
|
||||
),
|
||||
},
|
||||
},
|
||||
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
|
||||
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
|
||||
return module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
ee/modules/role/implrole/setter.go (new file, 165 lines)
@@ -0,0 +1,165 @@
|
||||
package implrole
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type setter struct {
|
||||
store roletypes.Store
|
||||
authz authz.AuthZ
|
||||
licensing licensing.Licensing
|
||||
registry []role.RegisterTypeable
|
||||
}
|
||||
|
||||
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
|
||||
return &setter{
|
||||
store: store,
|
||||
authz: authz,
|
||||
licensing: licensing,
|
||||
registry: registry,
|
||||
}
|
||||
}
|
||||
|
||||
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
||||
_, err := setter.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
}
|
||||
|
||||
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
|
||||
_, err := setter.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
|
||||
if err != nil {
|
||||
if !errors.Ast(err, errors.TypeNotFound) {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if existingRole != nil {
|
||||
return roletypes.NewRoleFromStorableRole(existingRole), nil
|
||||
}
|
||||
|
||||
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return role, nil
|
||||
}
|
||||
|
||||
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
|
||||
typeables := make([]authtypes.Typeable, 0)
|
||||
for _, register := range setter.registry {
|
||||
typeables = append(typeables, register.MustGetTypeables()...)
|
||||
}
|
||||
// role module cannot self register itself!
|
||||
typeables = append(typeables, setter.MustGetTypeables()...)
|
||||
|
||||
resources := make([]*authtypes.Resource, 0)
|
||||
for _, typeable := range typeables {
|
||||
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
|
||||
}
|
||||
|
||||
return resources
|
||||
}
|
||||
|
||||
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
|
||||
storableRole, err := setter.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
objects := make([]*authtypes.Object, 0)
|
||||
for _, resource := range setter.GetResources(ctx) {
|
||||
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
|
||||
resourceObjects, err := setter.
|
||||
authz.
|
||||
ListObjects(
|
||||
ctx,
|
||||
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
|
||||
relation,
|
||||
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
objects = append(objects, resourceObjects...)
|
||||
}
|
||||
}
|
||||
|
||||
return objects, nil
|
||||
}
|
||||
|
||||
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
||||
_, err := setter.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
|
||||
}
|
||||
|
||||
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
|
||||
_, err := setter.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
|
||||
_, err := setter.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
storableRole, err := setter.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role := roletypes.NewRoleFromStorableRole(storableRole)
|
||||
err = role.CanEditDelete()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return setter.store.Delete(ctx, orgID, id)
|
||||
}
|
||||
|
||||
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
|
||||
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
|
||||
}
|
||||
@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
@@ -55,6 +56,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

@@ -211,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {

func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)

r.Use(otelmux.Middleware(
"apiserver",
@@ -237,6 +237,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"

querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,8 +63,6 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}

var _ baserules.Rule = (*AnomalyRule)(nil)

func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -492,7 +490,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}

if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -555,7 +553,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: r.EvalWindow(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)

baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
evalTime := baseTime.Add(5 * time.Minute)

target := 500.0
@@ -50,8 +50,8 @@
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor

baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute

// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)

// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)

// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)

// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,7 +213,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}

// newTask returns an appropriate group for the rule type
// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
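The rule-duration changes above (in the anomaly rule, its tests, and PrepareTaskFunc) all revolve around converting between a named rule-duration type and time.Duration. A minimal standalone sketch of that conversion pattern, assuming ruletypes.Duration is simply a named type whose underlying type is time.Duration (the Duration type and method below are illustrative stand-ins, not the actual ruletypes definitions):

```go
package main

import (
	"fmt"
	"time"
)

// Duration mirrors the assumed shape of ruletypes.Duration: a named type
// whose underlying type is time.Duration, so both a plain type conversion
// and a helper method can produce a standard time.Duration.
type Duration time.Duration

// Duration returns the value as a standard time.Duration (method form).
func (d Duration) Duration() time.Duration { return time.Duration(d) }

func main() {
	evalWindow := 5 * time.Minute

	// Wrapping, as in the test changes: EvalWindow: ruletypes.Duration(evalWindow)
	wrapped := Duration(evalWindow)

	// Unwrapping, as in the PrepareTaskFunc changes: either form yields the same value.
	viaMethod := wrapped.Duration()
	viaConversion := time.Duration(wrapped)

	fmt.Println(viaMethod == viaConversion) // true
}
```

Under that assumption, ruletypes.Duration(evalWindow) in the tests and time.Duration(evaluation.GetFrequency()) in PrepareTaskFunc are plain type conversions, while the removed .Duration() calls were the method form of the same unwrap.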
@@ -762,6 +762,18 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
totalTimeSeries: number;
}

export enum MetricsexplorertypesMetricMetadataDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesMetricMetadataDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesMetricMetadataDTO {
/**
* @type string
@@ -771,14 +783,29 @@ export interface MetricsexplorertypesMetricMetadataDTO {
* @type boolean
*/
isMonotonic: boolean;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesMetricMetadataDTOType;
/**
* @type string
*/
unit: string;
}

export enum MetricsexplorertypesStatDTOType {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface MetricsexplorertypesStatDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -798,7 +825,11 @@ export interface MetricsexplorertypesStatDTO {
|
||||
* @minimum 0
|
||||
*/
|
||||
timeseries: number;
|
||||
type: MetrictypesTypeDTO;
|
||||
/**
|
||||
* @enum gauge,sum,histogram,summary,exponentialhistogram
|
||||
* @type string
|
||||
*/
|
||||
type: MetricsexplorertypesStatDTOType;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
@@ -858,7 +889,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
|
||||
totalValue: number;
|
||||
}
|
||||
|
||||
export enum MetricsexplorertypesTreemapModeDTO {
|
||||
export enum MetricsexplorertypesTreemapRequestDTOMode {
|
||||
timeseries = 'timeseries',
|
||||
samples = 'samples',
|
||||
}
|
||||
@@ -873,7 +904,11 @@ export interface MetricsexplorertypesTreemapRequestDTO {
|
||||
* @type integer
|
||||
*/
|
||||
limit: number;
|
||||
mode: MetricsexplorertypesTreemapModeDTO;
|
||||
/**
|
||||
* @enum timeseries,samples
|
||||
* @type string
|
||||
*/
|
||||
mode: MetricsexplorertypesTreemapRequestDTOMode;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
@@ -894,6 +929,18 @@ export interface MetricsexplorertypesTreemapResponseDTO {
|
||||
timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
|
||||
}
|
||||
|
||||
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
|
||||
delta = 'delta',
|
||||
cumulative = 'cumulative',
|
||||
unspecified = 'unspecified',
|
||||
}
|
||||
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
|
||||
/**
|
||||
* @type string
|
||||
@@ -907,26 +954,22 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
|
||||
* @type string
|
||||
*/
|
||||
metricName: string;
|
||||
temporality: MetrictypesTemporalityDTO;
|
||||
type: MetrictypesTypeDTO;
|
||||
/**
|
||||
* @enum delta,cumulative,unspecified
|
||||
* @type string
|
||||
*/
|
||||
temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
|
||||
/**
|
||||
* @enum gauge,sum,histogram,summary,exponentialhistogram
|
||||
* @type string
|
||||
*/
|
||||
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
unit: string;
|
||||
}
|
||||
|
||||
export enum MetrictypesTemporalityDTO {
|
||||
delta = 'delta',
|
||||
cumulative = 'cumulative',
|
||||
unspecified = 'unspecified',
|
||||
}
|
||||
export enum MetrictypesTypeDTO {
|
||||
gauge = 'gauge',
|
||||
sum = 'sum',
|
||||
histogram = 'histogram',
|
||||
summary = 'summary',
|
||||
exponentialhistogram = 'exponentialhistogram',
|
||||
}
|
||||
export interface PreferencetypesPreferenceDTO {
|
||||
/**
|
||||
* @type array
|
||||
@@ -1345,7 +1388,7 @@ export interface TypesPostableForgotPasswordDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
email: string;
|
||||
email?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
@@ -1353,7 +1396,7 @@ export interface TypesPostableForgotPasswordDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
orgId: string;
|
||||
orgId?: string;
|
||||
}
|
||||
|
||||
export interface TypesPostableInviteDTO {
|
||||
|
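A note on the generated-types hunks above: on one side of the change, the temporality and type fields are typed with DTO-scoped enums (MetricsexplorertypesMetricMetadataDTOTemporality, MetricsexplorertypesMetricMetadataDTOType and their StatDTO / UpdateMetricMetadataRequestDTO counterparts) rather than the shared MetrictypesTemporalityDTO / MetrictypesTypeDTO. A minimal TypeScript sketch of how values are picked with the DTO-scoped variant; the import path is a placeholder, not part of the diff:

import {
	MetricsexplorertypesMetricMetadataDTOTemporality,
	MetricsexplorertypesMetricMetadataDTOType,
} from 'api/generated/metricsExplorer'; // placeholder path, assumed for illustration

// Values come from the DTO-scoped enums rather than the shared metrictypes enums.
const temporality: MetricsexplorertypesMetricMetadataDTOTemporality =
	MetricsexplorertypesMetricMetadataDTOTemporality.cumulative;
const metricType: MetricsexplorertypesMetricMetadataDTOType =
	MetricsexplorertypesMetricMetadataDTOType.sum;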
||||
@@ -1,8 +1,10 @@
/* eslint-disable no-nested-ternary */
import { useCallback, useEffect, useMemo } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso } from 'react-virtuoso';
import { Card } from 'antd';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
@@ -10,6 +12,7 @@ import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { ILog } from 'types/api/logs/log';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -28,6 +31,16 @@ interface Props {
}

function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
} = useActiveLog();
const [selectedTab, setSelectedTab] = useState<
typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined
>();

const basePayload = getHostLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
@@ -72,6 +85,25 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
setIsPaginating(false);
}, [data, setIsPaginating]);

const handleSetActiveLogWithTab = useCallback(
(log: ILog): void => {
// If clicking the same log that's already active, close it
if (activeLog?.id === log.id) {
onClearActiveLog();
setSelectedTab(undefined);
return;
}
onSetActiveLog(log);
setSelectedTab(VIEW_TYPES.OVERVIEW);
},
[onSetActiveLog, activeLog, onClearActiveLog],
);

const handleCloseLogDetail = useCallback((): void => {
onClearActiveLog();
setSelectedTab(undefined);
}, [onClearActiveLog]);

const getItemContent = useCallback(
(_: number, logToRender: ILog): JSX.Element => (
<RawLogView
@@ -92,9 +124,13 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
name: 'timestamp',
},
]}
onSetActiveLog={handleSetActiveLogWithTab}
onClearActiveLog={handleCloseLogDetail}
isActiveLog={activeLog?.id === logToRender.id}
managedExternally
/>
),
[],
[activeLog, handleSetActiveLogWithTab, handleCloseLogDetail],
);

const renderFooter = useCallback(
@@ -141,6 +177,17 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
{!isLoading && !isError && logs.length > 0 && (
<div className="host-metrics-logs-list-container">{renderContent}</div>
)}
{selectedTab && activeLog && (
<LogDetail
log={activeLog}
onClose={handleCloseLogDetail}
logs={logs}
onNavigateLog={handleSetActiveLogWithTab}
selectedTab={selectedTab}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
/>
)}
</div>
);
}
|
||||
|
||||
@@ -13,6 +13,8 @@ export type LogDetailProps = {
handleChangeSelectedView?: ChangeViewFunctionType;
isListViewPanel?: boolean;
listViewPanelSelectedFields?: IField[] | null;
logs?: ILog[];
onNavigateLog?: (log: ILog) => void;
} & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
Pick<DrawerProps, 'onClose'>;

@@ -66,6 +66,10 @@
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.log-detail-drawer__content {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.log-detail-drawer__log {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
@@ -183,6 +187,34 @@
|
||||
.ant-drawer-close {
|
||||
padding: 0px;
|
||||
}
|
||||
.log-detail-drawer__footer-hint {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
padding: 8px 16px;
|
||||
text-align: left;
|
||||
color: var(--text-vanilla-200);
|
||||
background: var(--bg-ink-400);
|
||||
z-index: 10;
|
||||
|
||||
.log-detail-drawer__footer-hint-content {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.log-detail-drawer__footer-hint-icon {
|
||||
display: inline;
|
||||
vertical-align: middle;
|
||||
color: var(--text-vanilla-200);
|
||||
}
|
||||
|
||||
.log-detail-drawer__footer-hint-text {
|
||||
font-size: 13px;
|
||||
margin: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
@@ -252,4 +284,33 @@
|
||||
color: var(--text-ink-300);
|
||||
}
|
||||
}
|
||||
|
||||
.log-detail-drawer__footer-hint {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
padding: 8px 16px;
|
||||
text-align: left;
|
||||
color: var(--text-vanilla-700);
|
||||
background: var(--bg-vanilla-100);
|
||||
z-index: 10;
|
||||
|
||||
.log-detail-drawer__footer-hint-content {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
.log-detail-drawer__footer-hint-icon {
|
||||
display: inline;
|
||||
vertical-align: middle;
|
||||
color: var(--text-vanilla-700);
|
||||
}
|
||||
|
||||
.log-detail-drawer__footer-hint-text {
|
||||
font-size: 13px;
|
||||
margin: 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { useCopyToClipboard, useLocation } from 'react-use';
|
||||
import { Color, Spacing } from '@signozhq/design-tokens';
|
||||
@@ -32,6 +32,8 @@ import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import createQueryParams from 'lib/createQueryParams';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import {
|
||||
ArrowDown,
|
||||
ArrowUp,
|
||||
BarChart2,
|
||||
Braces,
|
||||
Compass,
|
||||
@@ -60,6 +62,8 @@ function LogDetailInner({
|
||||
isListViewPanel = false,
|
||||
listViewPanelSelectedFields,
|
||||
handleChangeSelectedView,
|
||||
logs,
|
||||
onNavigateLog,
|
||||
}: LogDetailInnerProps): JSX.Element {
|
||||
const initialContextQuery = useInitialQuery(log);
|
||||
const [contextQuery, setContextQuery] = useState<Query | undefined>(
|
||||
@@ -74,6 +78,72 @@ function LogDetailInner({
|
||||
const [isEdit, setIsEdit] = useState<boolean>(false);
|
||||
const { stagedQuery, updateAllQueriesOperators } = useQueryBuilder();
|
||||
|
||||
// Handle clicks outside to close drawer, except on explicitly ignored regions
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (e: MouseEvent): void => {
|
||||
const target = e.target as HTMLElement;
|
||||
|
||||
// Don't close if clicking on explicitly ignored regions
|
||||
if (target.closest('[data-log-detail-ignore="true"]')) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Close the drawer for any other outside click
|
||||
onClose?.(e as any);
|
||||
};
|
||||
|
||||
// Add listener after a small delay to avoid immediate closure
|
||||
const timeoutId = setTimeout(() => {
|
||||
document.addEventListener('mousedown', handleClickOutside);
|
||||
}, 100);
|
||||
|
||||
return (): void => {
|
||||
clearTimeout(timeoutId);
|
||||
document.removeEventListener('mousedown', handleClickOutside);
|
||||
};
|
||||
}, [onClose]);
|
||||
|
||||
// Keyboard navigation - handle up/down arrow keys
|
||||
// Only listen when in OVERVIEW tab
|
||||
useEffect(() => {
|
||||
if (
|
||||
!logs ||
|
||||
!onNavigateLog ||
|
||||
logs.length === 0 ||
|
||||
selectedView !== VIEW_TYPES.OVERVIEW
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const handleKeyDown = (e: KeyboardEvent): void => {
|
||||
const currentIndex = logs.findIndex((l) => l.id === log.id);
|
||||
if (currentIndex === -1) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (e.key === 'ArrowUp') {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
// Navigate to previous log
|
||||
if (currentIndex > 0) {
|
||||
onNavigateLog(logs[currentIndex - 1]);
|
||||
}
|
||||
} else if (e.key === 'ArrowDown') {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
// Navigate to next log
|
||||
if (currentIndex < logs.length - 1) {
|
||||
onNavigateLog(logs[currentIndex + 1]);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
document.addEventListener('keydown', handleKeyDown);
|
||||
return (): void => {
|
||||
document.removeEventListener('keydown', handleKeyDown);
|
||||
};
|
||||
}, [log.id, logs, onNavigateLog, selectedView]);
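A note on the two effects added above: the outside-click handler closes the drawer on any mousedown whose target is not inside an element carrying data-log-detail-ignore="true" (the same attribute the drawer title, drawer content and the TableViewActions popovers set in later hunks), and the keyboard handler only navigates while the Overview tab is selected. An illustrative sketch of opting an extra region out of the outside-click close; the component below is a placeholder, not part of the change:

// Clicks inside this wrapper are skipped by the drawer's mousedown listener,
// so interacting with it does not close the log detail drawer.
function PinLogAction({ onPin }: { onPin: () => void }): JSX.Element {
	return (
		<div data-log-detail-ignore="true">
			<button type="button" onClick={onPin}>
				Pin this log
			</button>
		</div>
	);
}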
|
||||
|
||||
const listQuery = useMemo(() => {
|
||||
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) {
|
||||
return null;
|
||||
@@ -231,9 +301,10 @@ function LogDetailInner({
|
||||
return (
|
||||
<Drawer
|
||||
width="60%"
|
||||
maskStyle={{ background: 'none' }}
|
||||
mask={false}
|
||||
maskClosable={false}
|
||||
title={
|
||||
<div className="log-detail-drawer__title">
|
||||
<div className="log-detail-drawer__title" data-log-detail-ignore="true">
|
||||
<div className="log-detail-drawer__title-left">
|
||||
<Divider type="vertical" className={cx('log-type-indicator', LogType)} />
|
||||
<Typography.Text className="title">Log details</Typography.Text>
|
||||
@@ -252,7 +323,6 @@ function LogDetailInner({
|
||||
</div>
|
||||
}
|
||||
placement="right"
|
||||
// closable
|
||||
onClose={drawerCloseHandler}
|
||||
open={log !== null}
|
||||
style={{
|
||||
@@ -263,138 +333,164 @@ function LogDetailInner({
|
||||
destroyOnClose
|
||||
closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
|
||||
>
|
||||
<div className="log-detail-drawer__log">
|
||||
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
|
||||
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
|
||||
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
|
||||
</Tooltip>
|
||||
<div className="log-detail-drawer__content" data-log-detail-ignore="true">
|
||||
<div className="log-detail-drawer__log">
|
||||
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
|
||||
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
|
||||
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
|
||||
</Tooltip>
|
||||
|
||||
<div className="log-overflow-shadow"> </div>
|
||||
</div>
|
||||
<div className="log-overflow-shadow"> </div>
|
||||
</div>
|
||||
|
||||
<div className="tabs-and-search">
|
||||
<Radio.Group
|
||||
className="views-tabs"
|
||||
onChange={handleModeChange}
|
||||
value={selectedView}
|
||||
>
|
||||
<Radio.Button
|
||||
className={
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.OVERVIEW}
|
||||
<div className="tabs-and-search">
|
||||
<Radio.Group
|
||||
className="views-tabs"
|
||||
onChange={handleModeChange}
|
||||
value={selectedView}
|
||||
>
|
||||
<div className="view-title">
|
||||
<Table size={14} />
|
||||
Overview
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'}
|
||||
value={VIEW_TYPES.JSON}
|
||||
>
|
||||
<div className="view-title">
|
||||
<Braces size={14} />
|
||||
JSON
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.CONTEXT}
|
||||
>
|
||||
<div className="view-title">
|
||||
<TextSelect size={14} />
|
||||
Context
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.INFRAMETRICS}
|
||||
>
|
||||
<div className="view-title">
|
||||
<BarChart2 size={14} />
|
||||
Metrics
|
||||
</div>
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
<Radio.Button
|
||||
className={
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.OVERVIEW}
|
||||
>
|
||||
<div className="view-title">
|
||||
<Table size={14} />
|
||||
Overview
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.JSON}
|
||||
>
|
||||
<div className="view-title">
|
||||
<Braces size={14} />
|
||||
JSON
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.CONTEXT}
|
||||
>
|
||||
<div className="view-title">
|
||||
<TextSelect size={14} />
|
||||
Context
|
||||
</div>
|
||||
</Radio.Button>
|
||||
<Radio.Button
|
||||
className={
|
||||
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
|
||||
}
|
||||
value={VIEW_TYPES.INFRAMETRICS}
|
||||
>
|
||||
<div className="view-title">
|
||||
<BarChart2 size={14} />
|
||||
Metrics
|
||||
</div>
|
||||
</Radio.Button>
|
||||
</Radio.Group>
|
||||
|
||||
<div className="log-detail-drawer__actions">
|
||||
{selectedView === VIEW_TYPES.CONTEXT && (
|
||||
<Tooltip
|
||||
title="Show Filters"
|
||||
placement="topLeft"
|
||||
aria-label="Show Filters"
|
||||
>
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Filter size={16} />}
|
||||
onClick={handleFilterVisible}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
<div className="log-detail-drawer__actions">
|
||||
{selectedView === VIEW_TYPES.CONTEXT && (
|
||||
<Tooltip
|
||||
title="Show Filters"
|
||||
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
|
||||
placement="topLeft"
|
||||
aria-label="Show Filters"
|
||||
aria-label={
|
||||
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
|
||||
}
|
||||
>
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Filter size={16} />}
|
||||
onClick={handleFilterVisible}
|
||||
icon={<Copy size={16} />}
|
||||
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
<Tooltip
|
||||
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
|
||||
placement="topLeft"
|
||||
aria-label={
|
||||
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
|
||||
}
|
||||
>
|
||||
<Button
|
||||
className="action-btn"
|
||||
icon={<Copy size={16} />}
|
||||
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
|
||||
</div>
|
||||
</div>
|
||||
{isFilterVisible && contextQuery?.builder.queryData[0] && (
|
||||
<div className="log-detail-drawer-query-container">
|
||||
<QuerySearch
|
||||
onChange={(value): void => handleQueryExpressionChange(value, 0)}
|
||||
dataSource={DataSource.LOGS}
|
||||
queryData={contextQuery?.builder.queryData[0]}
|
||||
onRun={handleRunQuery}
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
{isFilterVisible && contextQuery?.builder.queryData[0] && (
|
||||
<div className="log-detail-drawer-query-container">
|
||||
<QuerySearch
|
||||
onChange={(value): void => handleQueryExpressionChange(value, 0)}
|
||||
dataSource={DataSource.LOGS}
|
||||
queryData={contextQuery?.builder.queryData[0]}
|
||||
onRun={handleRunQuery}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{selectedView === VIEW_TYPES.OVERVIEW && (
|
||||
<Overview
|
||||
logData={log}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onClickActionItem}
|
||||
isListViewPanel={isListViewPanel}
|
||||
selectedOptions={options}
|
||||
listViewPanelSelectedFields={listViewPanelSelectedFields}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
|
||||
|
||||
{selectedView === VIEW_TYPES.OVERVIEW && (
|
||||
<Overview
|
||||
logData={log}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onClickActionItem}
|
||||
isListViewPanel={isListViewPanel}
|
||||
selectedOptions={options}
|
||||
listViewPanelSelectedFields={listViewPanelSelectedFields}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
|
||||
{selectedView === VIEW_TYPES.CONTEXT && (
|
||||
<ContextView
|
||||
log={log}
|
||||
filters={filters}
|
||||
contextQuery={contextQuery}
|
||||
isEdit={isEdit}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.INFRAMETRICS && (
|
||||
<InfraMetrics
|
||||
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
|
||||
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
|
||||
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
|
||||
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
|
||||
timestamp={log.timestamp.toString()}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedView === VIEW_TYPES.CONTEXT && (
|
||||
<ContextView
|
||||
log={log}
|
||||
filters={filters}
|
||||
contextQuery={contextQuery}
|
||||
isEdit={isEdit}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.INFRAMETRICS && (
|
||||
<InfraMetrics
|
||||
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
|
||||
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
|
||||
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
|
||||
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
|
||||
timestamp={log.timestamp.toString()}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>
|
||||
)}
|
||||
{selectedView === VIEW_TYPES.OVERVIEW && (
|
||||
<div className="log-detail-drawer__footer-hint">
|
||||
<div className="log-detail-drawer__footer-hint-content">
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="log-detail-drawer__footer-hint-text"
|
||||
>
|
||||
Use
|
||||
</Typography.Text>
|
||||
<ArrowUp size={14} className="log-detail-drawer__footer-hint-icon" />
|
||||
<span>/</span>
|
||||
<ArrowDown size={14} className="log-detail-drawer__footer-hint-icon" />
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="log-detail-drawer__footer-hint-text"
|
||||
>
|
||||
to view previous/next log
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -110,6 +110,9 @@ type ListLogViewProps = {
|
||||
linesPerRow: number;
|
||||
fontSize: FontSize;
|
||||
handleChangeSelectedView?: ChangeViewFunctionType;
|
||||
isActiveLog?: boolean;
|
||||
onClearActiveLog?: () => void;
|
||||
logs?: ILog[];
|
||||
};
|
||||
|
||||
function ListLogView({
|
||||
@@ -121,6 +124,9 @@ function ListLogView({
|
||||
linesPerRow,
|
||||
fontSize,
|
||||
handleChangeSelectedView,
|
||||
isActiveLog: isActiveLogProp,
|
||||
onClearActiveLog: onClearActiveLogProp,
|
||||
logs,
|
||||
}: ListLogViewProps): JSX.Element {
|
||||
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
|
||||
|
||||
@@ -134,6 +140,7 @@ function ListLogView({
|
||||
onAddToQuery: handleAddToQuery,
|
||||
onSetActiveLog: handleSetActiveContextLog,
|
||||
onClearActiveLog: handleClearActiveContextLog,
|
||||
onClearActiveLog: onClearActiveLogHook,
|
||||
} = useActiveLog();
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -148,8 +155,20 @@ function ListLogView({
|
||||
);
|
||||
|
||||
const handleDetailedView = useCallback(() => {
|
||||
if (isActiveLogProp) {
|
||||
const clearActiveFn = onClearActiveLogProp || onClearActiveLogHook;
|
||||
clearActiveFn();
|
||||
return;
|
||||
}
|
||||
|
||||
onSetActiveLog(logData);
|
||||
}, [logData, onSetActiveLog]);
|
||||
}, [
|
||||
logData,
|
||||
onSetActiveLog,
|
||||
isActiveLogProp,
|
||||
onClearActiveLogProp,
|
||||
onClearActiveLogHook,
|
||||
]);
|
||||
|
||||
const handleShowContext = useCallback(
|
||||
(event: React.MouseEvent) => {
|
||||
@@ -258,6 +277,8 @@ function ListLogView({
|
||||
selectedTab={VIEW_TYPES.CONTEXT}
|
||||
onClose={handlerClearActiveContextLog}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={logs}
|
||||
onNavigateLog={onSetActiveLog}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -40,6 +40,9 @@ function RawLogView({
|
||||
fontSize,
|
||||
onLogClick,
|
||||
handleChangeSelectedView,
|
||||
onSetActiveLog: onSetActiveLogProp,
|
||||
onClearActiveLog: onClearActiveLogProp,
|
||||
managedExternally = false,
|
||||
}: RawLogViewProps): JSX.Element {
|
||||
const {
|
||||
isHighlighted: isUrlHighlighted,
|
||||
@@ -50,8 +53,8 @@ function RawLogView({
|
||||
|
||||
const {
|
||||
activeLog,
|
||||
onSetActiveLog,
|
||||
onClearActiveLog,
|
||||
onSetActiveLog: onSetActiveLogHook,
|
||||
onClearActiveLog: onClearActiveLogHook,
|
||||
onAddToQuery,
|
||||
} = useActiveLog();
|
||||
|
||||
@@ -134,12 +137,35 @@ function RawLogView({
|
||||
// Use custom click handler if provided, otherwise use default behavior
|
||||
if (onLogClick) {
|
||||
onLogClick(data, event);
|
||||
} else {
|
||||
onSetActiveLog(data);
|
||||
setSelectedTab(VIEW_TYPES.OVERVIEW);
|
||||
return;
|
||||
}
|
||||
|
||||
if (managedExternally) {
|
||||
// If the log is already active, clear it. Otherwise, set it as active.
|
||||
if (isActiveLog) {
|
||||
const clearActiveFn = onClearActiveLogProp || onClearActiveLogHook;
|
||||
clearActiveFn();
|
||||
return;
|
||||
}
|
||||
const setActiveFn = onSetActiveLogProp || onSetActiveLogHook;
|
||||
setActiveFn(data);
|
||||
return;
|
||||
}
|
||||
|
||||
onSetActiveLogHook(data);
|
||||
setSelectedTab(VIEW_TYPES.OVERVIEW);
|
||||
},
|
||||
[isReadOnly, data, onSetActiveLog, onLogClick],
|
||||
[
|
||||
isReadOnly,
|
||||
data,
|
||||
onSetActiveLogProp,
|
||||
onSetActiveLogHook,
|
||||
onClearActiveLogProp,
|
||||
onClearActiveLogHook,
|
||||
onLogClick,
|
||||
isActiveLog,
|
||||
managedExternally,
|
||||
],
|
||||
);
|
||||
|
||||
const handleCloseLogDetail: DrawerProps['onClose'] = useCallback(
|
||||
@@ -149,21 +175,22 @@ function RawLogView({
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
|
||||
onClearActiveLog();
|
||||
onClearActiveLogHook();
|
||||
setSelectedTab(undefined);
|
||||
},
|
||||
[onClearActiveLog],
|
||||
[onClearActiveLogHook],
|
||||
);
|
||||
|
||||
const handleShowContext: MouseEventHandler<HTMLElement> = useCallback(
|
||||
(event) => {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
// handleSetActiveContextLog(data);
|
||||
|
||||
const setActiveFn = onSetActiveLogProp || onSetActiveLogHook;
|
||||
setSelectedTab(VIEW_TYPES.CONTEXT);
|
||||
onSetActiveLog(data);
|
||||
setActiveFn(data);
|
||||
},
|
||||
[data, onSetActiveLog],
|
||||
[data, onSetActiveLogProp, onSetActiveLogHook],
|
||||
);
|
||||
|
||||
const html = useMemo(
|
||||
@@ -173,6 +200,9 @@ function RawLogView({
|
||||
[text],
|
||||
);
|
||||
|
||||
// Only render LogDetail when not managed externally and this log is active
|
||||
const showLogDetail = !managedExternally && isActiveLog && selectedTab;
|
||||
|
||||
return (
|
||||
<RawLogViewContainer
|
||||
onClick={handleClickExpand}
|
||||
@@ -219,7 +249,7 @@ function RawLogView({
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedTab && (
|
||||
{showLogDetail && (
|
||||
<LogDetail
|
||||
selectedTab={selectedTab}
|
||||
log={activeLog}
|
||||
|
||||
@@ -45,9 +45,6 @@ export const RawLogViewContainer = styled(Row)<{
|
||||
: `margin: 2px 0;`}
|
||||
}
|
||||
|
||||
${({ $isActiveLog, $logType }): string =>
|
||||
getActiveLogBackground($isActiveLog, true, $logType)}
|
||||
|
||||
${({ $isReadOnly, $isActiveLog, $isDarkMode, $logType }): string =>
|
||||
$isActiveLog
|
||||
? getActiveLogBackground($isActiveLog, $isDarkMode, $logType)
|
||||
|
||||
@@ -16,6 +16,9 @@ export interface RawLogViewProps {
|
||||
selectedFields?: IField[];
|
||||
onLogClick?: (log: ILog, event: MouseEvent) => void;
|
||||
handleChangeSelectedView?: ChangeViewFunctionType;
|
||||
onSetActiveLog?: (log: ILog) => void;
|
||||
onClearActiveLog?: () => void;
|
||||
managedExternally?: boolean;
|
||||
}
|
||||
|
||||
export interface RawLogContentProps {
|
||||
|
||||
@@ -34,230 +34,159 @@ const themeColors = {
|
||||
cyan: '#00FFFF',
|
||||
},
|
||||
chartcolors: {
|
||||
// Blues (3)
|
||||
dodgerBlue: '#2F80ED',
|
||||
royalBlue: '#3366E6',
|
||||
steelBlue: '#4682B4',
|
||||
|
||||
// Teals / Cyans (3)
|
||||
turquoise: '#00CEC9',
|
||||
lagoon: '#1ABC9C',
|
||||
cyanBright: '#22A6F2',
|
||||
|
||||
// Greens (3)
|
||||
emeraldGreen: '#27AE60',
|
||||
mediumSeaGreen: '#3CB371',
|
||||
limeGreen: '#A3E635',
|
||||
|
||||
// Yellows / Golds (3)
|
||||
festivalYellow: '#F2C94C',
|
||||
sunflower: '#FFD93D',
|
||||
warmAmber: '#FFCA28',
|
||||
|
||||
// Oranges (3)
|
||||
festivalOrange: '#F2994A',
|
||||
coralOrange: '#E17055',
|
||||
pumpkin: '#FF7F50',
|
||||
|
||||
// Reds (3)
|
||||
radicalRed: '#FF1A66',
|
||||
crimsonRed: '#EB5757',
|
||||
fireRed: '#E10600',
|
||||
|
||||
// Pinks (3)
|
||||
hotPink: '#E84393',
|
||||
rosePink: '#FD79A8',
|
||||
blush: '#FF7EB6',
|
||||
|
||||
// Purples / Violets (3)
|
||||
mediumPurple: '#BB6BD9',
|
||||
royalPurple: '#9B51E0',
|
||||
orchid: '#DA77F2',
|
||||
|
||||
// Accent / Neon / Unique Colors (3)
|
||||
neonViolet: '#C77DFF',
|
||||
electricPurple: '#6C5CE7',
|
||||
arcticBlue: '#48DBFB',
|
||||
|
||||
// Extended palette — systematic variations to reach 100 colors
|
||||
blue1: '#1F63E0',
|
||||
blue2: '#3A7AED',
|
||||
blue3: '#5A9DF5',
|
||||
cyan1: '#00B0AA',
|
||||
cyan2: '#33D6C2',
|
||||
cyan3: '#66E9DA',
|
||||
green1: '#1E8449',
|
||||
green2: '#2ECC71',
|
||||
green3: '#58D68D',
|
||||
yellow1: '#F1C40F',
|
||||
yellow2: '#F7DC6F',
|
||||
yellow3: '#F9E79F',
|
||||
orange1: '#D35400',
|
||||
orange2: '#E67E22',
|
||||
orange3: '#F5B041',
|
||||
red1: '#C0392B',
|
||||
red2: '#E74C3C',
|
||||
red3: '#EC7063',
|
||||
pink1: '#D81B60',
|
||||
pink2: '#E91E63',
|
||||
pink3: '#F06292',
|
||||
purple1: '#8E44AD',
|
||||
purple2: '#9B59B6',
|
||||
purple3: '#BB8FCE',
|
||||
teal1: '#009688',
|
||||
teal2: '#1ABC9C',
|
||||
teal3: '#48C9B0',
|
||||
lime1: '#A3E635',
|
||||
lime2: '#B9F18D',
|
||||
lime3: '#D4FFB0',
|
||||
gold1: '#F39C12',
|
||||
gold2: '#F1C40F',
|
||||
gold3: '#F7DC6F',
|
||||
coral1: '#E67E22',
|
||||
coral2: '#F39C12',
|
||||
coral3: '#F5B041',
|
||||
crimson1: '#C0392B',
|
||||
crimson2: '#E74C3C',
|
||||
crimson3: '#EC7063',
|
||||
violet1: '#8E44AD',
|
||||
violet2: '#9B59B6',
|
||||
violet3: '#BB8FCE',
|
||||
aqua1: '#00BFFF',
|
||||
aqua2: '#1E90FF',
|
||||
aqua3: '#63B8FF',
|
||||
forest1: '#27AE60',
|
||||
forest2: '#2ECC71',
|
||||
forest3: '#58D68D',
|
||||
blush1: '#FF6F91',
|
||||
blush2: '#FF85A2',
|
||||
blush3: '#FFA0B3',
|
||||
lavender1: '#9B59B6',
|
||||
lavender2: '#AF7AC5',
|
||||
lavender3: '#C39BD3',
|
||||
tomato1: '#E74C3C',
|
||||
tomato2: '#EC7063',
|
||||
tomato3: '#F1948A',
|
||||
salmon1: '#FF6B6B',
|
||||
salmon2: '#FF8787',
|
||||
salmon3: '#FFA1A1',
|
||||
mustard1: '#F1C40F',
|
||||
mustard2: '#F7DC6F',
|
||||
mustard3: '#F9E79F',
|
||||
teal4: '#1ABC9C',
|
||||
teal5: '#48C9B0',
|
||||
teal6: '#76D7C4',
|
||||
magenta1: '#D6336C',
|
||||
magenta2: '#E84393',
|
||||
magenta3: '#F06292',
|
||||
violet4: '#7D3C98',
|
||||
violet5: '#8E44AD',
|
||||
violet6: '#9B59B6',
|
||||
green4: '#229954',
|
||||
green5: '#27AE60',
|
||||
green6: '#52BE80',
|
||||
blue4: '#2874A6',
|
||||
blue5: '#2E86C1',
|
||||
blue6: '#3498DB',
|
||||
red4: '#C0392B',
|
||||
red5: '#E74C3C',
|
||||
red6: '#EC7063',
|
||||
orange4: '#D35400',
|
||||
orange5: '#E67E22',
|
||||
orange6: '#EB984E',
|
||||
pink4: '#C2185B',
|
||||
pink5: '#D81B60',
|
||||
pink6: '#E91E63',
|
||||
gold4: '#B7950B',
|
||||
gold5: '#F1C40F',
|
||||
gold6: '#F4D03F',
|
||||
dodgerBlue: '#2F80ED',
|
||||
mediumOrchid: '#BB6BD9',
|
||||
seaBuckthorn: '#F2994A',
|
||||
seaGreen: '#219653',
|
||||
turquoiseBlue: '#56CCF2',
|
||||
festivalOrange: '#F2C94C',
|
||||
silver: '#BDBDBD',
|
||||
outrageousOrange: '#FF6633',
|
||||
roseBud: '#FFB399',
|
||||
canary: '#FFFF99',
|
||||
deepSkyBlue: '#00B3E6',
|
||||
goldTips: '#E6B333',
|
||||
royalBlue: '#3366E6',
|
||||
avocado: '#999966',
|
||||
mintGreen: '#99FF99',
|
||||
chestnut: '#B34D4D',
|
||||
lima: '#80B300',
|
||||
olive: '#809900',
|
||||
beautyBush: '#E6B3B3',
|
||||
danube: '#6680B3',
|
||||
oliveDrab: '#66991A',
|
||||
lavenderRose: '#FF99E6',
|
||||
electricLime: '#CCFF1A',
|
||||
robin: '#3F5ECC',
|
||||
harleyOrange: '#E6331A',
|
||||
turquoise: '#33FFCC',
|
||||
gladeGreen: '#66994D',
|
||||
hemlock: '#66664D',
|
||||
vidaLoca: '#4D8000',
|
||||
rust: '#B33300',
|
||||
red: '#FF0000', // Adding more colors, we need to get better colors from design team
|
||||
blue: '#0000FF',
|
||||
green: '#00FF00',
|
||||
yellow: '#FFFF00',
|
||||
purple: '#800080',
|
||||
cyan: '#00FFFF',
|
||||
magenta: '#FF00FF',
|
||||
orange: '#FFA500',
|
||||
pink: '#FFC0CB',
|
||||
brown: '#A52A2A',
|
||||
teal: '#008080',
|
||||
lime: '#00FF00',
|
||||
maroon: '#800000',
|
||||
navy: '#000080',
|
||||
aquamarine: '#7FFFD4',
|
||||
darkSeaGreen: '#8FBC8F',
|
||||
gray: '#808080',
|
||||
skyBlue: '#87CEEB',
|
||||
indigo: '#4B0082',
|
||||
slateGray: '#708090',
|
||||
chocolate: '#D2691E',
|
||||
tomato: '#FF6347',
|
||||
steelBlue: '#4682B4',
|
||||
peru: '#CD853F',
|
||||
darkOliveGreen: '#556B2F',
|
||||
indianRed: '#CD5C5C',
|
||||
mediumSlateBlue: '#7B68EE',
|
||||
rosyBrown: '#BC8F8F',
|
||||
darkSlateGray: '#2F4F4F',
|
||||
mediumAquamarine: '#66CDAA',
|
||||
lavender: '#E6E6FA',
|
||||
thistle: '#D8BFD8',
|
||||
salmon: '#FA8072',
|
||||
darkSalmon: '#E9967A',
|
||||
paleVioletRed: '#DB7093',
|
||||
mediumPurple: '#9370DB',
|
||||
darkOrchid: '#9932CC',
|
||||
lawnGreen: '#7CFC00',
|
||||
mediumSeaGreen: '#3CB371',
|
||||
lightCoral: '#F08080',
|
||||
gold: '#FFD700',
|
||||
sandyBrown: '#F4A460',
|
||||
darkKhaki: '#BDB76B',
|
||||
cornflowerBlue: '#6495ED',
|
||||
mediumVioletRed: '#C71585',
|
||||
paleGreen: '#98FB98',
|
||||
},
|
||||
lightModeColor: {
|
||||
radicalRed: '#D81B60',
|
||||
|
||||
dodgerBlueDark: '#1E5BD9',
|
||||
steelgrey: '#344B6B',
|
||||
steelpurple: '#5E548E',
|
||||
steelindigo: '#8E4A7C',
|
||||
steelpink: '#B63A6F',
|
||||
steelcoral: '#E14B5A',
|
||||
steelorange: '#E76F2F',
|
||||
steelgold: '#E09B00',
|
||||
steelrust: '#C93A50',
|
||||
steelgreen: '#2F7D69',
|
||||
|
||||
mediumOrchidDark: '#8E24AA',
|
||||
seaBuckthornDark: '#C75A00',
|
||||
seaGreen: '#1E7F5A',
|
||||
turquoiseBlueDark: '#007EA7',
|
||||
silverDark: '#5F5F5F',
|
||||
outrageousOrangeDark: '#E64A19',
|
||||
roseBudDark: '#D84315',
|
||||
deepSkyBlueDark: '#0277BD',
|
||||
royalBlue: '#2A4FDB',
|
||||
|
||||
avocadoDark: '#6B6B1E',
|
||||
mintGreenDark: '#2E9E55',
|
||||
chestnut: '#8B3A3A',
|
||||
limaDark: '#5C7F00',
|
||||
olive: '#6E7F00',
|
||||
beautyBushDark: '#C93C3C',
|
||||
|
||||
danube: '#4F6FB3',
|
||||
oliveDrab: '#4F7F1A',
|
||||
lavenderRoseDark: '#B0178F',
|
||||
electricLimeDark: '#6B8F00',
|
||||
robin: '#2F4FCC',
|
||||
|
||||
harleyOrange: '#CC2E12',
|
||||
gladeGreen: '#4F7F46',
|
||||
hemlock: '#5C5C45',
|
||||
vidaLoca: '#3D6B00',
|
||||
rust: '#993300',
|
||||
|
||||
red: '#C62828',
|
||||
blue: '#1A237E',
|
||||
green: '#1B7F3A',
|
||||
purple: '#6A1B9A',
|
||||
magentaDark: '#B000B5',
|
||||
pinkDark: '#C2185B',
|
||||
|
||||
brown: '#7A3A1E',
|
||||
teal: '#006D6F',
|
||||
limeDark: '#4C8C2B',
|
||||
maroon: '#6D1B1B',
|
||||
navy: '#0D1B5E',
|
||||
gray: '#616161',
|
||||
|
||||
skyBlueDark: '#0288D1',
|
||||
indigo: '#303F9F',
|
||||
slateGray: '#556B7C',
|
||||
chocolate: '#9C4A1A',
|
||||
tomato: '#E53935',
|
||||
steelBlue: '#3A6EA5',
|
||||
|
||||
peruDark: '#B35E00',
|
||||
darkOliveGreen: '#445B1F',
|
||||
indianRed: '#B04040',
|
||||
mediumSlateBlue: '#5C6BC0',
|
||||
rosyBrownDark: '#A94444',
|
||||
darkSlateGray: '#2E4A4A',
|
||||
|
||||
fuchsia: '#C511C5',
|
||||
salmonDark: '#E64A3C',
|
||||
darkSalmonDark: '#C85A3A',
|
||||
paleVioletRedDark: '#C2186A',
|
||||
|
||||
mediumPurple: '#7E57C2',
|
||||
darkOrchid: '#7B1FA2',
|
||||
mediumSeaGreenDark: '#2E8B57',
|
||||
lightCoralDark: '#E57373',
|
||||
|
||||
gold: '#D4AF37',
|
||||
sandyBrownDark: '#C76A15',
|
||||
darkKhakiDark: '#8A7F00',
|
||||
cornflowerBlueDark: '#355FCC',
|
||||
mediumVioletRed: '#AD1457',
|
||||
paleGreenDark: '#2E7D32',
|
||||
radicalRed: '#FF1A66',
|
||||
dodgerBlueDark: '#0C6EED',
|
||||
steelgrey: '#2f4b7c',
|
||||
steelpurple: '#665191',
|
||||
steelindigo: '#a05195',
|
||||
steelpink: '#d45087',
|
||||
steelcoral: '#f95d6a',
|
||||
steelorange: '#ff7c43',
|
||||
steelgold: '#ffa600',
|
||||
steelrust: '#de425b',
|
||||
steelgreen: '#41967e',
|
||||
mediumOrchidDark: '#C326FD',
|
||||
seaBuckthornDark: '#E66E05',
|
||||
seaGreen: '#219653',
|
||||
turquoiseBlueDark: '#0099CC',
|
||||
silverDark: '#757575',
|
||||
outrageousOrangeDark: '#F9521A',
|
||||
roseBudDark: '#EB6437',
|
||||
deepSkyBlueDark: '#0595BD',
|
||||
royalBlue: '#3366E6',
|
||||
avocadoDark: '#8E8E29',
|
||||
mintGreenDark: '#00C700',
|
||||
chestnut: '#B34D4D',
|
||||
limaDark: '#6E9900',
|
||||
olive: '#809900',
|
||||
beautyBushDark: '#E25555',
|
||||
danube: '#6680B3',
|
||||
oliveDrab: '#66991A',
|
||||
lavenderRoseDark: '#F024BD',
|
||||
electricLimeDark: '#84A800',
|
||||
robin: '#3F5ECC',
|
||||
harleyOrange: '#E6331A',
|
||||
gladeGreen: '#66994D',
|
||||
hemlock: '#66664D',
|
||||
vidaLoca: '#4D8000',
|
||||
rust: '#B33300',
|
||||
red: '#FF0000', // Adding more colors, we need to get better colors from design team
|
||||
blue: '#0000FF',
|
||||
green: '#00FF00',
|
||||
purple: '#800080',
|
||||
magentaDark: '#EB00EB',
|
||||
pinkDark: '#FF3D5E',
|
||||
brown: '#A52A2A',
|
||||
teal: '#008080',
|
||||
limeDark: '#07A207',
|
||||
maroon: '#800000',
|
||||
navy: '#000080',
|
||||
gray: '#808080',
|
||||
skyBlueDark: '#0CA7E4',
|
||||
indigo: '#4B0082',
|
||||
slateGray: '#708090',
|
||||
chocolate: '#D2691E',
|
||||
tomato: '#FF6347',
|
||||
steelBlue: '#4682B4',
|
||||
peruDark: '#D16E0A',
|
||||
darkOliveGreen: '#556B2F',
|
||||
indianRed: '#CD5C5C',
|
||||
mediumSlateBlue: '#7B68EE',
|
||||
rosyBrownDark: '#CB4848',
|
||||
darkSlateGray: '#2F4F4F',
|
||||
fuchsia: '#FF0AFF',
|
||||
salmonDark: '#FF432E',
|
||||
darkSalmonDark: '#D26541',
|
||||
paleVioletRedDark: '#E83089',
|
||||
mediumPurple: '#9370DB',
|
||||
darkOrchid: '#9932CC',
|
||||
mediumSeaGreenDark: '#109E50',
|
||||
lightCoralDark: '#F85959',
|
||||
gold: '#FFD700',
|
||||
sandyBrownDark: '#D97117',
|
||||
darkKhakiDark: '#99900A',
|
||||
cornflowerBlueDark: '#3371E6',
|
||||
mediumVioletRed: '#C71585',
|
||||
paleGreenDark: '#0D910D',
|
||||
},
|
||||
errorColor: '#d32f2f',
|
||||
royalGrey: '#888888',
|
||||
|
||||
@@ -18,8 +18,8 @@ import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynam
|
||||
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
|
||||
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashboardVariables/sortVariableValues';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { isEmpty, map } from 'lodash-es';
|
||||
import {
|
||||
ArrowLeft,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { memo, useMemo } from 'react';
|
||||
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashboardVariables/sortVariableValues';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
|
||||
import SelectVariableInput from './SelectVariableInput';
|
||||
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
|
||||
|
||||
@@ -3,7 +3,7 @@ import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import sortValues from 'lib/dashboardVariables/sortVariableValues';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { isArray, isString } from 'lodash-es';
|
||||
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { AppState } from 'store/reducers';
|
||||
|
||||
@@ -33,8 +33,8 @@ import { useChartMutable } from 'hooks/useChartMutable';
|
||||
import useComponentPermission from 'hooks/useComponentPermission';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
|
||||
import GetMinMax from 'lib/getMinMax';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
|
||||
@@ -8,8 +8,8 @@ import { populateMultipleResults } from 'container/NewWidget/LeftContainer/Widge
|
||||
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
|
||||
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
|
||||
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
|
||||
import getTimeString from 'lib/getTimeString';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import isEmpty from 'lodash-es/isEmpty';
|
||||
|
||||
@@ -6,7 +6,7 @@ import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
|
||||
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
|
||||
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
/* eslint-disable no-nested-ternary */
|
||||
import { useCallback, useEffect, useMemo } from 'react';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { Virtuoso } from 'react-virtuoso';
|
||||
import { Card } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
import RawLogView from 'components/Logs/RawLogView';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
|
||||
@@ -11,6 +13,7 @@ import LogsError from 'container/LogsError/LogsError';
|
||||
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -40,6 +43,16 @@ function EntityLogs({
|
||||
category,
|
||||
queryKeyFilters,
|
||||
}: Props): JSX.Element {
|
||||
const {
|
||||
activeLog,
|
||||
onSetActiveLog,
|
||||
onClearActiveLog,
|
||||
onAddToQuery,
|
||||
} = useActiveLog();
|
||||
const [selectedTab, setSelectedTab] = useState<
|
||||
typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined
|
||||
>();
|
||||
|
||||
const basePayload = getEntityEventsOrLogsQueryPayload(
|
||||
timeRange.startTime,
|
||||
timeRange.endTime,
|
||||
@@ -62,6 +75,25 @@ function EntityLogs({
|
||||
basePayload,
|
||||
});
|
||||
|
||||
const handleSetActiveLogWithTab = useCallback(
|
||||
(log: ILog): void => {
|
||||
// If clicking the same log that's already active, close it
|
||||
if (activeLog?.id === log.id) {
|
||||
onClearActiveLog();
|
||||
setSelectedTab(undefined);
|
||||
return;
|
||||
}
|
||||
onSetActiveLog(log);
|
||||
setSelectedTab(VIEW_TYPES.OVERVIEW);
|
||||
},
|
||||
[onSetActiveLog, activeLog, onClearActiveLog],
|
||||
);
|
||||
|
||||
const handleCloseLogDetail = useCallback((): void => {
|
||||
onClearActiveLog();
|
||||
setSelectedTab(undefined);
|
||||
}, [onClearActiveLog]);
|
||||
|
||||
const getItemContent = useCallback(
|
||||
(_: number, logToRender: ILog): JSX.Element => (
|
||||
<RawLogView
|
||||
@@ -82,9 +114,13 @@ function EntityLogs({
|
||||
name: 'timestamp',
|
||||
},
|
||||
]}
|
||||
onSetActiveLog={handleSetActiveLogWithTab}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
isActiveLog={activeLog?.id === logToRender.id}
|
||||
managedExternally
|
||||
/>
|
||||
),
|
||||
[],
|
||||
[activeLog, handleSetActiveLogWithTab, handleCloseLogDetail],
|
||||
);
|
||||
|
||||
const { data, isLoading, isFetching, isError } = useQuery({
|
||||
@@ -156,6 +192,17 @@ function EntityLogs({
|
||||
{!isLoading && !isError && logs.length > 0 && (
|
||||
<div className="entity-logs-list-container">{renderContent}</div>
|
||||
)}
|
||||
{selectedTab && activeLog && (
|
||||
<LogDetail
|
||||
log={activeLog}
|
||||
onClose={handleCloseLogDetail}
|
||||
logs={logs}
|
||||
onNavigateLog={handleSetActiveLogWithTab}
|
||||
selectedTab={selectedTab}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
|
||||
import { Card, Typography } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
@@ -43,6 +43,10 @@ function LiveLogsList({
|
||||
onSetActiveLog,
|
||||
} = useActiveLog();
|
||||
|
||||
const [selectedTab, setSelectedTab] = useState<
|
||||
typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined
|
||||
>();
|
||||
|
||||
// get only data from the logs object
|
||||
const formattedLogs: ILog[] = useMemo(
|
||||
() => logs.map((log) => log?.data).flat(),
|
||||
@@ -65,6 +69,19 @@ function LiveLogsList({
|
||||
...options.selectColumns,
|
||||
]);
|
||||
|
||||
const handleSetActiveLogWithTab = useCallback(
|
||||
(log: ILog) => {
|
||||
onSetActiveLog(log);
|
||||
setSelectedTab(VIEW_TYPES.OVERVIEW);
|
||||
},
|
||||
[onSetActiveLog],
|
||||
);
|
||||
|
||||
const handleCloseLogDetail = useCallback(() => {
|
||||
onClearActiveLog();
|
||||
setSelectedTab(undefined);
|
||||
}, [onClearActiveLog]);
|
||||
|
||||
const getItemContent = useCallback(
|
||||
(_: number, log: ILog): JSX.Element => {
|
||||
if (options.format === 'raw') {
|
||||
@@ -72,10 +89,14 @@ function LiveLogsList({
|
||||
<RawLogView
|
||||
key={log.id}
|
||||
data={log}
|
||||
isActiveLog={activeLog?.id === log.id}
|
||||
linesPerRow={options.maxLines}
|
||||
selectedFields={selectedFields}
|
||||
fontSize={options.fontSize}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
onSetActiveLog={handleSetActiveLogWithTab}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
managedExternally
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -84,23 +105,29 @@ function LiveLogsList({
|
||||
<ListLogView
|
||||
key={log.id}
|
||||
logData={log}
|
||||
isActiveLog={activeLog?.id === log.id}
|
||||
selectedFields={selectedFields}
|
||||
linesPerRow={options.maxLines}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onSetActiveLog={onSetActiveLog}
|
||||
onSetActiveLog={handleSetActiveLogWithTab}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
fontSize={options.fontSize}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={formattedLogs}
|
||||
/>
|
||||
);
|
||||
},
|
||||
[
|
||||
handleChangeSelectedView,
|
||||
onAddToQuery,
|
||||
onSetActiveLog,
|
||||
options.fontSize,
|
||||
options.format,
|
||||
options.maxLines,
|
||||
options.fontSize,
|
||||
activeLog?.id,
|
||||
selectedFields,
|
||||
onAddToQuery,
|
||||
handleSetActiveLogWithTab,
|
||||
handleCloseLogDetail,
|
||||
handleChangeSelectedView,
|
||||
formattedLogs,
|
||||
],
|
||||
);
|
||||
|
||||
@@ -156,6 +183,10 @@ function LiveLogsList({
|
||||
activeLogIndex,
|
||||
}}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={formattedLogs}
|
||||
onSetActiveLog={handleSetActiveLogWithTab}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
activeLog={activeLog}
|
||||
/>
|
||||
) : (
|
||||
<Card style={{ width: '100%' }} bodyStyle={CARD_BODY_STYLE}>
|
||||
@@ -173,14 +204,16 @@ function LiveLogsList({
|
||||
</InfinityWrapperStyled>
|
||||
)}
|
||||
|
||||
{activeLog && (
|
||||
{activeLog && selectedTab && (
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onClose={handleCloseLogDetail}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={formattedLogs}
|
||||
onNavigateLog={handleSetActiveLogWithTab}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -325,7 +325,7 @@ export default function TableViewActions(
onOpenChange={setIsOpen}
arrow={false}
content={
<div>
<div data-log-detail-ignore="true">
<Button
className="group-by-clause"
type="text"
@@ -403,7 +403,7 @@ export default function TableViewActions(
onOpenChange={setIsOpen}
arrow={false}
content={
<div>
<div data-log-detail-ignore="true">
<Button
className="group-by-clause"
type="text"
|
||||
|
||||
@@ -27,6 +27,8 @@ interface TableRowProps {
logs: ILog[];
hasActions: boolean;
fontSize: FontSize;
isActiveLog?: boolean;
onClearActiveLog?: () => void;
}

export default function TableRow({
@@ -38,6 +40,8 @@ export default function TableRow({
logs,
hasActions,
fontSize,
isActiveLog,
onClearActiveLog,
}: TableRowProps): JSX.Element {
const isDarkMode = useIsDarkMode();

@@ -62,11 +66,21 @@ export default function TableRow({
);

const handleShowLogDetails = useCallback(() => {
if (!onShowLogDetails || !currentLog) {
if (!currentLog) {
return;
}
onShowLogDetails(currentLog);
}, [currentLog, onShowLogDetails]);

// If this log is already active, close the detail drawer
if (isActiveLog && onClearActiveLog) {
onClearActiveLog();
return;
}

// Otherwise, open the detail drawer for this log
if (onShowLogDetails) {
onShowLogDetails(currentLog);
}
}, [currentLog, onShowLogDetails, isActiveLog, onClearActiveLog]);

const hasSingleColumn =
tableColumns.filter((column) => column.key !== 'state-indicator').length ===
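The handler above repeats a toggle that also appears in handleSetActiveLogWithTab in HostMetricsLogs and EntityLogs and in RawLogView's managedExternally branch: clicking the log that is already active closes the detail drawer instead of reopening it. A distilled sketch of that pattern with generic names; none of these identifiers are part of the diff:

// Re-clicking the active log closes the drawer; clicking any other log opens it.
function toggleActiveLog<T extends { id: string }>(
	clicked: T,
	activeId: string | undefined,
	open: (log: T) => void,
	close: () => void,
): void {
	if (activeId === clicked.id) {
		close();
		return;
	}
	open(clicked);
}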
|
||||
|
||||
@@ -58,7 +58,16 @@ const CustomTableRow: TableComponents<ILog>['TableRow'] = ({
|
||||
|
||||
const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
function InfinityTableView(
|
||||
{ isLoading, tableViewProps, infitiyTableProps, handleChangeSelectedView },
|
||||
{
|
||||
isLoading,
|
||||
tableViewProps,
|
||||
infitiyTableProps,
|
||||
handleChangeSelectedView,
|
||||
logs,
|
||||
onSetActiveLog: onSetActiveLogProp,
|
||||
onClearActiveLog: onClearActiveLogProp,
|
||||
activeLog: activeLogProp,
|
||||
},
|
||||
ref,
|
||||
): JSX.Element | null {
|
||||
const {
|
||||
@@ -69,11 +78,30 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
} = useActiveLog();
|
||||
const {
|
||||
activeLog,
|
||||
onSetActiveLog,
|
||||
onClearActiveLog,
|
||||
onSetActiveLog: onSetActiveLogHook,
|
||||
onClearActiveLog: onClearActiveLogHook,
|
||||
onAddToQuery,
|
||||
} = useActiveLog();
|
||||
|
||||
const hasExternalActiveLog =
|
||||
onSetActiveLogProp !== undefined ||
|
||||
onClearActiveLogProp !== undefined ||
|
||||
activeLogProp !== undefined;
|
||||
const activeOverviewLog = activeLogProp ?? activeLog;
|
||||
|
||||
const onSetActiveLog = useCallback(
|
||||
(log: ILog) => {
|
||||
onSetActiveLogProp?.(log);
|
||||
onSetActiveLogHook(log);
|
||||
},
|
||||
[onSetActiveLogProp, onSetActiveLogHook],
|
||||
);
|
||||
|
||||
const onClearActiveLog = useCallback(() => {
|
||||
onClearActiveLogProp?.();
|
||||
onClearActiveLogHook();
|
||||
}, [onClearActiveLogProp, onClearActiveLogHook]);
|
||||
|
||||
const { dataSource, columns } = useTableView({
|
||||
...tableViewProps,
|
||||
onClickExpand: onSetActiveLog,
|
||||
@@ -108,6 +136,8 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
hasActions
|
||||
fontSize={tableViewProps.fontSize}
|
||||
onShowLogDetails={onSetActiveLog}
|
||||
isActiveLog={activeOverviewLog?.id === log.id}
|
||||
onClearActiveLog={onClearActiveLog}
|
||||
/>
|
||||
),
|
||||
[
|
||||
@@ -116,9 +146,10 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
tableViewProps.fontSize,
|
||||
tableViewProps.logs,
|
||||
onSetActiveLog,
|
||||
activeOverviewLog?.id,
|
||||
onClearActiveLog,
|
||||
],
|
||||
);
|
||||
|
||||
const tableHeader = useCallback(
|
||||
() => (
|
||||
<tr>
|
||||
@@ -167,7 +198,7 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
...props,
|
||||
context: {
|
||||
activeContextLogId: activeContextLog?.id,
|
||||
activeLogId: activeLog?.id,
|
||||
activeLogId: activeOverviewLog?.id,
|
||||
},
|
||||
} as any),
|
||||
}}
|
||||
@@ -189,14 +220,18 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
)}
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
{!hasExternalActiveLog && (
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={logs}
|
||||
onNavigateLog={onSetActiveLog}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
},
|
||||
|
||||
@@ -1,5 +1,6 @@
import { UseTableViewProps } from 'components/Logs/TableView/types';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
import { ILog } from 'types/api/logs/log';

export type InfinityTableProps = {
isLoading?: boolean;
@@ -8,4 +9,8 @@ export type InfinityTableProps = {
onEndReached: (index: number) => void;
};
handleChangeSelectedView?: ChangeViewFunctionType;
logs?: ILog[];
onSetActiveLog?: (log: ILog) => void;
onClearActiveLog?: () => void;
activeLog?: ILog | null;
};
|
||||
|
||||
@@ -1,4 +1,4 @@
import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { Card } from 'antd';
import logEvent from 'api/common/logEvent';
@@ -60,6 +60,10 @@ function LogsExplorerList({
onSetActiveLog,
} = useActiveLog();

const [selectedTab, setSelectedTab] = useState<
typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined
>();

const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: DataSource.LOGS,
@@ -82,6 +86,20 @@ function LogsExplorerList({
() => convertKeysToColumnFields(options.selectColumns),
[options],
);

const handleSetActiveLogWithTab = useCallback(
(log: ILog) => {
onSetActiveLog(log);
setSelectedTab(VIEW_TYPES.OVERVIEW);
},
[onSetActiveLog],
);

const handleCloseLogDetail = useCallback(() => {
onClearActiveLog();
setSelectedTab(undefined);
}, [onClearActiveLog]);

useEffect(() => {
if (!isLoading && !isFetching && !isError && logs.length !== 0) {
logEvent('Logs Explorer: Data present', {
@@ -97,10 +115,14 @@ function LogsExplorerList({
<RawLogView
key={log.id}
data={log}
isActiveLog={activeLog?.id === log.id}
linesPerRow={options.maxLines}
selectedFields={selectedFields}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
onSetActiveLog={handleSetActiveLogWithTab}
onClearActiveLog={handleCloseLogDetail}
managedExternally
/>
);
}
@@ -109,21 +131,26 @@ function LogsExplorerList({
<ListLogView
key={log.id}
logData={log}
isActiveLog={activeLog?.id === log.id}
selectedFields={selectedFields}
onAddToQuery={onAddToQuery}
onSetActiveLog={onSetActiveLog}
onSetActiveLog={handleSetActiveLogWithTab}
activeLog={activeLog}
fontSize={options.fontSize}
linesPerRow={options.maxLines}
handleChangeSelectedView={handleChangeSelectedView}
onClearActiveLog={handleCloseLogDetail}
logs={logs}
/>
);
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[
activeLog,
handleChangeSelectedView,
onAddToQuery,
onSetActiveLog,
handleSetActiveLogWithTab,
options.fontSize,
options.format,
options.maxLines,
@@ -153,6 +180,10 @@ function LogsExplorerList({
}}
infitiyTableProps={{ onEndReached }}
handleChangeSelectedView={handleChangeSelectedView}
logs={logs}
onSetActiveLog={handleSetActiveLogWithTab}
onClearActiveLog={handleCloseLogDetail}
activeLog={activeLog}
/>
);
}
@@ -199,6 +230,9 @@ function LogsExplorerList({
getItemContent,
selectedFields,
handleChangeSelectedView,
handleSetActiveLogWithTab,
handleCloseLogDetail,
activeLog,
]);

const isTraceToLogsNavigation = useMemo(() => {
@@ -278,14 +312,18 @@ function LogsExplorerList({
{renderContent}
</InfinityWrapperStyled>

<LogDetail
selectedTab={VIEW_TYPES.OVERVIEW}
log={activeLog}
onClose={onClearActiveLog}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
/>
{selectedTab && activeLog && (
<LogDetail
selectedTab={selectedTab}
log={activeLog}
onClose={handleCloseLogDetail}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
logs={logs}
onNavigateLog={handleSetActiveLogWithTab}
/>
)}
</>
)}
</div>
@@ -466,7 +466,10 @@ function LogsExplorerViewsContainer({
</div>
)}

<div className="logs-explorer-views-type-content">
<div
className="logs-explorer-views-type-content"
data-log-detail-ignore="true"
>
{showLiveLogs && (
<LiveLogs handleChangeSelectedView={handleChangeSelectedView} />
)}
@@ -27,8 +27,8 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';
@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { ServicesList } from 'types/api/metrics/getService';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';
@@ -13,7 +13,7 @@ import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useNotifications } from 'hooks/useNotifications';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
@@ -3,8 +3,8 @@ import { useSelector } from 'react-redux';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
@@ -14,7 +14,7 @@ describe('Get Series Data', () => {
expect(seriesData.length).toBe(5);
expect(seriesData[1].label).toBe('firstLegend');
expect(seriesData[1].show).toBe(true);
expect(seriesData[1].fill).toBe('#FF6F91');
expect(seriesData[1].fill).toBe('#C71585');
expect(seriesData[1].width).toBe(2);
});
@@ -1,7 +1,7 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { commaValuesParser } from '../../lib/dashboardVariables/customCommaValuesParser';
import { commaValuesParser } from '../../lib/dashbaordVariables/customCommaValuesParser';

interface UrlVariables {
[key: string]: any;
@@ -567,6 +567,15 @@ body {
border: 1px solid var(--bg-vanilla-300);
}

.ant-tooltip {
--antd-arrow-background-color: var(--bg-vanilla-100);

.ant-tooltip-inner {
background-color: var(--bg-vanilla-100);
color: var(---bg-ink-500);
}
}

.ant-dropdown-menu {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
@@ -9,10 +9,9 @@ export const getDefaultLogBackground = (
if (isReadOnly) {
return '';
}
// TODO handle the light mode here
return `&:hover {
background-color: ${
isDarkMode ? 'rgba(171, 189, 255, 0.04)' : 'var(--bg-vanilla-200)'
isDarkMode ? 'rgba(171, 189, 255, 0.04)' : 'rgba(0, 0, 0, 0.04)'
};
}`;
};
@@ -28,22 +27,38 @@ export const getActiveLogBackground = (
if (isDarkMode) {
switch (logType) {
case LogType.INFO:
return `background-color: ${Color.BG_ROBIN_500}10 !important;`;
return `background-color: ${Color.BG_ROBIN_500}40 !important;`;
case LogType.WARN:
return `background-color: ${Color.BG_AMBER_500}10 !important;`;
return `background-color: ${Color.BG_AMBER_500}40 !important;`;
case LogType.ERROR:
return `background-color: ${Color.BG_CHERRY_500}10 !important;`;
return `background-color: ${Color.BG_CHERRY_500}40 !important;`;
case LogType.TRACE:
return `background-color: ${Color.BG_FOREST_400}10 !important;`;
return `background-color: ${Color.BG_FOREST_400}40 !important;`;
case LogType.DEBUG:
return `background-color: ${Color.BG_AQUA_500}10 !important;`;
return `background-color: ${Color.BG_AQUA_500}40 !important;`;
case LogType.FATAL:
return `background-color: ${Color.BG_SAKURA_500}10 !important;`;
return `background-color: ${Color.BG_SAKURA_500}40 !important;`;
default:
return `background-color: ${Color.BG_SLATE_200} !important;`;
return `background-color: ${Color.BG_ROBIN_500}40 !important;`;
}
}
return `background-color: ${Color.BG_VANILLA_400}!important; color: ${Color.TEXT_SLATE_400} !important;`;
// Light mode - use lighter background colors
switch (logType) {
case LogType.INFO:
return `background-color: ${Color.BG_ROBIN_100} !important;`;
case LogType.WARN:
return `background-color: ${Color.BG_AMBER_100} !important;`;
case LogType.ERROR:
return `background-color: ${Color.BG_CHERRY_100} !important;`;
case LogType.TRACE:
return `background-color: ${Color.BG_FOREST_200} !important;`;
case LogType.DEBUG:
return `background-color: ${Color.BG_AQUA_100} !important;`;
case LogType.FATAL:
return `background-color: ${Color.BG_SAKURA_100} !important;`;
default:
return `background-color: ${Color.BG_VANILLA_300} !important;`;
}
};

export const getHightLightedLogBackground = (
127
pkg/apis/fields/api.go
Normal file
@@ -0,0 +1,127 @@
package fields

import (
"bytes"
"io"
"net/http"

"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type API struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
}

// TODO: move this to module and remove metastore init
func NewAPI(
settings factory.ProviderSettings,
telemetryStore telemetrystore.TelemetryStore,
) *API {
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
settings,
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)

return &API{
telemetryStore: telemetryStore,
telemetryMetadataStore: telemetryMetadataStore,
}
}

func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {

type fieldKeysResponse struct {
Keys map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
Complete bool `json:"complete"`
}

bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()

fieldKeySelector, err := parseFieldKeyRequest(r)
if err != nil {
render.Error(w, err)
return
}

keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
}

response := fieldKeysResponse{
Keys: keys,
Complete: complete,
}

render.Success(w, http.StatusOK, response)
}

func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {

type fieldValuesResponse struct {
Values *telemetrytypes.TelemetryFieldValues `json:"values"`
Complete bool `json:"complete"`
}

bodyBytes, _ := io.ReadAll(r.Body)
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := r.Context()

fieldValueSelector, err := parseFieldValueRequest(r)
if err != nil {
render.Error(w, err)
return
}

allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}

relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return error if we fail to get related values for some reason
relatedValues = []string{}
}

values := &telemetrytypes.TelemetryFieldValues{
StringValues: allValues.StringValues,
NumberValues: allValues.NumberValues,
RelatedValues: relatedValues,
}

response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
}

render.Success(w, http.StatusOK, response)
}
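For orientation, here is a minimal Go sketch of how the GetFieldsKeys handler above might be exercised over HTTP once it is mounted at /api/v1/fields/keys (the route path appears later in this diff). The base URL, the absence of authentication, and the exact response envelope written by render.Success are assumptions for illustration, not taken from this change.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Query parameters understood by parseFieldKeyRequest (see parse.go below).
	params := url.Values{}
	params.Set("signal", "logs")             // optional; empty means SignalUnspecified
	params.Set("searchText", "service.name") // field context may be inferred from the key text
	params.Set("limit", "100")               // server falls back to 1000 when omitted

	// Hypothetical base URL; a real deployment would also require auth headers.
	resp, err := http.Get("http://localhost:8080/api/v1/fields/keys?" + params.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Decode loosely because the envelope produced by render.Success is assumed here.
	var body map[string]any
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		panic(err)
	}
	fmt.Println(body)
}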
162
pkg/apis/fields/parse.go
Normal file
@@ -0,0 +1,162 @@
package fields

import (
"net/http"
"strconv"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
var req telemetrytypes.FieldKeySelector
var signal telemetrytypes.Signal
var source telemetrytypes.Source
var err error

signalStr := r.URL.Query().Get("signal")
if signalStr != "" {
signal = telemetrytypes.Signal{String: valuer.NewString(signalStr)}
} else {
signal = telemetrytypes.SignalUnspecified
}

sourceStr := r.URL.Query().Get("source")
if sourceStr != "" {
source = telemetrytypes.Source{String: valuer.NewString(sourceStr)}
} else {
source = telemetrytypes.SourceUnspecified
}

if r.URL.Query().Get("limit") != "" {
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
}
req.Limit = limit
} else {
req.Limit = 1000
}

var startUnixMilli, endUnixMilli int64

if r.URL.Query().Get("startUnixMilli") != "" {
startUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("startUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
}
// Round down to the nearest 6 hours (21600000 milliseconds)
startUnixMilli -= startUnixMilli % 21600000
}
if r.URL.Query().Get("endUnixMilli") != "" {
endUnixMilli, err = strconv.ParseInt(r.URL.Query().Get("endUnixMilli"), 10, 64)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
}
}

// Parse fieldContext directly instead of using JSON unmarshalling.
var fieldContext telemetrytypes.FieldContext
fieldContextStr := r.URL.Query().Get("fieldContext")
if fieldContextStr != "" {
fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(fieldContextStr)}
}

// Parse fieldDataType directly instead of using JSON unmarshalling.
var fieldDataType telemetrytypes.FieldDataType
fieldDataTypeStr := r.URL.Query().Get("fieldDataType")
if fieldDataTypeStr != "" {
fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(fieldDataTypeStr)}
}

metricName := r.URL.Query().Get("metricName")
var metricContext *telemetrytypes.MetricContext
if metricName != "" {
metricContext = &telemetrytypes.MetricContext{
MetricName: metricName,
}
}

name := r.URL.Query().Get("searchText")

if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
name = parsedFieldKey.Name
fieldContext = parsedFieldKey.FieldContext
}
}
}

req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
EndUnixMilli: endUnixMilli,
Signal: signal,
Source: source,
Name: name,
FieldContext: fieldContext,
FieldDataType: fieldDataType,
Limit: req.Limit,
SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
MetricContext: metricContext,
}
return &req, nil
}

func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
keySelector, err := parseFieldKeyRequest(r)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}

name := r.URL.Query().Get("name")
if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// Only apply inferred context if it is valid for the current signal
if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
name = parsedFieldKey.Name
keySelector.FieldContext = parsedFieldKey.FieldContext
}
}
}
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("searchText")

// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))
if err != nil {
limit = 50
}

req := telemetrytypes.FieldValueSelector{
FieldKeySelector: keySelector,
ExistingQuery: existingQuery,
Value: value,
Limit: limit,
}

return &req, nil
}

func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
if ctx == telemetrytypes.FieldContextResource ||
ctx == telemetrytypes.FieldContextAttribute ||
ctx == telemetrytypes.FieldContextScope {
return true
}

switch signal.StringValue() {
case telemetrytypes.SignalLogs.StringValue():
return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
case telemetrytypes.SignalTraces.StringValue():
return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
case telemetrytypes.SignalMetrics.StringValue():
return ctx == telemetrytypes.FieldContextMetric
}
return true
}
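A small, self-contained Go example of the start-time handling in parseFieldKeyRequest above: startUnixMilli is rounded down to the nearest 6 hours (21,600,000 ms) before it is placed in the selector, presumably so that nearby requests share the same effective window. The sample timestamp below is arbitrary and only demonstrates the arithmetic.

package main

import (
	"fmt"
	"time"
)

func main() {
	const sixHoursMs = int64(6 * time.Hour / time.Millisecond) // 21600000

	startUnixMilli := int64(1700001234567)                // 2023-11-14T22:33:54.567Z
	rounded := startUnixMilli - startUnixMilli%sixHoursMs // same expression as in parseFieldKeyRequest

	fmt.Println(rounded)                                            // 1699984800000
	fmt.Println(time.UnixMilli(rounded).UTC().Format(time.RFC3339)) // 2023-11-14T18:00:00Z
}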
@@ -1,50 +0,0 @@
package signozapiserver

import (
"net/http"

"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)

func (provider *provider) addFieldsRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), handler.OpenAPIDef{
ID: "GetFieldsKeys",
Tags: []string{"fields"},
Summary: "Get field keys",
Description: "This endpoint returns field keys",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldKeysParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldKeys),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}

if err := router.Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), handler.OpenAPIDef{
ID: "GetFieldsValues",
Tags: []string{"fields"},
Summary: "Get field values",
Description: "This endpoint returns field values",
Request: nil,
RequestQuery: new(telemetrytypes.PostableFieldValueParams),
RequestContentType: "",
Response: new(telemetrytypes.GettableFieldValues),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}

return nil
}
@@ -13,11 +13,11 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/fields"
|
||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
@@ -42,8 +42,8 @@ type provider struct {
|
||||
dashboardHandler dashboard.Handler
|
||||
metricsExplorerHandler metricsexplorer.Handler
|
||||
gatewayHandler gateway.Handler
|
||||
fieldsHandler fields.Handler
|
||||
authzHandler authz.Handler
|
||||
roleGetter role.Getter
|
||||
roleHandler role.Handler
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -61,31 +61,11 @@ func NewFactory(
|
||||
dashboardHandler dashboard.Handler,
|
||||
metricsExplorerHandler metricsexplorer.Handler,
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
roleGetter role.Getter,
|
||||
roleHandler role.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(
|
||||
ctx,
|
||||
providerSettings,
|
||||
config,
|
||||
orgGetter,
|
||||
authz,
|
||||
orgHandler,
|
||||
userHandler,
|
||||
sessionHandler,
|
||||
authDomainHandler,
|
||||
preferenceHandler,
|
||||
globalHandler,
|
||||
promoteHandler,
|
||||
flaggerHandler,
|
||||
dashboardModule,
|
||||
dashboardHandler,
|
||||
metricsExplorerHandler,
|
||||
gatewayHandler,
|
||||
fieldsHandler,
|
||||
authzHandler,
|
||||
)
|
||||
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler, roleGetter, roleHandler)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -107,8 +87,8 @@ func newProvider(
|
||||
dashboardHandler dashboard.Handler,
|
||||
metricsExplorerHandler metricsexplorer.Handler,
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
roleGetter role.Getter,
|
||||
roleHandler role.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
@@ -129,11 +109,11 @@ func newProvider(
|
||||
dashboardHandler: dashboardHandler,
|
||||
metricsExplorerHandler: metricsExplorerHandler,
|
||||
gatewayHandler: gatewayHandler,
|
||||
fieldsHandler: fieldsHandler,
|
||||
authzHandler: authzHandler,
|
||||
roleGetter: roleGetter,
|
||||
roleHandler: roleHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
|
||||
|
||||
if err := provider.AddToRouter(router); err != nil {
|
||||
return nil, err
|
||||
@@ -195,10 +175,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addFieldsRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ import (
|
||||
)
|
||||
|
||||
func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
|
||||
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
|
||||
ID: "CreateRole",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Create role",
|
||||
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
|
||||
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
|
||||
ID: "ListRoles",
|
||||
Tags: []string{"role"},
|
||||
Summary: "List roles",
|
||||
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
|
||||
ID: "GetRole",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Get role",
|
||||
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
|
||||
ID: "PatchRole",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Patch role",
|
||||
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
|
||||
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
|
||||
ID: "DeleteRole",
|
||||
Tags: []string{"role"},
|
||||
Summary: "Delete role",
|
||||
|
||||
@@ -2,11 +2,9 @@ package authz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
)
|
||||
@@ -31,76 +29,4 @@ type AuthZ interface {
|
||||
|
||||
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
|
||||
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
|
||||
|
||||
// Creates the role.
|
||||
Create(context.Context, valuer.UUID, *roletypes.Role) error
|
||||
|
||||
// Gets the role if it exists or creates one.
|
||||
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
|
||||
|
||||
// Gets the objects associated with the given role and relation.
|
||||
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
|
||||
|
||||
// Gets all the typeable resources registered from role registry.
|
||||
GetResources(context.Context) []*authtypes.Resource
|
||||
|
||||
// Patches the role.
|
||||
Patch(context.Context, valuer.UUID, *roletypes.Role) error
|
||||
|
||||
// Patches the objects in authorization server associated with the given role and relation
|
||||
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
|
||||
|
||||
// Deletes the role and tuples in authorization server.
|
||||
Delete(context.Context, valuer.UUID, valuer.UUID) error
|
||||
|
||||
// Gets the role
|
||||
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
|
||||
|
||||
// Gets the role by org_id and name
|
||||
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
|
||||
|
||||
// Lists all the roles for the organization.
|
||||
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
|
||||
|
||||
// Lists all the roles for the organization filtered by name
|
||||
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
|
||||
|
||||
// Grants a role to the subject based on role name.
|
||||
Grant(context.Context, valuer.UUID, string, string) error
|
||||
|
||||
// Revokes a granted role from the subject based on role name.
|
||||
Revoke(context.Context, valuer.UUID, string, string) error
|
||||
|
||||
// Changes the granted role for the subject based on role name.
|
||||
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
|
||||
|
||||
// Bootstrap the managed roles.
|
||||
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
|
||||
|
||||
// Bootstrap managed roles transactions and user assignments
|
||||
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
|
||||
}
|
||||
|
||||
type RegisterTypeable interface {
|
||||
MustGetTypeables() []authtypes.Typeable
|
||||
|
||||
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
Create(http.ResponseWriter, *http.Request)
|
||||
|
||||
Get(http.ResponseWriter, *http.Request)
|
||||
|
||||
GetObjects(http.ResponseWriter, *http.Request)
|
||||
|
||||
GetResources(http.ResponseWriter, *http.Request)
|
||||
|
||||
List(http.ResponseWriter, *http.Request)
|
||||
|
||||
Patch(http.ResponseWriter, *http.Request)
|
||||
|
||||
PatchObjects(http.ResponseWriter, *http.Request)
|
||||
|
||||
Delete(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package openfgaserver
|
||||
package openfgaauthz
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -2,24 +2,35 @@ package openfgaauthz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
authz "github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
|
||||
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
||||
openfgapkgserver "github.com/openfga/openfga/pkg/server"
|
||||
"google.golang.org/protobuf/encoding/protojson"
|
||||
)
|
||||
|
||||
var (
|
||||
openfgaDefaultStore = valuer.NewString("signoz")
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
server *openfgaserver.Server
|
||||
store roletypes.Store
|
||||
config authz.Config
|
||||
settings factory.ScopedProviderSettings
|
||||
openfgaSchema []openfgapkgtransformer.ModuleFile
|
||||
openfgaServer *openfgapkgserver.Server
|
||||
storeID string
|
||||
modelID string
|
||||
mtx sync.RWMutex
|
||||
stopChan chan struct{}
|
||||
}
|
||||
|
||||
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
@@ -29,194 +40,301 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
|
||||
}
|
||||
|
||||
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
|
||||
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
|
||||
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
|
||||
|
||||
store, err := NewSQLStore(sqlstore)
|
||||
if err != nil {
|
||||
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// setup the openfga server
|
||||
opts := []openfgapkgserver.OpenFGAServiceV1Option{
|
||||
openfgapkgserver.WithDatastore(store),
|
||||
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
|
||||
openfgapkgserver.WithContextPropagationToDatastore(true),
|
||||
}
|
||||
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
|
||||
if err != nil {
|
||||
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &provider{
|
||||
server: server,
|
||||
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
|
||||
config: config,
|
||||
settings: scopedProviderSettings,
|
||||
openfgaServer: openfgaServer,
|
||||
openfgaSchema: openfgaSchema,
|
||||
mtx: sync.RWMutex{},
|
||||
stopChan: make(chan struct{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Start(ctx context.Context) error {
|
||||
return provider.server.Start(ctx)
|
||||
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
modelID, err := provider.getOrCreateModel(ctx, storeId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
provider.mtx.Lock()
|
||||
provider.modelID = modelID
|
||||
provider.storeID = storeId
|
||||
provider.mtx.Unlock()
|
||||
|
||||
<-provider.stopChan
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Stop(ctx context.Context) error {
|
||||
return provider.server.Stop(ctx)
|
||||
provider.openfgaServer.Close()
|
||||
close(provider.stopChan)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
|
||||
return provider.server.Check(ctx, tupleReq)
|
||||
storeID, modelID := provider.getStoreIDandModelID()
|
||||
checkResponse, err := provider.openfgaServer.Check(
|
||||
ctx,
|
||||
&openfgav1.CheckRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
TupleKey: &openfgav1.CheckRequestTupleKey{
|
||||
User: tupleReq.User,
|
||||
Relation: tupleReq.Relation,
|
||||
Object: tupleReq.Object,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if !checkResponse.Allowed {
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
|
||||
return provider.server.BatchCheck(ctx, tupleReq)
|
||||
storeID, modelID := provider.getStoreIDandModelID()
|
||||
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
|
||||
for idx, tuple := range tupleReq {
|
||||
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
|
||||
TupleKey: &openfgav1.CheckRequestTupleKey{
|
||||
User: tuple.User,
|
||||
Relation: tuple.Relation,
|
||||
Object: tuple.Object,
|
||||
},
|
||||
// the batch check response is map[string] keyed by correlationID.
|
||||
CorrelationId: strconv.Itoa(idx),
|
||||
})
|
||||
}
|
||||
|
||||
checkResponse, err := provider.openfgaServer.BatchCheck(
|
||||
ctx,
|
||||
&openfgav1.BatchCheckRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
Checks: batchCheckItems,
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
for _, checkResponse := range checkResponse.Result {
|
||||
if checkResponse.GetAllowed() {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
|
||||
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
return provider.server.Write(ctx, additions, deletions)
|
||||
if len(additions) == 0 && len(deletions) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
storeID, modelID := provider.getStoreIDandModelID()
|
||||
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
|
||||
for idx, tuple := range deletions {
|
||||
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
|
||||
}
|
||||
|
||||
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
Writes: func() *openfgav1.WriteRequestWrites {
|
||||
if len(additions) == 0 {
|
||||
return nil
|
||||
}
|
||||
return &openfgav1.WriteRequestWrites{
|
||||
TupleKeys: additions,
|
||||
OnDuplicate: "ignore",
|
||||
}
|
||||
}(),
|
||||
Deletes: func() *openfgav1.WriteRequestDeletes {
|
||||
if len(deletionTuplesWithoutCondition) == 0 {
|
||||
return nil
|
||||
}
|
||||
return &openfgav1.WriteRequestDeletes{
|
||||
TupleKeys: deletionTuplesWithoutCondition,
|
||||
OnMissing: "ignore",
|
||||
}
|
||||
}(),
|
||||
})
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
return provider.server.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
|
||||
storableRole, err := provider.store.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return roletypes.NewRoleFromStorableRole(storableRole), nil
|
||||
}
|
||||
|
||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
|
||||
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return roletypes.NewRoleFromStorableRole(storableRole), nil
|
||||
}
|
||||
|
||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
|
||||
storableRoles, err := provider.store.List(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roles := make([]*roletypes.Role, len(storableRoles))
|
||||
for idx, storableRole := range storableRoles {
|
||||
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
|
||||
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
roles := make([]*roletypes.Role, len(storableRoles))
|
||||
for idx, storable := range storableRoles {
|
||||
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
|
||||
}
|
||||
|
||||
return roles, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
|
||||
tuples, err := authtypes.TypeableRole.Tuples(
|
||||
subject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, name),
|
||||
},
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return provider.Write(ctx, tuples, nil)
|
||||
}
|
||||
|
||||
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
|
||||
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
|
||||
tuples, err := authtypes.TypeableRole.Tuples(
|
||||
subject,
|
||||
authtypes.RelationAssignee,
|
||||
[]authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeRole, name),
|
||||
},
|
||||
orgID,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return provider.Write(ctx, nil, tuples)
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
|
||||
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
for _, role := range managedRoles {
|
||||
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
storeID, modelID := provider.getStoreIDandModelID()
|
||||
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
User: subject,
|
||||
Relation: relation.StringValue(),
|
||||
Type: typeable.Type().StringValue(),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
|
||||
}
|
||||
|
||||
return nil
|
||||
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
|
||||
}
|
||||
|
||||
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
|
||||
return nil
|
||||
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
|
||||
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, store := range stores.GetStores() {
|
||||
if store.GetName() == name {
|
||||
return store.Id, nil
|
||||
}
|
||||
}
|
||||
|
||||
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return store.Id, nil
|
||||
}
|
||||
|
||||
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
|
||||
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
|
||||
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
|
||||
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, authModel := range authorisationModels.GetAuthorizationModels() {
|
||||
equal, err := provider.isModelEqual(schema, authModel)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if equal {
|
||||
return authModel.Id, nil
|
||||
}
|
||||
}
|
||||
|
||||
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
|
||||
StoreId: storeID,
|
||||
TypeDefinitions: schema.TypeDefinitions,
|
||||
SchemaVersion: schema.SchemaVersion,
|
||||
Conditions: schema.Conditions,
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return authorizationModel.AuthorizationModelId, nil
|
||||
}
|
||||
|
||||
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
// the language model doesn't have any equality check
|
||||
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
|
||||
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
|
||||
// we need to initialize a new model since the model extracted from schema doesn't have id
|
||||
expectedAuthModel := openfgav1.AuthorizationModel{
|
||||
SchemaVersion: expected.SchemaVersion,
|
||||
TypeDefinitions: expected.TypeDefinitions,
|
||||
Conditions: expected.Conditions,
|
||||
}
|
||||
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
actualAuthModel := openfgav1.AuthorizationModel{
|
||||
SchemaVersion: actual.SchemaVersion,
|
||||
TypeDefinitions: actual.TypeDefinitions,
|
||||
Conditions: actual.Conditions,
|
||||
}
|
||||
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
|
||||
|
||||
}
|
||||
|
||||
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
|
||||
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
func (provider *provider) getStoreIDandModelID() (string, string) {
|
||||
provider.mtx.RLock()
|
||||
defer provider.mtx.RUnlock()
|
||||
|
||||
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
|
||||
return nil
|
||||
}
|
||||
storeID := provider.storeID
|
||||
modelID := provider.modelID
|
||||
|
||||
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
|
||||
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
|
||||
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
|
||||
}
|
||||
|
||||
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
|
||||
return nil
|
||||
return storeID, modelID
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz

import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {

expectedModel := `module base
type user`
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)

storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())
@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz

import (
"github.com/SigNoz/signoz/pkg/errors"
@@ -1,334 +0,0 @@
|
||||
package openfgaserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
authz "github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
||||
openfgapkgserver "github.com/openfga/openfga/pkg/server"
|
||||
"google.golang.org/protobuf/encoding/protojson"
|
||||
)
|
||||
|
||||
var (
|
||||
openfgaDefaultStore = valuer.NewString("signoz")
|
||||
)
|
||||
|
||||
type Server struct {
|
||||
config authz.Config
|
||||
settings factory.ScopedProviderSettings
|
||||
openfgaSchema []openfgapkgtransformer.ModuleFile
|
||||
openfgaServer *openfgapkgserver.Server
|
||||
storeID string
|
||||
modelID string
|
||||
mtx sync.RWMutex
|
||||
stopChan chan struct{}
|
||||
}
|
||||
|
||||
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
|
||||
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
|
||||
|
||||
store, err := NewSQLStore(sqlstore)
|
||||
if err != nil {
|
||||
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// setup the openfga server
|
||||
opts := []openfgapkgserver.OpenFGAServiceV1Option{
|
||||
openfgapkgserver.WithDatastore(store),
|
||||
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
|
||||
openfgapkgserver.WithContextPropagationToDatastore(true),
|
||||
}
|
||||
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
|
||||
if err != nil {
|
||||
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &Server{
|
||||
config: config,
|
||||
settings: scopedProviderSettings,
|
||||
openfgaServer: openfgaServer,
|
||||
openfgaSchema: openfgaSchema,
|
||||
mtx: sync.RWMutex{},
|
||||
stopChan: make(chan struct{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (server *Server) Start(ctx context.Context) error {
|
||||
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
modelID, err := server.getOrCreateModel(ctx, storeID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
server.mtx.Lock()
|
||||
server.modelID = modelID
|
||||
server.storeID = storeID
|
||||
server.mtx.Unlock()
|
||||
|
||||
<-server.stopChan
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) Stop(ctx context.Context) error {
|
||||
server.openfgaServer.Close()
|
||||
close(server.stopChan)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
|
||||
storeID, modelID := server.getStoreIDandModelID()
|
||||
checkResponse, err := server.openfgaServer.Check(
|
||||
ctx,
|
||||
&openfgav1.CheckRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
TupleKey: &openfgav1.CheckRequestTupleKey{
|
||||
User: tupleReq.User,
|
||||
Relation: tupleReq.Relation,
|
||||
Object: tupleReq.Object,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if !checkResponse.Allowed {
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
|
||||
storeID, modelID := server.getStoreIDandModelID()
|
||||
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
|
||||
for idx, tuple := range tupleReq {
|
||||
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
|
||||
TupleKey: &openfgav1.CheckRequestTupleKey{
|
||||
User: tuple.User,
|
||||
Relation: tuple.Relation,
|
||||
Object: tuple.Object,
|
||||
},
|
||||
// the batch check response is map[string] keyed by correlationID.
|
||||
CorrelationId: strconv.Itoa(idx),
|
||||
})
|
||||
}
|
||||
|
||||
checkResponse, err := server.openfgaServer.BatchCheck(
|
||||
ctx,
|
||||
&openfgav1.BatchCheckRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
Checks: batchCheckItems,
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
for _, checkResponse := range checkResponse.Result {
|
||||
if checkResponse.GetAllowed() {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
|
||||
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
if len(additions) == 0 && len(deletions) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
storeID, modelID := server.getStoreIDandModelID()
|
||||
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
|
||||
for idx, tuple := range deletions {
|
||||
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
|
||||
}
|
||||
|
||||
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
Writes: func() *openfgav1.WriteRequestWrites {
|
||||
if len(additions) == 0 {
|
||||
return nil
|
||||
}
|
||||
return &openfgav1.WriteRequestWrites{
|
||||
TupleKeys: additions,
|
||||
OnDuplicate: "ignore",
|
||||
}
|
||||
}(),
|
||||
Deletes: func() *openfgav1.WriteRequestDeletes {
|
||||
if len(deletionTuplesWithoutCondition) == 0 {
|
||||
return nil
|
||||
}
|
||||
return &openfgav1.WriteRequestDeletes{
|
||||
TupleKeys: deletionTuplesWithoutCondition,
|
||||
OnMissing: "ignore",
|
||||
}
|
||||
}(),
|
||||
})
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
storeID, modelID := server.getStoreIDandModelID()
|
||||
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
|
||||
StoreId: storeID,
|
||||
AuthorizationModelId: modelID,
|
||||
User: subject,
|
||||
Relation: relation.StringValue(),
|
||||
Type: typeable.Type().StringValue(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
|
||||
}
|
||||
|
||||
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
|
||||
}
|
||||
|
||||
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
|
||||
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, store := range stores.GetStores() {
|
||||
if store.GetName() == name {
|
||||
return store.Id, nil
|
||||
}
|
||||
}
|
||||
|
||||
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return store.Id, nil
|
||||
}
|
||||
|
||||
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
|
||||
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
for _, authModel := range authorisationModels.GetAuthorizationModels() {
|
||||
equal, err := server.isModelEqual(schema, authModel)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if equal {
|
||||
return authModel.Id, nil
|
||||
}
|
||||
}
|
||||
|
||||
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
|
||||
StoreId: storeID,
|
||||
TypeDefinitions: schema.TypeDefinitions,
|
||||
SchemaVersion: schema.SchemaVersion,
|
||||
Conditions: schema.Conditions,
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return authorizationModel.AuthorizationModelId, nil
|
||||
}
|
||||
|
||||
// the language model doesn't have any equality check
|
||||
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
|
||||
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
|
||||
// we need to initialize a new model since the model extracted from schema doesn't have id
|
||||
expectedAuthModel := openfgav1.AuthorizationModel{
|
||||
SchemaVersion: expected.SchemaVersion,
|
||||
TypeDefinitions: expected.TypeDefinitions,
|
||||
Conditions: expected.Conditions,
|
||||
}
|
||||
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
actualAuthModel := openfgav1.AuthorizationModel{
|
||||
SchemaVersion: actual.SchemaVersion,
|
||||
TypeDefinitions: actual.TypeDefinitions,
|
||||
Conditions: actual.Conditions,
|
||||
}
|
||||
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
|
||||
|
||||
}
|
||||
|
||||
func (server *Server) getStoreIDandModelID() (string, string) {
|
||||
server.mtx.RLock()
|
||||
defer server.mtx.RUnlock()
|
||||
|
||||
storeID := server.storeID
|
||||
modelID := server.modelID
|
||||
|
||||
return storeID, modelID
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import (
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/ctxtypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -23,14 +24,15 @@ type AuthZ struct {
    logger       *slog.Logger
    orgGetter    organization.Getter
    authzService authz.AuthZ
    roleGetter   role.Getter
}

func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
    if logger == nil {
        panic("cannot build authz middleware, logger is empty")
    }

    return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
    return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
}

func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
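For orientation, here is a minimal wiring sketch for the updated constructor above. It is illustrative only: the helper name and package are hypothetical, and it assumes the organization getter, authz service, and role getter are already built by the existing provider setup.

package middlewarewiring // hypothetical example package, not part of this change

import (
    "log/slog"
    "os"

    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/role"
)

// newAuthZMiddleware builds the AuthZ middleware with the new role.Getter dependency.
func newAuthZMiddleware(orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *middleware.AuthZ {
    logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
    return middleware.NewAuthZ(logger, orgGetter, authzService, roleGetter)
}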
@@ -4,7 +4,7 @@ import (
    "context"
    "net/http"

    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/statsreporter"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {

    statsreporter.StatsCollector

    authz.RegisterTypeable
    role.RegisterTypeable
}

type Handler interface {

@@ -206,10 +206,6 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
    return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}

func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
    return nil
}

// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
    return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")
@@ -1,11 +0,0 @@
package fields

import "net/http"

type Handler interface {
    // Gets the fields keys for the given field key selector
    GetFieldsKeys(http.ResponseWriter, *http.Request)

    // Gets the fields values for the given field value selector
    GetFieldsValues(http.ResponseWriter, *http.Request)
}
@@ -1,79 +0,0 @@
|
||||
package implfields
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/fields"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
telemetryMetadataStore telemetrytypes.MetadataStore
|
||||
}
|
||||
|
||||
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore) fields.Handler {
|
||||
return &handler{
|
||||
telemetryMetadataStore: telemetryMetadataStore,
|
||||
}
|
||||
}
|
||||
|
||||
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
|
||||
var params telemetrytypes.PostableFieldKeysParams
|
||||
if err := binding.Query.BindQuery(req.URL.Query(), ¶ms); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
fieldKeySelector := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
|
||||
|
||||
keys, complete, err := handler.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
|
||||
Keys: keys,
|
||||
Complete: complete,
|
||||
})
|
||||
}
|
||||
|
||||
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
|
||||
var params telemetrytypes.PostableFieldValueParams
|
||||
if err := binding.Query.BindQuery(req.URL.Query(), ¶ms); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
fieldValueSelector := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
|
||||
|
||||
allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
|
||||
if err != nil {
|
||||
// we don't want to return error if we fail to get related values for some reason
|
||||
relatedValues = []string{}
|
||||
}
|
||||
|
||||
values := &telemetrytypes.TelemetryFieldValues{
|
||||
StringValues: allValues.StringValues,
|
||||
NumberValues: allValues.NumberValues,
|
||||
RelatedValues: relatedValues,
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
|
||||
Values: values,
|
||||
Complete: allComplete && relatedComplete,
|
||||
})
|
||||
}
|
||||
63 pkg/modules/role/implrole/getter.go Normal file
@@ -0,0 +1,63 @@
package implrole

import (
    "context"

    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type getter struct {
    store roletypes.Store
}

func NewGetter(store roletypes.Store) role.Getter {
    return &getter{store: store}
}

func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
    storableRole, err := getter.store.Get(ctx, orgID, id)
    if err != nil {
        return nil, err
    }

    return roletypes.NewRoleFromStorableRole(storableRole), nil
}

func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
    storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
    if err != nil {
        return nil, err
    }

    return roletypes.NewRoleFromStorableRole(storableRole), nil
}

func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
    storableRoles, err := getter.store.List(ctx, orgID)
    if err != nil {
        return nil, err
    }

    roles := make([]*roletypes.Role, len(storableRoles))
    for idx, storableRole := range storableRoles {
        roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
    }

    return roles, nil
}

func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
    storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
    if err != nil {
        return nil, err
    }

    roles := make([]*roletypes.Role, len(storableRoles))
    for idx, storable := range storableRoles {
        roles[idx] = roletypes.NewRoleFromStorableRole(storable)
    }

    return roles, nil
}
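A minimal usage sketch of the Getter above, assuming the caller already has a wired role.Getter and a valid organization ID; the package and helper names are illustrative, not part of this change.

package rolesketch // hypothetical example package

import (
    "context"
    "fmt"

    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/valuer"
)

// printRoleNames lists the roles of an organization and prints their names.
func printRoleNames(ctx context.Context, getter role.Getter, orgID valuer.UUID) error {
    roles, err := getter.List(ctx, orgID)
    if err != nil {
        return err
    }
    for _, r := range roles {
        fmt.Println(r.Name)
    }
    return nil
}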
83 pkg/modules/role/implrole/granter.go Normal file
@@ -0,0 +1,83 @@
package implrole

import (
    "context"

    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type granter struct {
    store roletypes.Store
    authz authz.AuthZ
}

func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
    return &granter{store: store, authz: authz}
}

func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
    tuples, err := authtypes.TypeableRole.Tuples(
        subject,
        authtypes.RelationAssignee,
        []authtypes.Selector{
            authtypes.MustNewSelector(authtypes.TypeRole, name),
        },
        orgID,
    )
    if err != nil {
        return err
    }
    return granter.authz.Write(ctx, tuples, nil)
}

func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
    err := granter.Revoke(ctx, orgID, existingRoleName, subject)
    if err != nil {
        return err
    }

    err = granter.Grant(ctx, orgID, updatedRoleName, subject)
    if err != nil {
        return err
    }

    return nil
}

func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
    tuples, err := authtypes.TypeableRole.Tuples(
        subject,
        authtypes.RelationAssignee,
        []authtypes.Selector{
            authtypes.MustNewSelector(authtypes.TypeRole, name),
        },
        orgID,
    )
    if err != nil {
        return err
    }
    return granter.authz.Write(ctx, nil, tuples)
}

func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
    err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
        for _, role := range managedRoles {
            err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
            if err != nil {
                return err
            }
        }

        return nil
    })

    if err != nil {
        return err
    }

    return nil
}
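The sketch below shows how a caller might grant and later revoke a role by name, mirroring the way the user module further down in this diff builds the subject string. The function names and package are illustrative.

package rolesketch // hypothetical example package

import (
    "context"

    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

// grantRoleToUser assigns the named role to a user. Grant is idempotent, so
// retries are safe (see the comment in the user module further down).
func grantRoleToUser(ctx context.Context, granter role.Granter, orgID valuer.UUID, userID, roleName string) error {
    subject := authtypes.MustNewSubject(authtypes.TypeableUser, userID, orgID, nil)
    return granter.Grant(ctx, orgID, roleName, subject)
}

// revokeRoleFromUser removes a previously granted role; Revoke is likewise idempotent.
func revokeRoleFromUser(ctx context.Context, granter role.Granter, orgID valuer.UUID, userID, roleName string) error {
    subject := authtypes.MustNewSubject(authtypes.TypeableUser, userID, orgID, nil)
    return granter.Revoke(ctx, orgID, roleName, subject)
}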
@@ -1,12 +1,12 @@
|
||||
package signozauthzapi
|
||||
package implrole
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
@@ -14,11 +14,12 @@ import (
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
authz authz.AuthZ
|
||||
setter role.Setter
|
||||
getter role.Getter
|
||||
}
|
||||
|
||||
func NewHandler(authz authz.AuthZ) authz.Handler {
|
||||
return &handler{authz: authz}
|
||||
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
|
||||
return &handler{setter: setter, getter: getter}
|
||||
}
|
||||
|
||||
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
|
||||
@@ -35,7 +36,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
|
||||
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -63,7 +64,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
|
||||
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -102,7 +103,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
|
||||
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -113,7 +114,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
resources := handler.authz.GetResources(ctx)
|
||||
resources := handler.setter.GetResources(ctx)
|
||||
|
||||
var resourceRelations = struct {
|
||||
Resources []*authtypes.Resource `json:"resources"`
|
||||
@@ -133,7 +134,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
|
||||
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -162,7 +163,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -174,7 +175,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
|
||||
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -209,7 +210,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -221,7 +222,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
|
||||
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
@@ -244,7 +245,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
53 pkg/modules/role/implrole/setter.go Normal file
@@ -0,0 +1,53 @@
package implrole

import (
    "context"

    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type setter struct {
    store roletypes.Store
    authz authz.AuthZ
}

func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
    return &setter{store: store, authz: authz}
}

func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
    return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}

func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
    return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}

func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
    return nil
}

func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
    return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}

func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
    return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}

func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
    return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}

func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
    return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}

func (setter *setter) MustGetTypeables() []authtypes.Typeable {
    return nil
}
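Because this community-edition Setter stubs every mutation with a TypeUnsupported error, callers can treat that error as a feature gate. A rough sketch under assumptions: the package and helper name are hypothetical, and errors.Ast is assumed to match an error's type the way it is used elsewhere in this diff.

package rolesketch // hypothetical example package

import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

// createCustomRole returns false (and no error) when custom roles are not
// supported by the current build, instead of surfacing the stubbed error.
func createCustomRole(ctx context.Context, setter role.Setter, orgID valuer.UUID, name, description string) (bool, error) {
    err := setter.Create(ctx, orgID, roletypes.NewRole(name, description, roletypes.RoleTypeCustom, orgID))
    if err != nil {
        if errors.Ast(err, errors.TypeUnsupported) { // assumption: Ast matches by error type
            return false, nil
        }
        return false, err
    }
    return true, nil
}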
@@ -1,4 +1,4 @@
package sqlauthzstore
package implrole

import (
    "context"
@@ -14,7 +14,7 @@ type store struct {
    sqlstore sqlstore.SQLStore
}

func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
    return &store{sqlstore: sqlstore}
}
85 pkg/modules/role/role.go Normal file
@@ -0,0 +1,85 @@
package role

import (
    "context"
    "net/http"

    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type Setter interface {
    // Creates the role.
    Create(context.Context, valuer.UUID, *roletypes.Role) error

    // Gets the role if it exists or creates one.
    GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)

    // Gets the objects associated with the given role and relation.
    GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)

    // Gets all the typeable resources registered from role registry.
    GetResources(context.Context) []*authtypes.Resource

    // Patches the role.
    Patch(context.Context, valuer.UUID, *roletypes.Role) error

    // Patches the objects in authorization server associated with the given role and relation
    PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error

    // Deletes the role and tuples in authorization server.
    Delete(context.Context, valuer.UUID, valuer.UUID) error

    RegisterTypeable
}

type Getter interface {
    // Gets the role
    Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)

    // Gets the role by org_id and name
    GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)

    // Lists all the roles for the organization.
    List(context.Context, valuer.UUID) ([]*roletypes.Role, error)

    // Lists all the roles for the organization filtered by name
    ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}

type Granter interface {
    // Grants a role to the subject based on role name.
    Grant(context.Context, valuer.UUID, string, string) error

    // Revokes a granted role from the subject based on role name.
    Revoke(context.Context, valuer.UUID, string, string) error

    // Changes the granted role for the subject based on role name.
    ModifyGrant(context.Context, valuer.UUID, string, string, string) error

    // Bootstrap the managed roles.
    CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}

type RegisterTypeable interface {
    MustGetTypeables() []authtypes.Typeable
}

type Handler interface {
    Create(http.ResponseWriter, *http.Request)

    Get(http.ResponseWriter, *http.Request)

    GetObjects(http.ResponseWriter, *http.Request)

    GetResources(http.ResponseWriter, *http.Request)

    List(http.ResponseWriter, *http.Request)

    Patch(http.ResponseWriter, *http.Request)

    PatchObjects(http.ResponseWriter, *http.Request)

    Delete(http.ResponseWriter, *http.Request)
}
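The constructors for these interfaces all live in implrole (see the new files above). A minimal wiring sketch, assuming the SQL store and authz provider come from the existing setup; the package and function names here are illustrative.

package rolesketch // hypothetical example package

import (
    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/modules/role"
    "github.com/SigNoz/signoz/pkg/modules/role/implrole"
    "github.com/SigNoz/signoz/pkg/sqlstore"
)

// newRoleInterfaces builds the three role interfaces on top of one shared store.
func newRoleInterfaces(sqlStore sqlstore.SQLStore, authzService authz.AuthZ) (role.Setter, role.Getter, role.Granter) {
    store := implrole.NewStore(sqlStore)
    return implrole.NewSetter(store, authzService),
        implrole.NewGetter(store),
        implrole.NewGranter(store, authzService)
}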
@@ -8,11 +8,11 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/emailing"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
root "github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/tokenizer"
|
||||
@@ -32,13 +32,13 @@ type Module struct {
|
||||
emailing emailing.Emailing
|
||||
settings factory.ScopedProviderSettings
|
||||
orgSetter organization.Setter
|
||||
authz authz.AuthZ
|
||||
granter role.Granter
|
||||
analytics analytics.Analytics
|
||||
config user.Config
|
||||
}
|
||||
|
||||
// This module is a WIP, don't take inspiration from this.
|
||||
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
|
||||
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
|
||||
return &Module{
|
||||
store: store,
|
||||
@@ -47,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
|
||||
settings: settings,
|
||||
orgSetter: orgSetter,
|
||||
analytics: analytics,
|
||||
authz: authz,
|
||||
granter: granter,
|
||||
config: config,
|
||||
}
|
||||
}
|
||||
@@ -172,7 +172,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
|
||||
createUserOpts := root.NewCreateUserOptions(opts...)
|
||||
|
||||
// since assign is idempotant multiple calls to assign won't cause issues in case of retries.
|
||||
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
|
||||
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -238,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
}
|
||||
|
||||
if user.Role != existingUser.Role {
|
||||
err = m.authz.ModifyGrant(ctx,
|
||||
err = m.granter.ModifyGrant(ctx,
|
||||
orgID,
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
|
||||
@@ -301,7 +301,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
|
||||
}
|
||||
|
||||
// since revoke is idempotant multiple calls to revoke won't cause issues in case of retries
|
||||
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
|
||||
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -504,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
|
||||
}
|
||||
|
||||
managedRoles := roletypes.NewManagedRoles(organization.ID)
|
||||
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
|
||||
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
|
||||
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
|
||||
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
@@ -68,7 +69,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/opamptypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"
|
||||
|
||||
"go.uber.org/zap"
|
||||
@@ -103,11 +104,10 @@ type APIHandler struct {
|
||||
querierV2 interfaces.Querier
|
||||
queryBuilder *queryBuilder.QueryBuilder
|
||||
|
||||
// temporalityMap is a map of metric name to temporality to avoid fetching
|
||||
// temporality for the same metric multiple times.
|
||||
//
|
||||
// Querying the v4 table on a low cardinal temporality column should be
|
||||
// fast, but we can still avoid the query if we have the data in memory.
|
||||
// temporalityMap is a map of metric name to temporality
|
||||
// to avoid fetching temporality for the same metric multiple times
|
||||
// querying the v4 table on low cardinal temporality column
|
||||
// should be fast but we can still avoid the query if we have the data in memory
|
||||
temporalityMap map[string]map[v3.Temporality]bool
|
||||
temporalityMux sync.Mutex
|
||||
|
||||
@@ -145,6 +145,8 @@ type APIHandler struct {
|
||||
|
||||
LicensingAPI licensing.API
|
||||
|
||||
FieldsAPI *fields.API
|
||||
|
||||
QuerierAPI *querierAPI.API
|
||||
|
||||
QueryParserAPI *queryparser.API
|
||||
@@ -175,6 +177,8 @@ type APIHandlerOpts struct {
|
||||
|
||||
LicensingAPI licensing.API
|
||||
|
||||
FieldsAPI *fields.API
|
||||
|
||||
QuerierAPI *querierAPI.API
|
||||
|
||||
QueryParserAPI *queryparser.API
|
||||
@@ -239,6 +243,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
|
||||
AlertmanagerAPI: opts.AlertmanagerAPI,
|
||||
LicensingAPI: opts.LicensingAPI,
|
||||
Signoz: opts.Signoz,
|
||||
FieldsAPI: opts.FieldsAPI,
|
||||
QuerierAPI: opts.QuerierAPI,
|
||||
QueryParserAPI: opts.QueryParserAPI,
|
||||
}
|
||||
@@ -394,6 +399,13 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
|
||||
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
subRouter := router.PathPrefix("/api/v1").Subrouter()
|
||||
|
||||
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
|
||||
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
|
||||
}
|
||||
|
||||
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()
|
||||
hostsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getHostAttributeKeys)).Methods(http.MethodGet)
|
||||
@@ -1011,7 +1023,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
|
||||
// the query range is calculated based on the rule's evalWindow and evalDelay
|
||||
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
|
||||
// to get the correct query range
|
||||
start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
|
||||
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
|
||||
if rule.AlertType == ruletypes.AlertTypeLogs {
|
||||
if rule.Version != "v5" {
|
||||
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
|
||||
@@ -1218,12 +1230,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
dashboard := new(dashboardtypes.Dashboard)
|
||||
if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
|
||||
cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
|
||||
cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
|
||||
if apiErr != nil {
|
||||
render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
|
||||
return
|
||||
}
|
||||
dashboard = cloudIntegrationDashboard
|
||||
dashboard = cloudintegrationDashboard
|
||||
} else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
|
||||
integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
|
||||
if apiErr != nil {
|
||||
@@ -1552,13 +1564,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
|
||||
}
|
||||
|
||||
responseData := &model.QueryData{
|
||||
response_data := &model.QueryData{
|
||||
ResultType: res.Value.Type(),
|
||||
Result: res.Value,
|
||||
Stats: qs,
|
||||
}
|
||||
|
||||
aH.Respond(w, responseData)
|
||||
aH.Respond(w, response_data)
|
||||
|
||||
}
|
||||
|
||||
@@ -2640,12 +2652,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -2693,12 +2705,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -2747,12 +2759,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -2801,12 +2813,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -2858,12 +2870,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -2969,12 +2981,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -3023,12 +3035,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -3077,12 +3089,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
result = postprocess.TransformToTableForClickHouseQueries(result)
|
||||
@@ -3137,12 +3149,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -4126,11 +4138,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
|
||||
aH.Respond(w, payload)
|
||||
}
|
||||
|
||||
// listLogsPipelines lists logs pipelines for latest version
|
||||
// listLogsPipelines lists logs piplines for latest version
|
||||
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
|
||||
*logparsingpipeline.PipelinesResponse, error,
|
||||
) {
|
||||
// get latest agent config
|
||||
// get lateset agent config
|
||||
latestVersion := -1
|
||||
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
|
||||
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
|
||||
@@ -4427,7 +4439,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
var spanKeys map[string]v3.AttributeKey
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
hasLogsQuery := false
|
||||
@@ -4444,7 +4456,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
|
||||
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, errQueriesByName)
|
||||
RespondError(w, apiErr, errQuriesByName)
|
||||
return
|
||||
}
|
||||
// get the fields if any logs query is present
|
||||
@@ -4455,7 +4467,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
tracesV4.Enrich(queryRangeParams, spanKeys)
|
||||
@@ -4500,11 +4512,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
}
|
||||
|
||||
result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
|
||||
|
||||
if err != nil {
|
||||
queryErrors := map[string]string{}
|
||||
for name, err := range errQueriesByName {
|
||||
for name, err := range errQuriesByName {
|
||||
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
|
||||
}
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
@@ -4780,7 +4792,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
|
||||
var result []*v3.Result
|
||||
var errQueriesByName map[string]error
|
||||
var errQuriesByName map[string]error
|
||||
var spanKeys map[string]v3.AttributeKey
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
hasLogsQuery := false
|
||||
@@ -4810,7 +4822,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
tracesV4.Enrich(queryRangeParams, spanKeys)
|
||||
@@ -4833,11 +4845,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
}
|
||||
|
||||
result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
|
||||
|
||||
if err != nil {
|
||||
queryErrors := map[string]string{}
|
||||
for name, err := range errQueriesByName {
|
||||
for name, err := range errQuriesByName {
|
||||
queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
|
||||
}
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
@@ -4854,7 +4866,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
|
||||
if err != nil {
|
||||
apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
|
||||
RespondError(w, apiErrObj, errQueriesByName)
|
||||
RespondError(w, apiErrObj, errQuriesByName)
|
||||
return
|
||||
}
|
||||
aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"github.com/gorilla/handlers"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
|
||||
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
FluxInterval: config.Querier.FluxInterval,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
LicensingAPI: nooplicensing.NewLicenseAPI(),
|
||||
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
@@ -207,12 +209,13 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
|
||||
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
|
||||
r.Use(middleware.NewComment().Wrap)
|
||||
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
|
||||
|
||||
api.RegisterRoutes(r, am)
|
||||
api.RegisterLogsRoutes(r, am)
|
||||
api.RegisterIntegrationRoutes(r, am)
|
||||
api.RegisterCloudIntegrationsRoutes(r, am)
|
||||
api.RegisterFieldsRoutes(r, am)
|
||||
api.RegisterQueryRangeV3Routes(r, am)
|
||||
api.RegisterInfraMetricsRoutes(r, am)
|
||||
api.RegisterWebSocketPaths(r, am)
|
||||
|
||||
@@ -5,10 +5,10 @@ import (
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
|
||||
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
|
||||
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
|
||||
|
||||
func GetEvalDelay() valuer.TextDuration {
|
||||
func GetEvalDelay() time.Duration {
|
||||
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
|
||||
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
|
||||
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
|
||||
if err != nil {
|
||||
return valuer.TextDuration{}
|
||||
return 0
|
||||
}
|
||||
return evalDelayDuration
|
||||
}
|
||||
|
||||
@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we lookback from the current time, we look at data for the last
// evalWindow duration
evalWindow valuer.TextDuration
evalWindow time.Duration
// holdDuration is the duration for which the alert waits before firing
holdDuration valuer.TextDuration
holdDuration time.Duration

// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
evalDelay valuer.TextDuration
evalDelay time.Duration

// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation

// newGroupEvalDelay is the grace period for new alert groups
newGroupEvalDelay valuer.TextDuration
newGroupEvalDelay *time.Duration

queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}

func WithEvalDelay(dur valuer.TextDuration) RuleOption {
func WithEvalDelay(dur time.Duration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
evalWindow: p.EvalWindow,
evalWindow: time.Duration(p.EvalWindow),
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,12 +176,13 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}

// Store newGroupEvalDelay and groupBy keys from NotificationSettings
if p.NotificationSettings != nil {
baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
baseRule.newGroupEvalDelay = &newGroupEvalDelay
}

if baseRule.evalWindow.IsZero() {
baseRule.evalWindow = valuer.MustParseTextDuration("5m")
if baseRule.evalWindow == 0 {
baseRule.evalWindow = 5 * time.Minute
}

for _, opt := range opts {
@@ -244,15 +245,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}

func (r *BaseRule) EvalDelay() valuer.TextDuration {
func (r *BaseRule) EvalDelay() time.Duration {
return r.evalDelay
}

func (r *BaseRule) EvalWindow() valuer.TextDuration {
func (r *BaseRule) EvalWindow() time.Duration {
return r.evalWindow
}

func (r *BaseRule) HoldDuration() valuer.TextDuration {
func (r *BaseRule) HoldDuration() time.Duration {
return r.holdDuration
}

@@ -280,7 +281,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()

if r.evalDelay.IsPositive() {
if r.evalDelay > 0 {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
@@ -551,7 +552,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q

// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
return r.newGroupEvalDelay.IsPositive()
return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
}

// isFilterNewSeriesSupported checks if the query is supported for new series filtering
@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
newGroupEvalDelay valuer.TextDuration
newGroupEvalDelay *time.Duration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,8 +159,7 @@ type filterNewSeriesTestCase struct {

func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultDelay := 2 * time.Minute
defaultGroupByFields := []string{"service_name", "env"}

logger := instrumentationtest.New().Logger()
@@ -203,7 +202,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -235,7 +234,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -262,7 +261,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -296,7 +295,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -326,7 +325,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -362,7 +361,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -391,7 +390,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -421,7 +420,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -455,7 +454,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -489,7 +488,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
newGroupEvalDelay: valuer.TextDuration{}, // zero delay
newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -533,7 +532,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -573,7 +572,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -605,7 +604,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -640,7 +639,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
newGroupEvalDelay: defaultNewGroupEvalDelay,
newGroupEvalDelay: &defaultDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -698,14 +697,20 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
time.Second,
time.Duration(time.Second),
nil,
readerCache,
options,
)

postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: tt.newGroupEvalDelay,
// Set newGroupEvalDelay in NotificationSettings if provided
if tt.newGroupEvalDelay != nil {
postableRule.NotificationSettings = &ruletypes.NotificationSettings{
NewGroupEvalDelay: func() *ruletypes.Duration {
d := ruletypes.Duration(*tt.newGroupEvalDelay)
return &d
}(),
}
}

// Create BaseRule using NewBaseRule
@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}

const taskNameSuffix = "webAppEditor"
const taskNamesuffix = "webAppEditor"

func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache

EvalDelay valuer.TextDuration
EvalDelay time.Duration

PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {

rules = append(rules, tr)

// create ch rule task for evaluation
task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create ch rule task for evalution
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {

rules = append(rules, pr)

// create promql rule task for evaluation
task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
// create promql rule task for evalution
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}

// Stop the rule manager's rule evaluation cycles.
func (m *Manager) Stop(_ context.Context) {
func (m *Manager) Stop(ctx context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()

@@ -336,7 +336,7 @@ func (m *Manager) Stop(_ context.Context) {
zap.L().Info("Rule manager stopped")
}

// EditRule writes the rule definition to the
// EditRuleDefinition writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}

// If there is another task with the same identifier, raise an error
// If there is an another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,8 +678,7 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}

// RuleTasksWithoutLock returns the list of manager's rule tasks without
// acquiring a lock on the manager.
// RuleTasks returns the list of manager's rule tasks.
func (m *Manager) RuleTasksWithoutLock() []Task {

rgs := make([]Task, 0, len(m.tasks))
@@ -890,7 +889,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
// rule has no task, start one
// rule has not task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}
@@ -9,7 +9,6 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -41,8 +40,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -100,8 +99,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -28,8 +28,6 @@ type PromRule struct {
prometheus prometheus.Prometheus
}

var _ Rule = (*PromRule)(nil)

func NewPromRule(
id string,
orgID valuer.UUID,
@@ -334,7 +332,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}

if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -398,7 +396,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
EvalWindow: ruletypes.Duration(r.evalWindow),
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,

@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}

// NewPromRuleTask holds rules that have promql condition
// and evaluates the rule at a given frequency
// newPromRuleTask holds rules that have promql condition
// and evalutes the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))

if frequency == 0 {
if time.Now() == time.Now().Add(frequency) {
frequency = DefaultFrequency
}
@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor

baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute

// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)

evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
lookBackDelta := time.Minute

postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -20,9 +19,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
EvalDelay() valuer.TextDuration
EvalWindow() valuer.TextDuration
HoldDuration() valuer.TextDuration
EvalDelay() time.Duration
EvalWindow() time.Duration
HoldDuration() time.Duration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint

@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {

if frequency == 0 {
if time.Now() == time.Now().Add(frequency) {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,7 +78,6 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }

// Interval returns the group's interval.
// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }

func (g *RuleTask) Pause(b bool) {
@@ -61,8 +61,6 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}

var _ Rule = (*ThresholdRule)(nil)

func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -748,7 +746,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}

if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -814,7 +812,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
EvalWindow: r.evalWindow,
EvalWindow: ruletypes.Duration(r.evalWindow),
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}

rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}

rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()

for idx, c := range cases {
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}

logger := instrumentationtest.New().Logger()
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
require.NoError(t, err)

now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}

rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}

rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: valuer.MustParseTextDuration("5m"),
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(5 * time.Minute),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1,8 +1,6 @@
package signoz

import (
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/gateway"
@@ -13,14 +11,14 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -30,7 +28,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type Handlers struct {
@@ -46,21 +43,10 @@ type Handlers struct {
Global global.Handler
FlaggerHandler flagger.Handler
GatewayHandler gateway.Handler
Fields fields.Handler
AuthzHandler authz.Handler
Role role.Handler
}

func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
gatewayService gateway.Gateway,
telemetryMetadataStore telemetrytypes.MetadataStore,
authz authz.AuthZ,
) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -74,7 +60,6 @@ func NewHandlers(
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
GatewayHandler: gateway.NewHandler(gatewayService),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
Role: implrole.NewHandler(modules.RoleSetter, modules.RoleGetter),
}
}
@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,9 +41,13 @@ func TestNewHandlers(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)

handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)

handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)
@@ -25,6 +25,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/savedview"
"github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview"
"github.com/SigNoz/signoz/pkg/modules/services"
@@ -66,6 +67,9 @@ type Modules struct {
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
RoleSetter role.Setter
RoleGetter role.Getter
Granter role.Granter
}

func NewModules(
@@ -85,10 +89,13 @@ func NewModules(
queryParser queryparser.QueryParser,
config Config,
dashboard dashboard.Module,
roleSetter role.Setter,
roleGetter role.Getter,
granter role.Granter,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, granter, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)

@@ -110,5 +117,8 @@ func NewModules(
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
RoleSetter: roleSetter,
RoleGetter: roleGetter,
Granter: granter,
}
}
@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -40,7 +41,10 @@ func TestNewModules(t *testing.T) {
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
roleSetter := implrole.NewSetter(implrole.NewStore(sqlstore), nil)
roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))
grantModule := implrole.NewGranter(implrole.NewStore(sqlstore), nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, roleSetter, roleGetter, grantModule)

reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {
@@ -15,11 +15,11 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -50,8 +50,8 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ gateway.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
struct{ role.Getter }{},
struct{ role.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

@@ -166,7 +166,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddAuthzIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
)
}

@@ -248,8 +247,8 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.GatewayHandler,
handlers.Fields,
handlers.AuthzHandler,
modules.RoleGetter,
handlers.Role,
),
)
}
@@ -21,6 +21,8 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/dashboard"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/modules/role/implrole"
	"github.com/SigNoz/signoz/pkg/modules/user/impluser"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/querier"
@@ -87,9 +89,10 @@ func New(
	sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]],
	telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]],
	authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
	authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
	dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
	authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
	dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Setter, role.Granter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
	gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
	roleSetterCallback func(sqlstore.SQLStore, authz.AuthZ, licensing.Licensing, []role.RegisterTypeable) role.Setter,
) (*SigNoz, error) {
	// Initialize instrumentation
	instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
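The signature hunk above reshapes three callbacks: authzCallback loses its licensing and dashboard.Module arguments, dashboardModuleCallback additionally receives a role.Setter and a role.Granter, and a roleSetterCallback parameter is added. A hypothetical caller-side sketch of the new role callback only; the import paths for authz and licensing are inferred from the package names used in this diff, and the assumption that implrole.NewSetter returns a role.Setter mirrors the wiring shown elsewhere in the change.

package example

import (
	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/modules/role/implrole"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// roleSetterCallback matches the new parameter on New; licensing and the
// registerable types are accepted but unused in this minimal sketch.
var roleSetterCallback = func(store sqlstore.SQLStore, az authz.AuthZ, _ licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
	return implrole.NewSetter(implrole.NewStore(store), az)
}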
@@ -281,24 +284,11 @@ func New(
	// Initialize user getter
	userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))

	licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
	licensing, err := licensingProviderFactory.New(
		ctx,
		providerSettings,
		licenseConfig,
	)
	if err != nil {
		return nil, err
	}

	// Initialize query parser (needed for dashboard module)
	queryParser := queryparser.New(providerSettings)

	// Initialize dashboard module (needed for authz registry)
	dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, queryParser, querier, licensing)
	// Initialize the role getter
	roleGetter := implrole.NewGetter(implrole.NewStore(sqlstore))

	// Initialize authz
	authzProviderFactory := authzCallback(ctx, sqlstore, licensing, dashboard)
	authzProviderFactory := authzCallback(ctx, sqlstore)
	authz, err := authzProviderFactory.New(ctx, providerSettings, authz.Config{})
	if err != nil {
		return nil, err
@@ -328,6 +318,9 @@ func New(
		return nil, err
	}

	// Initialize query parser
	queryParser := queryparser.New(providerSettings)

	// Initialize ruler from the available ruler provider factories
	ruler, err := factory.NewProviderFromNamedMap(
		ctx,
@@ -340,6 +333,16 @@ func New(
		return nil, err
	}

	licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
	licensing, err := licensingProviderFactory.New(
		ctx,
		providerSettings,
		licenseConfig,
	)
	if err != nil {
		return nil, err
	}

	gatewayFactory := gatewayProviderFactory(licensing)
	gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
	if err != nil {
@@ -387,10 +390,13 @@ func New(
	}

	// Initialize all modules
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
	roleSetter := roleSetterCallback(sqlstore, authz, licensing, nil)
	granter := implrole.NewGranter(implrole.NewStore(sqlstore), authz)
	dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, roleSetter, roleGetter, granter)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)

	// Initialize the API server
	apiserver, err := factory.NewProviderFromNamedMap(
@@ -1,154 +0,0 @@
package sqlmigration

import (
	"context"
	"time"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/oklog/ulid/v2"
	"github.com/uptrace/bun"
	"github.com/uptrace/bun/dialect"
	"github.com/uptrace/bun/migrate"
)

type addAnonymousPublicDashboardTransaction struct {
	sqlstore sqlstore.SQLStore
}

func NewAddAnonymousPublicDashboardTransactionFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_public_dashboard_txn"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
		return newAddAnonymousPublicDashboardTransaction(ctx, ps, c, sqlstore)
	})
}

func newAddAnonymousPublicDashboardTransaction(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore) (SQLMigration, error) {
	return &addAnonymousPublicDashboardTransaction{
		sqlstore: sqlstore,
	}, nil
}

func (migration *addAnonymousPublicDashboardTransaction) Register(migrations *migrate.Migrations) error {
	if err := migrations.Register(migration.Up, migration.Down); err != nil {
		return err
	}

	return nil
}

func (migration *addAnonymousPublicDashboardTransaction) Up(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	defer func() {
		_ = tx.Rollback()
	}()

	var storeID string
	err = tx.QueryRowContext(ctx, `SELECT id FROM store WHERE name = ? LIMIT 1`, "signoz").Scan(&storeID)
	if err != nil {
		return err
	}

	// fetch all the orgs for which we need to insert the anonymous public dashboard transaction tuple.
	orgIDs := []string{}
	rows, err := tx.QueryContext(ctx, `SELECT id FROM organizations`)
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var orgID string
		if err := rows.Scan(&orgID); err != nil {
			return err
		}
		orgIDs = append(orgIDs, orgID)
	}

	for _, orgID := range orgIDs {
		// based on openfga tuple and changelog id's are same for writes.
		// ref: https://github.com/openfga/openfga/blob/main/pkg/storage/sqlite/sqlite.go#L467
		entropy := ulid.DefaultEntropy()
		now := time.Now().UTC()
		tupleID := ulid.MustNew(ulid.Timestamp(now), entropy).String()

		// Add wildcard (*) transaction for signoz-anonymous role to read all public-dashboards
		// This grants the signoz-anonymous role read access to all public dashboards in the organization
		if migration.sqlstore.BunDB().Dialect().Name() == dialect.PG {
			result, err := tx.ExecContext(ctx, `
			INSERT INTO tuple (store, object_type, object_id, relation, _user, user_type, ulid, inserted_at)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?)
			ON CONFLICT (store, object_type, object_id, relation, _user) DO NOTHING`,
				storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "userset", tupleID, now,
			)
			if err != nil {
				return err
			}

			rowsAffected, err := result.RowsAffected()
			if err != nil {
				return err
			}

			if rowsAffected == 0 {
				continue
			}

			_, err = tx.ExecContext(ctx, `
			INSERT INTO changelog (store, object_type, object_id, relation, _user, operation, ulid, inserted_at)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?)
			ON CONFLICT (store, ulid, object_type) DO NOTHING`,
				storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role:organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName+"#assignee", "TUPLE_OPERATION_WRITE", tupleID, now,
			)
			if err != nil {
				return err
			}

		} else {
			result, err := tx.ExecContext(ctx, `
			INSERT INTO tuple (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, user_type, ulid, inserted_at)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
			ON CONFLICT (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation) DO NOTHING`,
				storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", "userset", tupleID, now,
			)
			if err != nil {
				return err
			}

			rowsAffected, err := result.RowsAffected()
			if err != nil {
				return err
			}

			if rowsAffected == 0 {
				continue
			}

			_, err = tx.ExecContext(ctx, `
			INSERT INTO changelog (store, object_type, object_id, relation, user_object_type, user_object_id, user_relation, operation, ulid, inserted_at)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
			ON CONFLICT (store, ulid, object_type) DO NOTHING`,
				storeID, "metaresource", "organization/"+orgID+"/public-dashboard/*", "read", "role", "organization/"+orgID+"/role/"+roletypes.SigNozAnonymousRoleName, "assignee", 0, tupleID, now,
			)
			if err != nil {
				return err
			}
		}

	}

	err = tx.Commit()
	if err != nil {
		return err
	}

	return nil
}

func (migration *addAnonymousPublicDashboardTransaction) Down(context.Context, *bun.DB) error {
	return nil
}
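The deleted migration above leans on the fact that, in OpenFGA's SQL storage layout, a tuple row and its changelog row share one identifier, so it mints a single timestamped ULID per write. A minimal sketch of that idiom, using the same oklog/ulid/v2 calls the migration imported (the helper name is illustrative):

package example

import (
	"time"

	"github.com/oklog/ulid/v2"
)

// newTupleID returns one ULID per write, timestamped in UTC, the same way the
// removed migration built the identifier it reused for both the tuple and the
// changelog inserts.
func newTupleID() (string, time.Time) {
	now := time.Now().UTC()
	return ulid.MustNew(ulid.Timestamp(now), ulid.DefaultEntropy()).String(), now
}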
@@ -20,19 +20,15 @@ var (
)

var (
	typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
	typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
	typeUserSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
	typeRoleSelectorRegex = regexp.MustCompile(`^[a-z-]{1,50}$`)
	typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
	typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
	typeOrganizationSelectorRegex = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
	typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
	// metaresources selectors are used to select either all or none until we introduce some hierarchy here.
	// metaresources selectors are used to select either all or none
	typeMetaResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)

var (
	WildCardSelectorString = "*"
)

type SelectorCallbackWithClaimsFn func(*http.Request, Claims) ([]Selector, error)
type SelectorCallbackWithoutClaimsFn func(*http.Request, []*types.Organization) ([]Selector, valuer.UUID, error)
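The hunk above moves the user, role, and organization selector patterns between wildcard-capable forms and strict UUID-only and name-only forms; the wildcard itself stays expressible through the dedicated anonymous and metaresources selectors. A standalone sketch of what the strict patterns accept (regexes copied verbatim from the hunk; the sample UUID is arbitrary):

package main

import (
	"fmt"
	"regexp"
)

// Copies of the strict selector patterns shown in the hunk above.
var (
	typeUserSelectorRegex      = regexp.MustCompile(`^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$`)
	typeRoleSelectorRegex      = regexp.MustCompile(`^[a-z-]{1,50}$`)
	typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
)

func main() {
	fmt.Println(typeUserSelectorRegex.MatchString("9f8b6c1a-0d2e-4f3a-8b1c-2d3e4f5a6b7c")) // true: a lowercase UUID matches
	fmt.Println(typeUserSelectorRegex.MatchString("*"))                                    // false: the wildcard is not a user selector here
	fmt.Println(typeRoleSelectorRegex.MatchString("signoz-anonymous"))                     // true: lowercase letters and hyphens only
	fmt.Println(typeAnonymousSelectorRegex.MatchString("*"))                               // true: the wildcard has its own selector
}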
@@ -24,10 +24,9 @@ func MustNewTypeableMetaResource(name Name) Typeable {
	return typeableesource
}

func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableMetaResource *typeableMetaResource) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
	tuples := make([]*openfgav1.TupleKey, 0)

	for _, selector := range selectors {
	for _, selector := range selector {
		object := typeableMetaResource.Prefix(orgID) + "/" + selector.String()
		tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
	}

@@ -24,10 +24,9 @@ func MustNewTypeableMetaResources(name Name) Typeable {
	return resources
}

func (typeableResources *typeableMetaResources) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableResources *typeableMetaResources) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
	tuples := make([]*openfgav1.TupleKey, 0)

	for _, selector := range selectors {
	for _, selector := range selector {
		object := typeableResources.Prefix(orgID) + "/" + selector.String()
		tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
	}

@@ -11,10 +11,9 @@ var _ Typeable = new(typeableOrganization)

type typeableOrganization struct{}

func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selectors []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableOrganization *typeableOrganization) Tuples(subject string, relation Relation, selector []Selector, _ valuer.UUID) ([]*openfgav1.TupleKey, error) {
	tuples := make([]*openfgav1.TupleKey, 0)

	for _, selector := range selectors {
	for _, selector := range selector {
		object := strings.Join([]string{typeableOrganization.Type().StringValue(), selector.String()}, ":")
		tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
	}

@@ -9,10 +9,9 @@ var _ Typeable = new(typeableRole)

type typeableRole struct{}

func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableRole *typeableRole) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
	tuples := make([]*openfgav1.TupleKey, 0)

	for _, selector := range selectors {
	for _, selector := range selector {
		object := typeableRole.Prefix(orgID) + "/" + selector.String()
		tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
	}

@@ -9,10 +9,9 @@ var _ Typeable = new(typeableUser)

type typeableUser struct{}

func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
func (typeableUser *typeableUser) Tuples(subject string, relation Relation, selector []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
	tuples := make([]*openfgav1.TupleKey, 0)

	for _, selector := range selectors {
	for _, selector := range selector {
		object := typeableUser.Prefix(orgID) + "/" + selector.String()
		tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
	}
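Each Typeable.Tuples implementation above expands a slice of selectors into one openfgav1.TupleKey per selector, differing only in how the object string is prefixed. A generic, hypothetical stand-in for that loop (the helper name and plain string parameters are illustrative, not the repository's own types):

package example

import openfgav1 "github.com/openfga/api/proto/openfga/v1"

// tuplesFor builds one tuple per selector, appending each selector to the
// object prefix, mirroring the shape of the Tuples methods shown above.
func tuplesFor(subject, relation, objectPrefix string, selectors []string) []*openfgav1.TupleKey {
	tuples := make([]*openfgav1.TupleKey, 0, len(selectors))
	for _, selector := range selectors {
		tuples = append(tuples, &openfgav1.TupleKey{
			User:     subject,
			Relation: relation,
			Object:   objectPrefix + "/" + selector,
		})
	}
	return tuples
}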
@@ -31,13 +31,6 @@ var (
	TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
)

func (TreemapMode) Enum() []any {
	return []any{
		TreemapModeTimeSeries,
		TreemapModeSamples,
	}
}

// StatsRequest represents the payload accepted by the metrics stats endpoint.
type StatsRequest struct {
	Filter *qbtypes.Filter `json:"filter,omitempty"`
@@ -105,7 +98,7 @@ func (req *StatsRequest) UnmarshalJSON(data []byte) error {
type Stat struct {
	MetricName string `json:"metricName" required:"true"`
	Description string `json:"description" required:"true"`
	MetricType metrictypes.Type `json:"type" required:"true"`
	MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
	MetricUnit string `json:"unit" required:"true"`
	TimeSeries uint64 `json:"timeseries" required:"true"`
	Samples uint64 `json:"samples" required:"true"`
@@ -119,9 +112,9 @@ type StatsResponse struct {

type MetricMetadata struct {
	Description string `json:"description" required:"true"`
	MetricType metrictypes.Type `json:"type" required:"true"`
	MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
	MetricUnit string `json:"unit" required:"true"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
	IsMonotonic bool `json:"isMonotonic" required:"true"`
}

@@ -138,10 +131,10 @@ func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
type UpdateMetricMetadataRequest struct {
	MetricName string `json:"metricName" required:"true"`
	Type metrictypes.Type `json:"type" required:"true"`
	Type metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
	Description string `json:"description" required:"true"`
	Unit string `json:"unit" required:"true"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true"`
	Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
	IsMonotonic bool `json:"isMonotonic" required:"true"`
}

@@ -151,7 +144,7 @@ type TreemapRequest struct {
	Start int64 `json:"start" required:"true"`
	End int64 `json:"end" required:"true"`
	Limit int `json:"limit" required:"true"`
	Mode TreemapMode `json:"mode" required:"true"`
	Mode TreemapMode `json:"mode" required:"true" enum:"timeseries,samples"`
}

// Validate enforces basic constraints on TreemapRequest.
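The remaining hunks drop the TreemapMode Enum() method in favour of inline enum struct tags on every fixed-vocabulary field, so the accepted values sit next to the field definition; presumably the OpenAPI/schema generator reads these tags via reflection. A hypothetical struct showing only the tag pattern (field types simplified, names illustrative):

package example

// exampleTreemapRequest copies the tag pattern introduced above; the enum tag
// enumerates the accepted string values for schema generation.
type exampleTreemapRequest struct {
	Start int64  `json:"start" required:"true"`
	End   int64  `json:"end" required:"true"`
	Limit int    `json:"limit" required:"true"`
	Mode  string `json:"mode" required:"true" enum:"timeseries,samples"`
}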