Compare commits


3 Commits

Author | SHA1 | Message | Date
srikanthccv | da2ad3b0c6 | chore: resolve conflicts | 2026-02-09 13:49:18 +05:30
Srikanth Chekuri | 1251fb7e1d | Merge branch 'main' into query-range-v5-openapi-spec | 2026-02-09 07:05:46 +05:30
srikanthccv | 25e81bef02 | chore: add OpenAPI spec for /v5/query_range | 2026-02-09 06:26:10 +05:30
159 changed files with 8841 additions and 14854 deletions

.github/CODEOWNERS vendored
View File

@@ -133,8 +133,5 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
## Dashboard Libs + Components
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend
## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend

View File

@@ -18,6 +18,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -76,15 +78,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
},
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(implrole.NewStore(store), authz)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -14,6 +14,7 @@ import (
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/ee/modules/role/implrole"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -28,6 +29,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
@@ -115,15 +118,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)
if err != nil {

View File

@@ -616,11 +616,11 @@ paths:
- in: query
name: signal
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesSignal'
- in: query
name: source
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesSource'
- in: query
name: limit
schema:
@@ -638,11 +638,11 @@ paths:
- in: query
name: fieldContext
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldContext'
- in: query
name: fieldDataType
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
- in: query
name: metricName
schema:
@@ -698,11 +698,11 @@ paths:
- in: query
name: signal
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesSignal'
- in: query
name: source
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesSource'
- in: query
name: limit
schema:
@@ -720,11 +720,11 @@ paths:
- in: query
name: fieldContext
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldContext'
- in: query
name: fieldDataType
schema:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
- in: query
name: metricName
schema:
@@ -3526,6 +3526,62 @@ paths:
summary: Rotate session
tags:
- sessions
/api/v5/query_range:
post:
deprecated: false
description: Execute a composite query over a time range. Supports builder queries
(traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse
SQL.
operationId: QueryRangeV5
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeResponse'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Query range
tags:
- query
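
Editor's note: taken together, the new path item and the schemas below describe a plain JSON POST. A minimal client sketch, assuming a SigNoz instance at https://signoz.example.com and that the api_key security scheme maps to a SIGNOZ-API-KEY header (both are placeholders, not stated in this spec); the request body mirrors the QueryRangeRequest example further down in this file, and errors come back as RenderErrorResponse with the 400/401/403/500 statuses listed above.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Builder query over traces, copied from the QueryRangeRequest example in this spec.
	body := map[string]any{
		"schemaVersion": "v1",
		"start":         1640995200000,
		"end":           1640998800000,
		"requestType":   "time_series",
		"compositeQuery": map[string]any{
			"queries": []any{
				map[string]any{
					"type": "builder_query",
					"spec": map[string]any{
						"name":   "A",
						"signal": "traces",
						"aggregations": []any{
							map[string]any{"expression": "count()", "alias": "span_count"},
						},
						"filter":  map[string]any{"expression": "service.name = 'frontend'"},
						"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
						"order": []any{
							map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"},
						},
						"limit":        10,
						"stepInterval": "60s",
					},
				},
			},
		},
	}

	buf, _ := json.Marshal(body)
	req, _ := http.NewRequest(http.MethodPost, "https://signoz.example.com/api/v5/query_range", bytes.NewReader(buf))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // assumed header name for the api_key scheme

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// A 200 wraps QueryRangeResponse under "data" next to a "status" string.
	var out struct {
		Status string          `json:"status"`
		Data   json.RawMessage `json:"data"`
	}
	_ = json.NewDecoder(resp.Body).Decode(&out)
	fmt.Println(resp.StatusCode, out.Status)
}
```
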
components:
schemas:
AuthtypesAttributeMapping:
@@ -4249,12 +4305,39 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesSpaceAggregation:
enum:
- ""
- sum
- avg
- min
- max
- count
- p50
- p75
- p90
- p95
- p99
type: string
MetrictypesTemporality:
enum:
- delta
- cumulative
- unspecified
type: string
MetrictypesTimeAggregation:
enum:
- ""
- latest
- sum
- avg
- min
- max
- count
- count_distinct
- rate
- increase
type: string
MetrictypesType:
enum:
- gauge
@@ -4312,7 +4395,101 @@ components:
type:
type: string
type: object
Querybuildertypesv5AggregationBucket:
properties:
alias:
type: string
anomalyScores:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
index:
type: integer
lowerBoundSeries:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
meta:
properties:
unit:
type: string
type: object
predictedSeries:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
series:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
nullable: true
type: array
upperBoundSeries:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
type: object
Querybuildertypesv5Bucket:
properties:
step:
format: double
type: number
type: object
Querybuildertypesv5ClickHouseQuery:
properties:
disabled:
type: boolean
legend:
type: string
name:
type: string
query:
type: string
type: object
Querybuildertypesv5ColumnDescriptor:
properties:
aggregationIndex:
format: int64
type: integer
columnType:
$ref: '#/components/schemas/Querybuildertypesv5ColumnType'
description:
type: string
fieldContext:
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
meta:
properties:
unit:
type: string
type: object
name:
type: string
queryName:
type: string
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object
Querybuildertypesv5ColumnType:
enum:
- group
- aggregation
type: string
Querybuildertypesv5CompositeQuery:
description: Composite query containing one or more query envelopes. Each query
envelope specifies its type and corresponding spec.
properties:
queries:
items:
$ref: '#/components/schemas/Querybuildertypesv5QueryEnvelope'
nullable: true
type: array
type: object
Querybuildertypesv5ExecStats:
description: Execution statistics for the query, including rows scanned, bytes
scanned, and duration.
properties:
bytesScanned:
minimum: 0
@@ -4334,10 +4511,109 @@ components:
expression:
type: string
type: object
Querybuildertypesv5FormatOptions:
properties:
fillGaps:
type: boolean
formatTableResultForUI:
type: boolean
type: object
Querybuildertypesv5Function:
properties:
args:
items:
$ref: '#/components/schemas/Querybuildertypesv5FunctionArg'
type: array
name:
$ref: '#/components/schemas/Querybuildertypesv5FunctionName'
type: object
Querybuildertypesv5FunctionArg:
properties:
name:
type: string
value: {}
type: object
Querybuildertypesv5FunctionName:
enum:
- cutoffmin
- cutoffmax
- clampmin
- clampmax
- absolute
- runningdiff
- log2
- log10
- cumulativesum
- ewma3
- ewma5
- ewma7
- median3
- median5
- median7
- timeshift
- anomaly
- fillzero
type: string
Querybuildertypesv5GroupByKey:
properties:
description:
type: string
fieldContext:
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
name:
type: string
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object
Querybuildertypesv5Having:
properties:
expression:
type: string
type: object
Querybuildertypesv5Label:
properties:
key:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
value: {}
type: object
Querybuildertypesv5LimitBy:
properties:
keys:
items:
type: string
nullable: true
type: array
value:
type: string
type: object
Querybuildertypesv5LogAggregation:
properties:
alias:
type: string
expression:
type: string
type: object
Querybuildertypesv5MetricAggregation:
properties:
metricName:
type: string
reduceTo:
$ref: '#/components/schemas/Querybuildertypesv5ReduceTo'
spaceAggregation:
$ref: '#/components/schemas/MetrictypesSpaceAggregation'
temporality:
$ref: '#/components/schemas/MetrictypesTemporality'
timeAggregation:
$ref: '#/components/schemas/MetrictypesTimeAggregation'
type: object
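
Editor's note: for metrics, the builder query's aggregations use MetricAggregation above, which combines the MetrictypesTemporality, MetrictypesTimeAggregation, and MetrictypesSpaceAggregation enums defined earlier. An illustrative spec under that assumption; the metric name is a placeholder and field names follow the schemas in this file.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative metrics builder query envelope; enum values are taken from the
// MetrictypesTemporality/TimeAggregation/SpaceAggregation schemas above.
var metricQuery = map[string]any{
	"type": "builder_query",
	"spec": map[string]any{
		"name":   "A",
		"signal": "metrics",
		"aggregations": []any{
			map[string]any{
				"metricName":       "http_server_duration_count", // placeholder metric
				"temporality":      "cumulative",
				"timeAggregation":  "rate",
				"spaceAggregation": "sum",
			},
		},
		"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
		"stepInterval": 60, // numeric Step form: seconds
	},
}

func main() {
	out, _ := json.MarshalIndent(metricQuery, "", "  ")
	fmt.Println(string(out))
}
```
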
Querybuildertypesv5OrderBy:
properties:
direction:
type: string
$ref: '#/components/schemas/Querybuildertypesv5OrderDirection'
key:
$ref: '#/components/schemas/Querybuildertypesv5OrderByKey'
type: object
@@ -4346,34 +4622,404 @@ components:
description:
type: string
fieldContext:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
name:
type: string
signal:
type: string
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object
Querybuildertypesv5OrderDirection:
enum:
- asc
- desc
type: string
Querybuildertypesv5PromQuery:
properties:
disabled:
type: boolean
legend:
type: string
name:
type: string
query:
type: string
stats:
type: boolean
step:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderFormula:
properties:
disabled:
type: boolean
expression:
type: string
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
name:
type: string
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
type: object
Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5LogAggregation:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5LogAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
secondaryAggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
type: array
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
source:
$ref: '#/components/schemas/TelemetrytypesSource'
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5MetricAggregation:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5MetricAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
secondaryAggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
type: array
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
source:
$ref: '#/components/schemas/TelemetrytypesSource'
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5TraceAggregation:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5TraceAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
secondaryAggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
type: array
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
source:
$ref: '#/components/schemas/TelemetrytypesSource'
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderTraceOperator:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5TraceAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
expression:
type: string
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
returnSpansFrom:
type: string
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryData:
oneOf:
- $ref: '#/components/schemas/Querybuildertypesv5TimeSeriesData'
- $ref: '#/components/schemas/Querybuildertypesv5ScalarData'
- $ref: '#/components/schemas/Querybuildertypesv5RawData'
properties:
results:
items: {}
nullable: true
type: array
type: object
Querybuildertypesv5QueryEnvelope:
oneOf:
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderTrace'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderLog'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderMetric'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeFormula'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeTraceOperator'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopePromQL'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeClickHouseSQL'
properties:
spec: {}
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeBuilderLog:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5LogAggregation'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeBuilderMetric:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5MetricAggregation'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeBuilderTrace:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5TraceAggregation'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeClickHouseSQL:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5ClickHouseQuery'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeFormula:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderFormula'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopePromQL:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5PromQuery'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeTraceOperator:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderTraceOperator'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryRangeRequest:
description: Request body for the v5 query range endpoint. Supports builder
queries (traces, logs, metrics), formulas, joins, trace operators, PromQL,
and ClickHouse SQL queries.
example:
compositeQuery:
queries:
- spec:
aggregations:
- alias: span_count
expression: count()
filter:
expression: service.name = 'frontend'
groupBy:
- fieldContext: resource
name: service.name
limit: 10
name: A
order:
- direction: desc
key:
name: span_count
signal: traces
stepInterval: 60s
type: builder_query
end: 1.6409988e+12
requestType: time_series
schemaVersion: v1
start: 1.6409952e+12
properties:
compositeQuery:
$ref: '#/components/schemas/Querybuildertypesv5CompositeQuery'
end:
minimum: 0
type: integer
formatOptions:
$ref: '#/components/schemas/Querybuildertypesv5FormatOptions'
noCache:
type: boolean
requestType:
$ref: '#/components/schemas/Querybuildertypesv5RequestType'
schemaVersion:
type: string
start:
minimum: 0
type: integer
variables:
additionalProperties:
$ref: '#/components/schemas/Querybuildertypesv5VariableItem'
type: object
type: object
Querybuildertypesv5QueryRangeResponse:
description: 'Response from the v5 query range endpoint. The data.results array
contains typed results depending on the requestType: TimeSeriesData for time_series,
ScalarData for scalar, or RawData for raw requests.'
properties:
data:
$ref: '#/components/schemas/Querybuildertypesv5QueryData'
meta:
$ref: '#/components/schemas/Querybuildertypesv5ExecStats'
type:
type: string
$ref: '#/components/schemas/Querybuildertypesv5RequestType'
warning:
$ref: '#/components/schemas/Querybuildertypesv5QueryWarnData'
type: object
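
Editor's note: per the description above, data.results is typed by the request type. A decoding sketch under that assumption, with structs shaped after TimeSeriesData, AggregationBucket, TimeSeries, and ExecStats; only bytesScanned is spelled out in this diff hunk, so the other stats fields are omitted here.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Envelope documented above: {"status": "...", "data": <QueryRangeResponse>}.
type queryRangeEnvelope struct {
	Status string `json:"status"`
	Data   struct {
		Type string `json:"type"` // mirrors requestType: time_series, scalar, raw, ...
		Data struct {
			Results []json.RawMessage `json:"results"`
		} `json:"data"`
		Meta struct {
			BytesScanned uint64 `json:"bytesScanned"` // other ExecStats fields not shown in this hunk
		} `json:"meta"`
	} `json:"data"`
}

// timeSeriesData follows TimeSeriesData -> AggregationBucket -> TimeSeries.
type timeSeriesData struct {
	QueryName    string `json:"queryName"`
	Aggregations []struct {
		Alias  string `json:"alias"`
		Series []struct {
			Labels []struct {
				Key struct {
					Name string `json:"name"`
				} `json:"key"`
				Value any `json:"value"`
			} `json:"labels"`
			Values []struct {
				Timestamp int64   `json:"timestamp"`
				Value     float64 `json:"value"`
			} `json:"values"`
		} `json:"series"`
	} `json:"aggregations"`
}

func decodeResults(body []byte) error {
	var env queryRangeEnvelope
	if err := json.Unmarshal(body, &env); err != nil {
		return err
	}
	for _, raw := range env.Data.Data.Results {
		switch env.Data.Type {
		case "time_series":
			var ts timeSeriesData
			if err := json.Unmarshal(raw, &ts); err != nil {
				return err
			}
			fmt.Println("query", ts.QueryName, "buckets", len(ts.Aggregations))
		case "scalar":
			// ScalarData: "columns" ([]ColumnDescriptor) plus "data" ([][]any).
		case "raw":
			// RawData: "rows", each with a timestamp and a free-form "data" map.
		}
	}
	return nil
}

func main() {}
```
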
Querybuildertypesv5QueryType:
enum:
- builder_query
- builder_formula
- builder_trace_operator
- clickhouse_sql
- promql
type: string
Querybuildertypesv5QueryWarnData:
properties:
message:
@@ -4390,6 +5036,153 @@ components:
message:
type: string
type: object
Querybuildertypesv5RawData:
properties:
nextCursor:
type: string
queryName:
type: string
rows:
items:
$ref: '#/components/schemas/Querybuildertypesv5RawRow'
nullable: true
type: array
type: object
Querybuildertypesv5RawRow:
properties:
data:
additionalProperties: {}
nullable: true
type: object
timestamp:
format: date-time
type: string
type: object
Querybuildertypesv5ReduceTo:
enum:
- sum
- count
- avg
- min
- max
- last
- median
type: string
Querybuildertypesv5RequestType:
enum:
- scalar
- time_series
- raw
- raw_stream
- trace
type: string
Querybuildertypesv5ScalarData:
properties:
columns:
items:
$ref: '#/components/schemas/Querybuildertypesv5ColumnDescriptor'
nullable: true
type: array
data:
items:
items: {}
type: array
nullable: true
type: array
queryName:
type: string
type: object
Querybuildertypesv5SecondaryAggregation:
properties:
alias:
type: string
expression:
type: string
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5Step:
description: Step interval. Accepts a Go duration string (e.g., "60s", "1m",
"1h") or a number representing seconds (e.g., 60).
oneOf:
- description: Duration string (e.g., "60s", "5m", "1h").
example: 60s
type: string
- description: Duration in seconds.
example: 60
type: number
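
Editor's note: the Step oneOf accepts either form on the wire. A small illustrative decoder (not the actual qbtypes implementation) that handles both:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Step mirrors the oneOf above: a Go duration string ("60s", "5m", "1h") or a
// plain number interpreted as seconds.
type Step struct{ time.Duration }

func (s *Step) UnmarshalJSON(data []byte) error {
	// String form: a Go duration.
	var str string
	if err := json.Unmarshal(data, &str); err == nil {
		d, err := time.ParseDuration(str)
		if err != nil {
			return err
		}
		s.Duration = d
		return nil
	}
	// Numeric form: seconds.
	var secs float64
	if err := json.Unmarshal(data, &secs); err != nil {
		return err
	}
	s.Duration = time.Duration(secs * float64(time.Second))
	return nil
}

func main() {
	var a, b Step
	_ = json.Unmarshal([]byte(`"5m"`), &a)
	_ = json.Unmarshal([]byte(`60`), &b)
	fmt.Println(a.Duration, b.Duration) // 5m0s 1m0s
}
```
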
Querybuildertypesv5TimeSeries:
properties:
labels:
items:
$ref: '#/components/schemas/Querybuildertypesv5Label'
type: array
values:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeriesValue'
nullable: true
type: array
type: object
Querybuildertypesv5TimeSeriesData:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5AggregationBucket'
nullable: true
type: array
queryName:
type: string
type: object
Querybuildertypesv5TimeSeriesValue:
properties:
bucket:
$ref: '#/components/schemas/Querybuildertypesv5Bucket'
partial:
type: boolean
timestamp:
format: int64
type: integer
value:
format: double
type: number
values:
items:
format: double
type: number
type: array
type: object
Querybuildertypesv5TraceAggregation:
properties:
alias:
type: string
expression:
type: string
type: object
Querybuildertypesv5VariableItem:
properties:
type:
$ref: '#/components/schemas/Querybuildertypesv5VariableType'
value: {}
type: object
Querybuildertypesv5VariableType:
enum:
- query
- dynamic
- custom
- text
type: string
RenderErrorResponse:
properties:
error:
@@ -4416,6 +5209,23 @@ components:
format: date-time
type: string
type: object
TelemetrytypesFieldContext:
enum:
- metric
- log
- span
- resource
- attribute
- body
type: string
TelemetrytypesFieldDataType:
enum:
- string
- bool
- float64
- int64
- number
type: string
TelemetrytypesGettableFieldKeys:
properties:
complete:
@@ -4435,18 +5245,28 @@ components:
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
type: object
TelemetrytypesSignal:
enum:
- traces
- logs
- metrics
type: string
TelemetrytypesSource:
enum:
- meter
type: string
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
type: string
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
name:
type: string
signal:
type: string
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object

View File

@@ -2,18 +2,12 @@ package openfgaauthz
import (
"context"
"slices"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -21,224 +15,50 @@ import (
type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store roletypes.Store
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
}
openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
if err != nil {
return nil, err
}
return &provider{
pkgAuthzService: pkgAuthzService,
openfgaServer: openfgaServer,
licensing: licensing,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
registry: registry,
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.openfgaServer.Start(ctx)
return provider.pkgAuthzService.Start(ctx)
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.openfgaServer.Stop(ctx)
return provider.pkgAuthzService.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.openfgaServer.Check(ctx, tuple)
return provider.pkgAuthzService.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
tuples := make([]*openfgav1.TupleKey, 0)
grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
if err != nil {
return err
}
tuples = append(tuples, grantTuples...)
managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
if err != nil {
return err
}
tuples = append(tuples, managedRoleTuples...)
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range provider.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, provider.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range provider.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = provider.Write(ctx, additionTuples, deletionTuples)
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
@@ -246,95 +66,33 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
return nil
}
func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
return provider.store.Delete(ctx, orgID, id)
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := []*openfgav1.TupleKey{}
// Grant the admin role to the user
adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
adminTuple, err := authtypes.TypeableRole.Tuples(
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return nil, err
return err
}
tuples = append(tuples, adminTuple...)
// Grant the admin role to the anonymous user
anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
anonymousTuple, err := authtypes.TypeableRole.Tuples(
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, anonymousTuple...)
return tuples, nil
return nil
}
func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
transactionsByRole := make(map[string][]*authtypes.Transaction)
for _, register := range provider.registry {
for roleName, txns := range register.MustGetManagedRoleTransactions() {
transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
}
}
tuples := make([]*openfgav1.TupleKey, 0)
for roleName, transactions := range transactionsByRole {
for _, txn := range transactions {
typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
txnTuples, err := typeable.Tuples(
authtypes.MustNewSubject(
authtypes.TypeableRole,
roleName,
orgID,
&authtypes.RelationAssignee,
),
txn.Relation,
[]authtypes.Selector{txn.Object.Selector},
orgID,
)
if err != nil {
return nil, err
}
tuples = append(tuples, txnTuples...)
}
}
return tuples, nil
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -1,83 +0,0 @@
package openfgaserver
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
type Server struct {
pkgAuthzService authz.AuthZ
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
}
func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return server.pkgAuthzService.Write(ctx, additions, deletions)
}

View File

@@ -11,6 +11,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/types"
@@ -25,11 +26,13 @@ type module struct {
pkgDashboardModule dashboard.Module
store dashboardtypes.Store
settings factory.ScopedProviderSettings
roleSetter role.Setter
granter role.Granter
querier querier.Querier
licensing licensing.Licensing
}
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
@@ -37,6 +40,8 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
pkgDashboardModule: pkgDashboardModule,
store: store,
settings: scopedProviderSettings,
roleSetter: roleSetter,
granter: granter,
querier: querier,
licensing: licensing,
}
@@ -56,6 +61,29 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
if err != nil {
return err
}
additionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
if err != nil {
return err
}
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
if err != nil {
return err
@@ -100,7 +128,6 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
return []authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
}, storableDashboard.OrgID, nil
}
@@ -163,6 +190,29 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
if err != nil {
return err
}
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
@@ -200,6 +250,10 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
return module.pkgDashboardModule.List(ctx, orgID)
}
@@ -212,27 +266,34 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {
return module.pkgDashboardModule.MustGetTypeables()
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{
Type: authtypes.TypeMetaResource,
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
),
},
},
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
if err != nil {
return err
}
}
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
return module.store.DeletePublic(ctx, dashboardID.StringValue())
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
if err != nil {
return err
}
deletionObject := authtypes.MustNewObject(
authtypes.Resource{
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
Type: authtypes.TypeMetaResource,
},
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
)
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
if err != nil {
return err
}
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
if err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,165 @@
package implrole
import (
"context"
"slices"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
licensing licensing.Licensing
registry []role.RegisterTypeable
}
func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return &setter{
store: store,
authz: authz,
licensing: licensing,
registry: registry,
}
}
func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
}
if existingRole != nil {
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
return role, nil
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
typeables := make([]authtypes.Typeable, 0)
for _, register := range setter.registry {
typeables = append(typeables, register.MustGetTypeables()...)
}
// role module cannot self register itself!
typeables = append(typeables, setter.MustGetTypeables()...)
resources := make([]*authtypes.Resource, 0)
for _, typeable := range typeables {
resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
}
return resources
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
objects := make([]*authtypes.Object, 0)
for _, resource := range setter.GetResources(ctx) {
if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
resourceObjects, err := setter.
authz.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)
if err != nil {
return nil, err
}
objects = append(objects, resourceObjects...)
}
}
return objects, nil
}
func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
err = setter.authz.Write(ctx, additionTuples, deletionTuples)
if err != nil {
return err
}
return nil
}
func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
_, err := setter.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := setter.store.Get(ctx, orgID, id)
if err != nil {
return err
}
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.CanEditDelete()
if err != nil {
return err
}
return setter.store.Delete(ctx, orgID, id)
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
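
Editor's note: the new Setter/Granter pair is consumed by the enterprise dashboard module earlier in this diff when a dashboard is made public. A condensed sketch of that flow, assuming the import paths match the package names used in the diff (the dashboardtypes path in particular is inferred, not shown here); orgID and publicDashboardID are assumed inputs.

```go
package example

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func exposePublicDashboard(ctx context.Context, setter role.Setter, granter role.Granter, orgID, publicDashboardID valuer.UUID) error {
	// Ensure the managed anonymous role exists (the setter enforces the license check).
	anonRole, err := setter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
	if err != nil {
		return err
	}

	// Grant the role to the anonymous subject.
	if err := granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil)); err != nil {
		return err
	}

	// Allow the role to read this specific public dashboard.
	object := authtypes.MustNewObject(
		authtypes.Resource{
			Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
			Type: authtypes.TypeMetaResource,
		},
		authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboardID.StringValue()),
	)
	return setter.PatchObjects(ctx, orgID, anonRole.Name, authtypes.RelationRead, []*authtypes.Object{object}, nil)
}
```
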

View File

@@ -1,7 +1,6 @@
package api
import (
"log/slog"
"net/http"
"net/http/httputil"
"time"
@@ -11,9 +10,11 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
@@ -21,7 +22,8 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -31,14 +33,13 @@ type APIHandlerOptions struct {
RulesManager *rules.Manager
UsageManager *usage.Manager
IntegrationsController *integrations.Controller
CloudIntegrationsRegistry map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
GlobalConfig global.Config
Logger *slog.Logger // this is present in Signoz.Instrumentation but adding for quick access
}
type APIHandler struct {
@@ -52,7 +53,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
IntegrationsController: opts.IntegrationsController,
CloudIntegrationsRegistry: opts.CloudIntegrationsRegistry,
CloudIntegrationsController: opts.CloudIntegrationsController,
LogsParsingPipelineController: opts.LogsParsingPipelineController,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
@@ -108,7 +109,22 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.Handle("/api/v5/query_range", handler.New(am.ViewAccess(ah.queryRangeV5), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
})).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
@@ -120,12 +136,14 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
}
func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) {
ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am)
router.HandleFunc(
"/api/v1/cloud-integrations/{cloudProvider}/accounts/generate-connection-params",
am.EditAccess(ah.CloudIntegrationsGenerateConnectionParams),
).Methods(http.MethodGet)
}
func (ah *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) {

View File

@@ -6,7 +6,6 @@ import (
"encoding/json"
"fmt"
"io"
"log/slog"
"net/http"
"strings"
"time"
@@ -14,14 +13,20 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
// TODO: move this file with other cloud integration related code
type CloudIntegrationConnectionParamsResponse struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
claims, err := authtypes.ClaimsFromContext(r.Context())
@@ -36,21 +41,23 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
cloudProviderString := mux.Vars(r)["cloudProvider"]
cloudProvider, err := integrationstypes.NewCloudProvider(cloudProviderString)
if err != nil {
render.Error(w, err)
cloudProvider := mux.Vars(r)["cloudProvider"]
if cloudProvider != "aws" {
RespondError(w, basemodel.BadRequest(fmt.Errorf(
"cloud provider not supported: %s", cloudProvider,
)), nil)
return
}
apiKey, err := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if err != nil {
render.Error(w, err)
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:",
), nil)
return
}
result := integrationstypes.GettableCloudIntegrationConnectionParams{
result := CloudIntegrationConnectionParamsResponse{
SigNozAPIKey: apiKey,
}
@@ -64,17 +71,16 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
// Return the API Key (PAT) even if the rest of the params can not be deduced.
// Params not returned from here will be requested from the user via form inputs.
// This enables gracefully degraded but working experience even for non-cloud deployments.
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion params and signoz api url can not be deduced since no license was found",
)
render.Success(w, http.StatusOK, result)
zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
ah.Respond(w, result)
return
}
signozApiUrl, err := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if err != nil {
render.Error(w, err)
signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
), nil)
return
}
@@ -83,41 +89,48 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
gatewayUrl := ah.opts.GatewayUrl
if len(gatewayUrl) > 0 {
ingestionKeyString, err := ah.getOrCreateCloudProviderIngestionKey(
ingestionKey, apiErr := getOrCreateCloudProviderIngestionKey(
r.Context(), gatewayUrl, license.Key, cloudProvider,
)
if err != nil {
render.Error(w, err)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't get or create ingestion key",
), nil)
return
}
result.IngestionKey = ingestionKeyString
result.IngestionKey = ingestionKey
} else {
ah.opts.Logger.InfoContext(
r.Context(),
"ingestion key can't be deduced since no gateway url has been configured",
)
zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
}
render.Success(w, http.StatusOK, result)
ah.Respond(w, result)
}
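For reference, a fully populated payload from this handler follows the JSON tags on the connection-params struct above; the values below are placeholders chosen purely for illustration.
// Placeholder values only; field names mirror the struct's JSON tags (all omitempty).
const exampleConnectionParams = {
	ingestion_url: '<ingestion-url>',
	ingestion_key: '<ingestion-key>',
	signoz_api_url: 'https://<deployment>.<region-dns>',
	signoz_api_key: '<personal-access-token>',
};
When no license or gateway URL is configured, only signoz_api_key is expected to be present, matching the graceful-degradation comments in the handler.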
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider valuer.String) (string, error) {
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider string) (
string, *basemodel.ApiError,
) {
integrationPATName := fmt.Sprintf("%s integration", cloudProvider)
integrationUser, err := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if err != nil {
return "", err
integrationUser, apiErr := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if apiErr != nil {
return "", apiErr
}
orgIdUUID, err := valuer.NewUUID(orgId)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
}
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
))
}
for _, p := range allPats {
if p.UserID == integrationUser.ID && p.Name == integrationPATName {
@@ -125,10 +138,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
}
}
ah.opts.Logger.InfoContext(
ctx,
zap.L().Info(
"no PAT found for cloud integration, creating a new one",
slog.String("cloudProvider", cloudProvider.String()),
zap.String("cloudProvider", cloudProvider),
)
newPAT, err := types.NewStorableAPIKey(
@@ -138,48 +150,68 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
0,
)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", err
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
return newPAT.Token, nil
}
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getOrCreateCloudIntegrationUser(ctx context.Context, orgId string, cloudProvider valuer.String) (*types.User, error) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider.String())
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
if err != nil {
return nil, err
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, err
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
}
return cloudIntegrationUser, nil
}
// TODO: move this function out of handler and use proper module structure
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (string, error) {
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't query for deployment info: error")
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, *basemodel.ApiError,
) {
// TODO: remove this struct from here
type deploymentResponse struct {
Name string `json:"name"`
ClusterInfo struct {
Region struct {
DNS string `json:"dns"`
} `json:"region"`
} `json:"cluster"`
}
resp := new(integrationstypes.GettableDeployment)
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't query for deployment info: error: %w", err,
))
}
resp := new(deploymentResponse)
err = json.Unmarshal(respBytes, resp)
if err != nil {
return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal deployment info response")
return "", basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal deployment info response: error: %w", err,
))
}
regionDns := resp.ClusterInfo.Region.DNS
@@ -187,11 +219,9 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
if len(regionDns) < 1 || len(deploymentName) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.WrapInternalf(
err,
errors.CodeInternal,
return "", basemodel.InternalError(fmt.Errorf(
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
)
))
}
signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
@@ -199,85 +229,102 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
return signozApiUrl, nil
}
func (ah *APIHandler) getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider valuer.String,
) (string, error) {
type ingestionKey struct {
Name string `json:"name"`
Value string `json:"value"`
// other attributes from gateway response not included here since they are not being used.
}
type ingestionKeysSearchResponse struct {
Status string `json:"status"`
Data []ingestionKey `json:"data"`
Error string `json:"error"`
}
type createIngestionKeyResponse struct {
Status string `json:"status"`
Data ingestionKey `json:"data"`
Error string `json:"error"`
}
func getOrCreateCloudProviderIngestionKey(
ctx context.Context, gatewayUrl string, licenseKey string, cloudProvider string,
) (string, *basemodel.ApiError) {
cloudProviderKeyName := fmt.Sprintf("%s-integration", cloudProvider)
// see if the key already exists
searchResult, err := requestGateway[integrationstypes.GettableIngestionKeysSearch](
searchResult, apiErr := requestGateway[ingestionKeysSearchResponse](
ctx,
gatewayUrl,
licenseKey,
fmt.Sprintf("/v1/workspaces/me/keys/search?name=%s", cloudProviderKeyName),
nil,
ah.opts.Logger,
)
if err != nil {
return "", err
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't search for cloudprovider ingestion key",
)
}
if searchResult.Status != "success" {
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't search for cloud provider ingestion key: status: %s, error: %s",
return "", basemodel.InternalError(fmt.Errorf(
"couldn't search for cloudprovider ingestion key: status: %s, error: %s",
searchResult.Status, searchResult.Error,
)
))
}
for _, k := range searchResult.Data {
if k.Name != cloudProviderKeyName {
continue
}
if k.Name == cloudProviderKeyName {
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", basemodel.InternalError(fmt.Errorf(
"ingestion keys search response not as expected",
))
}
if len(k.Value) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.NewInternalf(errors.CodeInternal, "ingestion keys search response not as expected")
return k.Value, nil
}
return k.Value, nil
}
ah.opts.Logger.InfoContext(
ctx,
zap.L().Info(
"no existing ingestion key found for cloud integration, creating a new one",
slog.String("cloudProvider", cloudProvider.String()),
zap.String("cloudProvider", cloudProvider),
)
createKeyResult, err := requestGateway[integrationstypes.GettableCreateIngestionKey](
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
map[string]any{
"name": cloudProviderKeyName,
"tags": []string{"integration", cloudProvider.String()},
"tags": []string{"integration", cloudProvider},
},
ah.opts.Logger,
)
if err != nil {
return "", err
if apiErr != nil {
return "", basemodel.WrapApiError(
apiErr, "couldn't create cloudprovider ingestion key",
)
}
if createKeyResult.Status != "success" {
return "", errors.NewInternalf(
errors.CodeInternal,
"couldn't create cloud provider ingestion key: status: %s, error: %s",
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloudprovider ingestion key: status: %s, error: %s",
createKeyResult.Status, createKeyResult.Error,
)
))
}
ingestionKeyString := createKeyResult.Data.Value
if len(ingestionKeyString) < 1 {
ingestionKey := createKeyResult.Data.Value
if len(ingestionKey) < 1 {
// Fail early if actual response structure and expectation here ever diverge
return "", errors.NewInternalf(errors.CodeInternal,
return "", basemodel.InternalError(fmt.Errorf(
"ingestion key creation response not as expected",
)
))
}
return ingestionKeyString, nil
return ingestionKey, nil
}
func requestGateway[ResponseType any](
ctx context.Context, gatewayUrl, licenseKey, path string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
ctx context.Context, gatewayUrl string, licenseKey string, path string, payload any,
) (*ResponseType, *basemodel.ApiError) {
baseUrl := strings.TrimSuffix(gatewayUrl, "/")
reqUrl := fmt.Sprintf("%s%s", baseUrl, path)
@@ -288,12 +335,13 @@ func requestGateway[ResponseType any](
"X-Consumer-Groups": "ns:default",
}
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload, logger)
return requestAndParseResponse[ResponseType](ctx, reqUrl, headers, payload)
}
func requestAndParseResponse[ResponseType any](
ctx context.Context, url string, headers map[string]string, payload any, logger *slog.Logger,
) (*ResponseType, error) {
ctx context.Context, url string, headers map[string]string, payload any,
) (*ResponseType, *basemodel.ApiError) {
reqMethod := http.MethodGet
var reqBody io.Reader
if payload != nil {
@@ -301,14 +349,18 @@ func requestAndParseResponse[ResponseType any](
bodyJson, err := json.Marshal(payload)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't marshal payload")
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't serialize request payload to JSON: %w", err,
))
}
reqBody = bytes.NewBuffer(bodyJson)
reqBody = bytes.NewBuffer([]byte(bodyJson))
}
req, err := http.NewRequestWithContext(ctx, reqMethod, url, reqBody)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't create req")
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't prepare request: %w", err,
))
}
for k, v := range headers {
@@ -321,26 +373,23 @@ func requestAndParseResponse[ResponseType any](
response, err := client.Do(req)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't make req")
return nil, basemodel.InternalError(fmt.Errorf("couldn't make request: %w", err))
}
defer func() {
err = response.Body.Close()
if err != nil {
logger.ErrorContext(ctx, "couldn't close response body", "error", err)
}
}()
defer response.Body.Close()
respBody, err := io.ReadAll(response.Body)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't read response body")
return nil, basemodel.InternalError(fmt.Errorf("couldn't read response: %w", err))
}
var resp ResponseType
err = json.Unmarshal(respBody, &resp)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't unmarshal response body")
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't unmarshal gateway response into %T", resp,
))
}
return &resp, nil

View File

@@ -127,7 +127,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
)
}
cloudIntegrationsRegistry := cloudintegrations.NewCloudProviderRegistry(signoz.Instrumentation.Logger(), signoz.SQLStore, signoz.Querier)
cloudIntegrationsController, err := cloudintegrations.NewController(signoz.SQLStore)
if err != nil {
return nil, fmt.Errorf(
"couldn't create cloud provider integrations controller: %w", err,
)
}
// ingestion pipelines manager
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
@@ -162,13 +167,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
RulesManager: rm,
UsageManager: usageManager,
IntegrationsController: integrationsController,
CloudIntegrationsRegistry: cloudIntegrationsRegistry,
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
Logger: signoz.Instrumentation.Logger(),
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
@@ -207,7 +211,7 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
r.Use(otelmux.Middleware(
"apiserver",

View File

@@ -15,7 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,8 +63,6 @@ type AnomalyRule struct {
seasonality anomaly.Seasonality
}
var _ baserules.Rule = (*AnomalyRule)(nil)
func NewAnomalyRule(
id string,
orgID valuer.UUID,
@@ -492,7 +490,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -555,7 +553,7 @@ func (r *AnomalyRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: r.EvalWindow(),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),

View File

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
evalTime := baseTime.Add(5 * time.Minute)
target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor
baseTime := time.Unix(1700000000, 0)
evalWindow := valuer.MustParseTextDuration("5m")
evalWindow := 5 * time.Minute
// Set target higher than test data so regular threshold alerts don't fire
target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,

View File

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)
// create ch rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)
// create promql rule task for evaluation
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)
// create anomaly rule task for evaluation
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,7 +213,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return alertsFound, nil
}
// newTask returns an appropriate group for the rule type
// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="184" height="188" fill="none" viewBox="0 0 184 188"><path fill="#f3b01c" d="M108.092 130.021c18.166-2.018 35.293-11.698 44.723-27.854-4.466 39.961-48.162 65.218-83.83 49.711-3.286-1.425-6.115-3.796-8.056-6.844-8.016-12.586-10.65-28.601-6.865-43.135 10.817 18.668 32.81 30.111 54.028 28.122"/><path fill="#8d2676" d="M53.401 90.174c-7.364 17.017-7.682 36.94 1.345 53.336-31.77-23.902-31.423-75.052-.388-98.715 2.87-2.187 6.282-3.485 9.86-3.683 14.713-.776 29.662 4.91 40.146 15.507-21.3.212-42.046 13.857-50.963 33.555"/><path fill="#ee342f" d="M114.637 61.855C103.89 46.87 87.069 36.668 68.639 36.358c35.625-16.17 79.446 10.047 84.217 48.807.444 3.598-.139 7.267-1.734 10.512-6.656 13.518-18.998 24.002-33.42 27.882 10.567-19.599 9.263-43.544-3.065-61.704"/></svg>


View File

@@ -140,7 +140,10 @@ const CustomMultiSelect: React.FC<CustomMultiSelectProps> = ({
}, [selectedValues, allAvailableValues, enableAllSelection]);
// Define allOptionShown earlier in the code
const allOptionShown = value === ALL_SELECTED_VALUE;
const allOptionShown = useMemo(
() => value === ALL_SELECTED_VALUE || value === 'ALL',
[value],
);
// Value passed to the underlying Ant Select component
const displayValue = useMemo(

View File

@@ -18,8 +18,8 @@ import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynam
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { isEmpty, map } from 'lodash-es';
import {
ArrowLeft,

View File

@@ -1,11 +1,10 @@
import { memo, useEffect, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashboardVariables/customCommaValuesParser';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';
import { customVariableSelectStrategy } from './variableSelectStrategy/customVariableSelectStrategy';
type CustomVariableInputProps = Pick<
VariableItemProps,
@@ -30,31 +29,16 @@ function CustomVariableInput({
onChange,
onDropdownVisibleChange,
handleClear,
applyDefaultIfNeeded,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy: customVariableSelectStrategy,
});
// Apply default on mount — options are available synchronously for custom variables
// eslint-disable-next-line react-hooks/exhaustive-deps
useEffect(applyDefaultIfNeeded, []);
const selectOptions = useMemo(
() =>
optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[optionsData],
);
return (
<SelectVariableInput
variableId={variableData.id}
options={selectOptions}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}

View File

@@ -13,6 +13,7 @@ import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import DynamicVariableSelection from './DynamicVariableSelection';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
@@ -152,7 +153,14 @@ function DashboardVariableSelection(): JSX.Element | null {
{sortedVariablesArray.map((variable) => {
const key = `${variable.name}${variable.id}${variable.order}`;
return (
return variable.type === 'DYNAMIC' ? (
<DynamicVariableSelection
key={key}
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
/>
) : (
<VariableItem
key={key}
existingVariables={dashboardVariables}

View File

@@ -1,343 +0,0 @@
import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { getOptionsForDynamicVariable } from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
import './DashboardVariableSelection.styles.scss';
type DynamicVariableInputProps = Pick<
VariableItemProps,
'variableData' | 'onValueUpdate' | 'existingVariables'
>;
// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
variableData,
onValueUpdate,
existingVariables,
}: DynamicVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const [isRetryableError, setIsRetryableError] = useState<boolean>(true);
const [isComplete, setIsComplete] = useState<boolean>(false);
const [filteredOptionsData, setFilteredOptionsData] = useState<
(string | number | boolean)[]
>([]);
const [relatedValues, setRelatedValues] = useState<string[]>([]);
const [originalRelatedValues, setOriginalRelatedValues] = useState<string[]>(
[],
);
// Track dropdown open state for auto-checking new values
const [isDropdownOpen, setIsDropdownOpen] = useState<boolean>(false);
const [apiSearchText, setApiSearchText] = useState<string>('');
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
[optionsData, relatedValues],
);
const {
value,
tempSelection,
setTempSelection,
handleClear,
enableSelectAll,
defaultValue,
applyDefaultIfNeeded,
onChange,
onDropdownVisibleChange,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy: dynamicVariableSelectStrategy,
allAvailableOptionStrings,
});
// Create a dependency key from all dynamic variables
const dynamicVariablesKey = useMemo(() => {
if (!existingVariables) {
return 'no_variables';
}
const dynamicVars = Object.values(existingVariables)
.filter((v) => v.type === 'DYNAMIC')
.map(
(v) => `${v.name || 'unnamed'}:${JSON.stringify(v.selectedValue || null)}`,
)
.join('|');
return dynamicVars || 'no_dynamic_variables';
}, [existingVariables]);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
// existing query is the query made from the other dynamic variables around this one with their current values
// e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
// eslint-disable-next-line sonarjs/cognitive-complexity
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((val) => val !== null && val !== undefined && val !== '')
.map((val) => val.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((val) => {
// Check if value is a number
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${val.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
const handleSelectDropdownVisibilityChange = useCallback(
(visible: boolean): void => {
setIsDropdownOpen(visible);
onDropdownVisibleChange(visible);
if (!visible) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
setApiSearchText('');
}
},
[onDropdownVisibleChange, optionsData, originalRelatedValues],
);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || `variable_${variableData.id}`,
dynamicVariablesKey,
minTime,
maxTime,
debouncedApiSearchText,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
],
{
enabled:
variableData.type === 'DYNAMIC' &&
!!variableData.dynamicVariablesSource &&
!!variableData.dynamicVariablesAttribute,
queryFn: () =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
: (variableData.dynamicVariablesSource?.toLowerCase() as
| 'traces'
| 'logs'
| 'metrics'),
variableData.dynamicVariablesAttribute,
debouncedApiSearchText,
minTime,
maxTime,
existingQuery,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
const allNewOptions = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
applyDefaultIfNeeded(allNewOptions);
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
},
);
const handleRetry = useCallback((): void => {
setErrorMessage(null);
setIsRetryableError(true);
refetch();
}, [refetch]);
const handleSearch = useCallback(
(text: string) => {
if (isComplete) {
if (!text) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
return;
}
const lowerText = text.toLowerCase();
setFilteredOptionsData(
optionsData.filter((option) =>
option.toString().toLowerCase().includes(lowerText),
),
);
setRelatedValues(
originalRelatedValues.filter((val) =>
val.toLowerCase().includes(lowerText),
),
);
} else {
setApiSearchText(text);
}
},
[isComplete, optionsData, originalRelatedValues],
);
const selectOptions = useMemo(
() =>
getOptionsForDynamicVariable(filteredOptionsData || [], relatedValues || []),
[filteredOptionsData, relatedValues],
);
return (
<SelectVariableInput
variableId={variableData.id}
options={selectOptions}
value={value}
onChange={onChange}
onDropdownVisibleChange={handleSelectDropdownVisibilityChange}
onClear={handleClear}
enableSelectAll={enableSelectAll}
defaultValue={defaultValue}
isMultiSelect={variableData.multiSelect}
// dynamic variable specific + API related props
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
isDynamicVariable
showRetryButton={isRetryableError}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
onSearch={handleSearch}
/>
);
}
export default memo(DynamicVariableInput);
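The existingQuery memo above serializes the other dynamic variables' current selections into a filter expression: numeric values stay bare, strings are single-quoted with escaped quotes, a single value uses =, multiple values use IN [...], and the parts are joined with AND. A standalone sketch of that serialization, re-implemented here only for illustration:
// Standalone re-implementation for illustration; the component builds this inline from existingVariables.
function buildExistingQuery(
	selections: Record<string, Array<string | number>>,
): string {
	const queryParts: string[] = [];
	Object.entries(selections).forEach(([attribute, values]) => {
		const formattedValues = values.map((value) => {
			const numValue = Number(value);
			// Numbers are kept as-is; everything else is single-quoted with escaped quotes.
			if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
				return String(value);
			}
			return `'${String(value).replace(/'/g, "\\'")}'`;
		});
		queryParts.push(
			formattedValues.length === 1
				? `${attribute} = ${formattedValues[0]}`
				: `${attribute} IN [${formattedValues.join(', ')}]`,
		);
	});
	return queryParts.join(' AND ');
}
// buildExistingQuery({ 'k8s.namespace.name': ['zeus', 'gene'], doc_op_type: ['test'] })
// yields: k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type IN ['test']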

View File

@@ -0,0 +1,602 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable no-nested-ternary */
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import useDebounce from 'hooks/useDebounce';
import { isEmpty, isUndefined } from 'lodash-es';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
import { SelectItemStyle } from './styles';
import {
areArraysEqual,
getOptionsForDynamicVariable,
getSelectValue,
uniqueValues,
} from './util';
import './DashboardVariableSelection.styles.scss';
interface DynamicVariableSelectionProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
arg1: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}
function DynamicVariableSelection({
variableData,
onValueUpdate,
existingVariables,
}: DynamicVariableSelectionProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
[],
);
const [errorMessage, setErrorMessage] = useState<null | string>(null);
const [isRetryableError, setIsRetryableError] = useState<boolean>(true);
const [isComplete, setIsComplete] = useState<boolean>(false);
const [filteredOptionsData, setFilteredOptionsData] = useState<
(string | number | boolean)[]
>([]);
const [relatedValues, setRelatedValues] = useState<string[]>([]);
const [originalRelatedValues, setOriginalRelatedValues] = useState<string[]>(
[],
);
const [tempSelection, setTempSelection] = useState<
string | string[] | undefined
>(undefined);
// Track dropdown open state for auto-checking new values
const [isDropdownOpen, setIsDropdownOpen] = useState<boolean>(false);
// Create a dependency key from all dynamic variables
const dynamicVariablesKey = useMemo(() => {
if (!existingVariables) {
return 'no_variables';
}
const dynamicVars = Object.values(existingVariables)
.filter((v) => v.type === 'DYNAMIC')
.map(
(v) => `${v.name || 'unnamed'}:${JSON.stringify(v.selectedValue || null)}`,
)
.join('|');
return dynamicVars || 'no_dynamic_variables';
}, [existingVariables]);
const [apiSearchText, setApiSearchText] = useState<string>('');
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
// existing query is the query made from the other dynamic variables around this one with their current values
// e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((value) => value !== null && value !== undefined && value !== '')
.map((value) => value.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((value) => {
// Check if value is a number
const numValue = Number(value);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return value; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${value.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
variableData.name || `variable_${variableData.id}`,
dynamicVariablesKey,
minTime,
maxTime,
debouncedApiSearchText,
],
{
enabled: variableData.type === 'DYNAMIC',
queryFn: () =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
: (variableData.dynamicVariablesSource?.toLowerCase() as
| 'traces'
| 'logs'
| 'metrics'),
variableData.dynamicVariablesAttribute,
debouncedApiSearchText,
minTime,
maxTime,
existingQuery,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
},
);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
if (
value === variableData.selectedValue ||
(Array.isArray(value) &&
Array.isArray(variableData.selectedValue) &&
areArraysEqual(value, variableData.selectedValue))
) {
return;
}
if (variableData.name) {
if (
value === ALL_SELECT_VALUE ||
(Array.isArray(value) && value.includes(ALL_SELECT_VALUE))
) {
// For ALL selection in dynamic variables, pass null to avoid storing values
// The parent component will handle this appropriately
onValueUpdate(variableData.name, variableData.id, null, true);
} else {
// Build union of available options shown in dropdown (normalized + related)
const allAvailableOptionStrings = [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
];
const haveCustomValuesSelected =
Array.isArray(value) &&
!value.every((v) => allAvailableOptionStrings.includes(v.toString()));
onValueUpdate(
variableData.name,
variableData.id,
value,
allAvailableOptionStrings.every((v) => value.includes(v.toString())),
haveCustomValuesSelected,
);
}
}
},
[variableData, onValueUpdate, optionsData, relatedValues],
);
useEffect(() => {
if (
variableData.dynamicVariablesSource &&
variableData.dynamicVariablesAttribute
) {
refetch();
}
}, [
refetch,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
debouncedApiSearchText,
]);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
[optionsData, relatedValues],
);
const handleSearch = useCallback(
(text: string) => {
if (isComplete) {
if (!text) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
return;
}
const localFilteredOptionsData: (string | number | boolean)[] = [];
optionsData.forEach((option) => {
if (option.toString().toLowerCase().includes(text.toLowerCase())) {
localFilteredOptionsData.push(option);
}
});
setFilteredOptionsData(localFilteredOptionsData);
setRelatedValues(
originalRelatedValues.filter((value) =>
value.toLowerCase().includes(text.toLowerCase()),
),
);
} else {
setApiSearchText(text);
}
},
[isComplete, optionsData, originalRelatedValues],
);
const { selectedValue } = variableData;
const selectedValueStringified = useMemo(
() => getSelectValue(selectedValue, variableData),
[selectedValue, variableData],
);
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const selectValue =
variableData.allSelected && enableSelectAll
? ALL_SELECT_VALUE
: selectedValueStringified;
// Add a handler for tracking temporary selection changes
const handleTempChange = useCallback(
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
const sanitizedValue = uniqueValues(value);
setTempSelection(sanitizedValue);
},
[variableData.multiSelect],
);
// Handle dropdown visibility changes
const handleDropdownVisibleChange = (visible: boolean): void => {
// Update dropdown open state for auto-checking
setIsDropdownOpen(visible);
// Initialize temp selection when opening dropdown
if (visible) {
if (isUndefined(tempSelection) && selectValue === ALL_SELECT_VALUE) {
// When ALL is selected, set selection to exactly the latest available values
const latestAll = [...allAvailableOptionStrings];
setTempSelection(latestAll);
} else {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// Only call handleChange if there's actually a change in the selection
const currentValue = variableData.selectedValue;
// Helper function to check if arrays have the same elements regardless of order
const areArraysEqualIgnoreOrder = (a: any[], b: any[]): boolean => {
if (a.length !== b.length) {
return false;
}
const sortedA = [...a].sort();
const sortedB = [...b].sort();
return areArraysEqual(sortedA, sortedB);
};
// If ALL was selected before and remains ALL after, skip updating
const wasAllSelected = enableSelectAll && variableData.allSelected;
const isAllSelectedAfter =
enableSelectAll &&
Array.isArray(tempSelection) &&
tempSelection.length === allAvailableOptionStrings.length &&
allAvailableOptionStrings.every((v) => tempSelection.includes(v));
if (wasAllSelected && isAllSelectedAfter) {
setTempSelection(undefined);
return;
}
const hasChanged =
tempSelection !== currentValue &&
!(
Array.isArray(tempSelection) &&
Array.isArray(currentValue) &&
areArraysEqualIgnoreOrder(tempSelection, currentValue)
);
if (hasChanged) {
handleChange(tempSelection);
}
setTempSelection(undefined);
}
// Always reset filtered data when dropdown closes, regardless of tempSelection state
if (!visible) {
setFilteredOptionsData(optionsData);
setRelatedValues(originalRelatedValues);
setApiSearchText('');
}
};
useEffect(
() => (): void => {
// Cleanup on unmount
setTempSelection(undefined);
setFilteredOptionsData([]);
setRelatedValues([]);
setApiSearchText('');
},
[],
);
// eslint-disable-next-line sonarjs/cognitive-complexity
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else if (variableData.allSelected) {
// If ALL is selected but no stored values, derive from available options
// This handles the case where we don't store values in localStorage for ALL
value = allAvailableOptionStrings;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
}
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
variableData.allSelected,
selectedValue,
tempSelection,
optionsData,
allAvailableOptionStrings,
]);
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
return (
<div className="variable-item">
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
{variableData.description && (
<Tooltip title={variableData.description}>
<InfoCircleOutlined className="info-icon" />
</Tooltip>
)}
</Typography.Text>
<div className="variable-value">
{variableData.multiSelect ? (
<CustomMultiSelect
key={variableData.id}
options={getOptionsForDynamicVariable(
filteredOptionsData || [],
relatedValues || [],
)}
defaultValue={variableData.defaultValue}
onChange={handleTempChange}
bordered={false}
placeholder="Select value"
placement="bottomLeft"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
maxTagCount={2}
getPopupContainer={popupContainer}
value={
(tempSelection || selectValue) === ALL_SELECT_VALUE
? 'ALL'
: tempSelection || selectValue
}
onDropdownVisibleChange={handleDropdownVisibleChange}
errorMessage={errorMessage}
// eslint-disable-next-line react/no-unstable-nested-components
maxTagPlaceholder={(omittedValues): JSX.Element => {
const maxDisplayValues = 10;
const valuesToShow = omittedValues.slice(0, maxDisplayValues);
const hasMore = omittedValues.length > maxDisplayValues;
const tooltipText =
valuesToShow.map(({ value }) => value).join(', ') +
(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');
return (
<Tooltip title={tooltipText}>
<span>+ {omittedValues.length} </span>
</Tooltip>
);
}}
onClear={(): void => {
handleChange([]);
}}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
onSearch={handleSearch}
onRetry={(): void => {
setErrorMessage(null);
setIsRetryableError(true);
refetch();
}}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
isDynamicVariable
showRetryButton={isRetryableError}
/>
) : (
<CustomSelect
key={variableData.id}
onChange={handleChange}
bordered={false}
placeholder="Select value"
style={SelectItemStyle}
loading={isLoading}
showSearch
data-testid="variable-select"
className="variable-select"
popupClassName="dropdown-styles"
getPopupContainer={popupContainer}
options={getOptionsForDynamicVariable(
filteredOptionsData || [],
relatedValues || [],
)}
value={selectValue}
defaultValue={variableData.defaultValue}
errorMessage={errorMessage}
onSearch={handleSearch}
// eslint-disable-next-line sonarjs/no-identical-functions
onRetry={(): void => {
setErrorMessage(null);
setIsRetryableError(true);
refetch();
}}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
isDynamicVariable
showRetryButton={isRetryableError}
/>
)}
</div>
</div>
);
}
export default DynamicVariableSelection;

View File

@@ -1,11 +1,13 @@
import { memo, useCallback, useMemo, useState } from 'react';
import { memo, useCallback, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -13,18 +15,20 @@ import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';
type QueryVariableInputProps = Pick<
VariableItemProps,
| 'variableData'
| 'existingVariables'
| 'onValueUpdate'
| 'variablesToGetUpdated'
| 'setVariablesToGetUpdated'
| 'dependencyData'
>;
interface QueryVariableInputProps {
variableData: IDashboardVariable;
existingVariables: Record<string, IDashboardVariable>;
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function QueryVariableInput({
variableData,
@@ -52,15 +56,13 @@ function QueryVariableInput({
onChange,
onDropdownVisibleChange,
handleClear,
applyDefaultIfNeeded,
} = useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy: queryVariableSelectStrategy,
});
const validVariableUpdate = useCallback((): boolean => {
const validVariableUpdate = (): boolean => {
if (!variableData.name) {
return false;
}
@@ -68,100 +70,86 @@ function QueryVariableInput({
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
}, [variableData.name, variablesToGetUpdated]);
};
const getOptions = useCallback(
// eslint-disable-next-line sonarjs/cognitive-complexity
(variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
// eslint-disable-next-line sonarjs/cognitive-complexity
const getOptions = (variablesRes: VariableResponseProps | null): void => {
try {
setErrorMessage(null);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
if (
variablesRes?.variableValues &&
Array.isArray(variablesRes?.variableValues)
) {
const newOptionsData = sortValues(
variablesRes?.variableValues,
variableData.sort,
);
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
const oldOptionsData = sortValues(optionsData, variableData.sort) as never;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (!areArraysEqual(newOptionsData, oldOptionsData)) {
let valueNotInList = false;
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
valueNotInList = true;
}
if (isArray(variableData.selectedValue)) {
variableData.selectedValue.forEach((val) => {
if (!newOptionsData.includes(val)) {
valueNotInList = true;
}
});
} else if (
isString(variableData.selectedValue) &&
!newOptionsData.includes(variableData.selectedValue)
) {
valueNotInList = true;
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
setTempSelection(newOptionsData.map((option) => option.toString()));
}
} else {
const value = variableData.selectedValue;
let allSelected = false;
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.multiSelect) {
const { selectedValue } = variableData;
allSelected =
newOptionsData.length > 0 &&
Array.isArray(selectedValue) &&
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
if (variableData.name && variableData.id) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
setOptionsData(newOptionsData);
// Apply default if no value is selected (e.g., new variable, first load)
applyDefaultIfNeeded(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
setOptionsData(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
} catch (e) {
console.error(e);
}
},
[
variableData,
optionsData,
onValueUpdate,
tempSelection,
setTempSelection,
validVariableUpdate,
setVariablesToGetUpdated,
applyDefaultIfNeeded,
],
);
} catch (e) {
console.error(e);
}
};
const { isLoading, refetch } = useQuery(
[
@@ -174,6 +162,7 @@ function QueryVariableInput({
{
enabled:
variableData &&
variableData.type === 'QUERY' &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
@@ -218,19 +207,10 @@ function QueryVariableInput({
refetch();
}, [refetch]);
const selectOptions = useMemo(
() =>
optionsData.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[optionsData],
);
return (
<SelectVariableInput
variableId={variableData.id}
options={selectOptions}
options={optionsData}
value={value}
onChange={onChange}
onDropdownVisibleChange={onDropdownVisibleChange}
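For readers tracing the getOptions hunk above, the core decision is: keep ALL selected and re-emit the refreshed option list, or keep the current value and recompute whether it now covers every option. A reduced sketch of that reconciliation with simplified types (an illustration, not the hook's actual export):

type Primitive = string | number | boolean;

interface VariableLike {
  name: string;
  id: string;
  multiSelect?: boolean;
  showALLOption?: boolean;
  allSelected?: boolean;
  selectedValue?: Primitive | Primitive[];
}

function reconcileOptions(
  variable: VariableLike,
  newOptions: Primitive[],
  onValueUpdate: (
    name: string,
    id: string,
    value: Primitive | Primitive[] | undefined,
    allSelected: boolean,
  ) => void,
): void {
  if (variable.allSelected && variable.multiSelect && variable.showALLOption) {
    // ALL stays selected: pass the refreshed options so dependent panels re-query.
    onValueUpdate(variable.name, variable.id, newOptions, true);
    return;
  }

  // Otherwise keep the current value and recompute whether it now spans every option.
  const selected = variable.selectedValue;
  const allSelected =
    Boolean(variable.multiSelect) &&
    Array.isArray(selected) &&
    newOptions.length > 0 &&
    newOptions.every((option) => selected.includes(option));

  onValueUpdate(variable.name, variable.id, selected, allSelected);
}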

View File

@@ -3,7 +3,6 @@ import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Popover, Tooltip, Typography } from 'antd';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { OptionData } from 'components/NewSelect/types';
import { popupContainer } from 'utils/selectPopupContainer';
import { ALL_SELECT_VALUE } from '../utils';
@@ -13,7 +12,7 @@ const errorIconStyle = { margin: '0 0.5rem' };
interface SelectVariableInputProps {
variableId: string;
options: OptionData[];
options: (string | number | boolean)[];
value: string | string[] | undefined;
enableSelectAll: boolean;
isMultiSelect: boolean;
@@ -24,17 +23,13 @@ interface SelectVariableInputProps {
loading?: boolean;
errorMessage?: string | null;
onRetry?: () => void;
isDynamicVariable?: boolean;
showRetryButton?: boolean;
showIncompleteDataMessage?: boolean;
onSearch?: (searchTerm: string) => void;
}
const MAX_TAG_DISPLAY_VALUES = 10;
export const renderMaxTagPlaceholder = (
function maxTagPlaceholder(
omittedValues: { label?: React.ReactNode; value?: string | number }[],
): JSX.Element => {
): JSX.Element {
const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
const tooltipText =
@@ -46,7 +41,7 @@ export const renderMaxTagPlaceholder = (
<span>+ {omittedValues.length} </span>
</Tooltip>
);
};
}
function SelectVariableInput({
variableId,
@@ -61,11 +56,16 @@ function SelectVariableInput({
enableSelectAll,
isMultiSelect,
defaultValue,
isDynamicVariable,
showRetryButton,
showIncompleteDataMessage,
onSearch,
}: SelectVariableInputProps): JSX.Element {
const selectOptions = useMemo(
() =>
options.map((option) => ({
label: option.toString(),
value: option.toString(),
})),
[options],
);
const commonProps = useMemo(
() => ({
// main props
@@ -82,33 +82,23 @@ function SelectVariableInput({
showSearch: true,
bordered: false,
// changing props
// dynamic props
'data-testid': 'variable-select',
onChange,
loading,
options,
options: selectOptions,
errorMessage,
onRetry,
// dynamic variable only props
isDynamicVariable,
showRetryButton,
showIncompleteDataMessage,
onSearch,
}),
[
variableId,
defaultValue,
onChange,
loading,
options,
selectOptions,
value,
errorMessage,
onRetry,
isDynamicVariable,
showRetryButton,
showIncompleteDataMessage,
onSearch,
],
);
@@ -120,11 +110,11 @@ function SelectVariableInput({
placement="bottomLeft"
maxTagCount={2}
onDropdownVisibleChange={onDropdownVisibleChange}
maxTagPlaceholder={renderMaxTagPlaceholder}
maxTagPlaceholder={maxTagPlaceholder}
onClear={onClear}
enableAllSelection={enableSelectAll}
maxTagTextLength={30}
allowClear={value !== ALL_SELECT_VALUE}
allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
/>
) : (
<CustomSelect {...commonProps} />
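The hunk above moves the primitive-to-option mapping into SelectVariableInput itself. A minimal sketch of that mapping as a standalone hook, assuming antd-style { label, value } options (the hook name is illustrative):

import { useMemo } from 'react';

interface SelectOption {
  label: string;
  value: string;
}

// Memoizing on the input array gives the select a stable options reference,
// so it does not re-render when unrelated props change.
function useSelectOptions(options: (string | number | boolean)[]): SelectOption[] {
  return useMemo(
    () =>
      options.map((option) => ({
        label: option.toString(),
        value: option.toString(),
      })),
    [options],
  );
}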

View File

@@ -5,7 +5,6 @@ import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/da
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import CustomVariableInput from './CustomVariableInput';
import DynamicVariableInput from './DynamicVariableInput';
import QueryVariableInput from './QueryVariableInput';
import TextboxVariableInput from './TextboxVariableInput';
@@ -17,9 +16,8 @@ export interface VariableItemProps {
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
arg1: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
@@ -70,13 +68,6 @@ function VariableItem({
dependencyData={dependencyData}
/>
)}
{variableType === 'DYNAMIC' && (
<DynamicVariableInput
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
/>
)}
</div>
</div>
);

View File

@@ -5,7 +5,7 @@ import * as ReactRedux from 'react-redux';
import { fireEvent, render, screen } from '@testing-library/react';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DynamicVariableInput';
import DynamicVariableSelection from '../DynamicVariableSelection';
// Don't mock the components - use real ones
@@ -54,7 +54,7 @@ const mockApiResponse = {
// Mock scrollIntoView since it's not available in JSDOM
window.HTMLElement.prototype.scrollIntoView = jest.fn();
describe('DynamicVariableInput Component', () => {
describe('DynamicVariableSelection Component', () => {
const mockOnValueUpdate = jest.fn();
const mockDynamicVariableData: IDashboardVariable = {
@@ -108,13 +108,18 @@ describe('DynamicVariableInput Component', () => {
it('renders with single select variable correctly', () => {
render(
<DynamicVariableInput
<DynamicVariableSelection
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify component renders correctly
expect(
screen.getByText(`$${mockDynamicVariableData.name}`),
).toBeInTheDocument();
// Verify the selected value is displayed
const selectedItem = screen.getByRole('combobox');
expect(selectedItem).toBeInTheDocument();
@@ -131,13 +136,18 @@ describe('DynamicVariableInput Component', () => {
};
render(
<DynamicVariableInput
<DynamicVariableSelection
variableData={multiSelectWithAllSelected}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify variable name is rendered
expect(
screen.getByText(`$${multiSelectWithAllSelected.name}`),
).toBeInTheDocument();
// In ALL selected mode, there should be an "ALL" text element
expect(screen.getByText('ALL')).toBeInTheDocument();
});
@@ -154,13 +164,18 @@ describe('DynamicVariableInput Component', () => {
});
render(
<DynamicVariableInput
<DynamicVariableSelection
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify component renders in loading state
expect(
screen.getByText(`$${mockDynamicVariableData.name}`),
).toBeInTheDocument();
// Open dropdown to see loading text
const selectElement = screen.getByRole('combobox');
fireEvent.mouseDown(selectElement);
@@ -184,13 +199,18 @@ describe('DynamicVariableInput Component', () => {
});
render(
<DynamicVariableInput
<DynamicVariableSelection
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify the component renders
expect(
screen.getByText(`$${mockDynamicVariableData.name}`),
).toBeInTheDocument();
// For error states, we should check that error handling is in place
// Without opening the dropdown as the error message might be handled differently
expect(ReactQuery.useQuery).toHaveBeenCalled();
@@ -199,7 +219,7 @@ describe('DynamicVariableInput Component', () => {
it('makes API call to fetch variable values', () => {
render(
<DynamicVariableInput
<DynamicVariableSelection
variableData={mockDynamicVariableData}
existingVariables={mockExistingVariables}
onValueUpdate={mockOnValueUpdate}
@@ -215,8 +235,6 @@ describe('DynamicVariableInput Component', () => {
'2023-01-01T00:00:00Z', // minTime from useSelector mock
'2023-01-02T00:00:00Z', // maxTime from useSelector mock
'',
'Traces',
'service.name',
],
expect.objectContaining({
enabled: true, // Type is 'DYNAMIC'
@@ -237,13 +255,16 @@ describe('DynamicVariableInput Component', () => {
};
render(
<DynamicVariableInput
<DynamicVariableSelection
variableData={customVariable}
existingVariables={{ ...mockExistingVariables, custom1: customVariable }}
onValueUpdate={mockOnValueUpdate}
/>,
);
// Verify the component correctly displays the selected value
expect(screen.getByText(`$${customVariable.name}`)).toBeInTheDocument();
// Find the selection item in the component using data-testid
const selectElement = screen.getByTestId('variable-select');
expect(selectElement).toBeInTheDocument();

View File

@@ -63,10 +63,10 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(await screen.findByText('option1')).toBeInTheDocument();
expect(screen.getByText('option1')).toBeInTheDocument();
});
test('should auto-select first option when no previous and no default', async () => {
test('should show placeholder when no previous and no default', async () => {
const variable: IDashboardVariable = {
id: TEST_VARIABLE_ID,
name: TEST_VARIABLE_NAME,
@@ -85,8 +85,7 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
// With the new variable select strategy, the first option is auto-selected
expect(await screen.findByText('option1')).toBeInTheDocument();
expect(screen.getByText('Select value')).toBeInTheDocument();
});
});
@@ -111,7 +110,7 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(await screen.findByText('ALL')).toBeInTheDocument();
expect(screen.getByText('ALL')).toBeInTheDocument();
});
});
@@ -135,7 +134,7 @@ describe('VariableItem Default Value Selection Behavior', () => {
expect(screen.getByTestId(VARIABLE_SELECT_TESTID)).toBeInTheDocument();
});
expect(await screen.findByText('Select value')).toBeInTheDocument();
expect(screen.getByText('Select value')).toBeInTheDocument();
});
});
});
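On the assertion swaps above: Testing Library's findByText returns a promise and waits for the element to appear, while getByText throws synchronously if the text is not already rendered, so the change encodes whether the value is expected immediately or only after an async options load. In miniature (assumes the jest-dom matchers already used in these tests):

import { screen } from '@testing-library/react';

// Waits for the text to appear; suits values rendered after async option loading.
async function expectEventually(text: string): Promise<void> {
  expect(await screen.findByText(text)).toBeInTheDocument();
}

// Fails immediately if the text is not already in the DOM; suits synchronous renders.
function expectNow(text: string): void {
  expect(screen.getByText(text)).toBeInTheDocument();
}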

View File

@@ -1,14 +1,8 @@
import { useCallback, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { isEmpty } from 'lodash-es';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { ALL_SELECT_VALUE } from '../utils';
import { areArraysEqual, getSelectValue } from './util';
import { VariableSelectStrategy } from './variableSelectStrategy/variableSelectStrategyTypes';
import {
areArraysEqualIgnoreOrder,
uniqueValues,
} from './variableSelectStrategy/variableSelectStrategyUtils';
interface UseDashboardVariableSelectHelperParams {
variableData: IDashboardVariable;
@@ -18,11 +12,7 @@ interface UseDashboardVariableSelectHelperParams {
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
strategy: VariableSelectStrategy;
/** Override for all available option strings (default: optionsData.map(String)) */
allAvailableOptionStrings?: string[];
}
interface UseDashboardVariableSelectHelperReturn {
@@ -41,11 +31,6 @@ interface UseDashboardVariableSelectHelperReturn {
onChange: (value: string | string[]) => void;
onDropdownVisibleChange: (visible: boolean) => void;
handleClear: () => void;
// Default value helpers
applyDefaultIfNeeded: (
overrideOptions?: (string | number | boolean)[],
) => void;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
@@ -53,8 +38,6 @@ export function useDashboardVariableSelectHelper({
variableData,
optionsData,
onValueUpdate,
strategy,
allAvailableOptionStrings,
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
const { selectedValue } = variableData;
@@ -69,37 +52,11 @@ export function useDashboardVariableSelectHelper({
const enableSelectAll = variableData.multiSelect && variableData.showALLOption;
const effectiveAllAvailableOptionStrings = useMemo(
() => allAvailableOptionStrings ?? optionsData.map((v) => v.toString()),
[allAvailableOptionStrings, optionsData],
);
const selectValue =
variableData.allSelected && enableSelectAll
? ALL_SELECT_VALUE
? 'ALL'
: selectedValueStringified;
const getDefaultValue = useCallback(
(overrideOptions?: (string | number | boolean)[]) => {
const options = overrideOptions || optionsData;
if (variableData.multiSelect) {
if (variableData.showALLOption) {
return variableData.defaultValue || options.map((o) => o.toString());
}
return variableData.defaultValue || options?.[0]?.toString();
}
return variableData.defaultValue || options[0]?.toString();
},
[
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
optionsData,
],
);
const defaultValue = useMemo(() => getDefaultValue(), [getDefaultValue]);
const handleChange = useCallback(
(inputValue: string | string[]): void => {
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
@@ -112,21 +69,29 @@ export function useDashboardVariableSelectHelper({
) {
return;
}
if (variableData.name) {
// Check if ALL is effectively selected by comparing with available options
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
strategy.handleChange({
value,
variableData,
optionsData,
allAvailableOptionStrings: effectiveAllAvailableOptionStrings,
onValueUpdate,
});
if (isAllSelected && variableData.showALLOption) {
// For ALL selection, pass optionsData as the value and set allSelected to true
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
}
},
[
variableData,
optionsData,
effectiveAllAvailableOptionStrings,
variableData.multiSelect,
variableData.selectedValue,
variableData.name,
variableData.id,
variableData.showALLOption,
onValueUpdate,
strategy,
optionsData,
],
);
@@ -134,96 +99,79 @@ export function useDashboardVariableSelectHelper({
(inputValue: string | string[]): void => {
// Store the selection in temporary state while dropdown is open
const value = variableData.multiSelect && !inputValue ? [] : inputValue;
setTempSelection(uniqueValues(value));
setTempSelection(value);
},
[variableData.multiSelect],
);
// Single select onChange: apply default if value is empty
const handleSingleSelectChange = useCallback(
(inputValue: string | string[]): void => {
if (isEmpty(inputValue)) {
if (defaultValue !== undefined) {
handleChange(defaultValue as string | string[]);
// Apply default value on first render if no selection exists
const finalSelectedValues = useMemo(() => {
if (variableData.multiSelect) {
let value = tempSelection || selectedValue;
if (isEmpty(value)) {
if (variableData.showALLOption) {
if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData;
}
} else if (variableData.defaultValue) {
value = variableData.defaultValue;
} else {
value = optionsData?.[0];
}
return;
}
handleChange(inputValue);
},
[handleChange, defaultValue],
);
return value;
}
if (isEmpty(selectedValue)) {
if (variableData.defaultValue) {
return variableData.defaultValue;
}
return optionsData[0]?.toString();
}
return selectedValue;
}, [
variableData.multiSelect,
variableData.showALLOption,
variableData.defaultValue,
selectedValue,
tempSelection,
optionsData,
]);
// Apply default values when needed
useEffect(() => {
if (
(variableData.multiSelect && !(tempSelection || selectValue)) ||
isEmpty(selectValue)
) {
handleChange(finalSelectedValues as string[] | string);
}
}, [
finalSelectedValues,
handleChange,
selectValue,
tempSelection,
variableData.multiSelect,
]);
// Handle dropdown visibility changes
const onDropdownVisibleChange = useCallback(
(visible: boolean): void => {
// Initialize temp selection when opening dropdown
if (visible) {
if (variableData.allSelected && enableSelectAll) {
// When ALL is selected, show all available options as individually checked
setTempSelection([...effectiveAllAvailableOptionStrings]);
} else {
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
setTempSelection(getSelectValue(variableData.selectedValue, variableData));
}
// Apply changes when closing dropdown
else if (!visible && tempSelection !== undefined) {
// If ALL was selected before AND all options remain selected, skip updating
const wasAllSelected = enableSelectAll && variableData.allSelected;
const isAllSelectedAfter =
enableSelectAll &&
Array.isArray(tempSelection) &&
tempSelection.length === effectiveAllAvailableOptionStrings.length &&
effectiveAllAvailableOptionStrings.every((v) => tempSelection.includes(v));
if (wasAllSelected && isAllSelectedAfter) {
setTempSelection(undefined);
return;
}
// Apply default if closing with empty selection
let valueToApply = tempSelection;
if (isEmpty(tempSelection) && defaultValue !== undefined) {
valueToApply = defaultValue as string | string[];
}
// Order-agnostic change detection
const currentValue = variableData.selectedValue;
const hasChanged =
valueToApply !== currentValue &&
!(
Array.isArray(valueToApply) &&
Array.isArray(currentValue) &&
areArraysEqualIgnoreOrder(valueToApply, currentValue)
);
if (hasChanged) {
handleChange(valueToApply);
}
// Call handleChange with the temporarily stored selection
handleChange(tempSelection);
setTempSelection(undefined);
}
},
[
variableData,
enableSelectAll,
effectiveAllAvailableOptionStrings,
tempSelection,
handleChange,
defaultValue,
],
);
// Explicit function for callers to apply default on mount / data load
// Pass overrideOptions when freshly-loaded options aren't in state yet (async callers)
const applyDefaultIfNeeded = useCallback(
(overrideOptions?: (string | number | boolean)[]): void => {
if (isEmpty(selectValue)) {
const defaultValueFromOptions = getDefaultValue(overrideOptions);
if (defaultValueFromOptions !== undefined) {
handleChange(defaultValueFromOptions as string | string[]);
}
}
},
[selectValue, handleChange, getDefaultValue],
[variableData, tempSelection, handleChange],
);
const handleClear = useCallback((): void => {
@@ -234,9 +182,11 @@ export function useDashboardVariableSelectHelper({
? tempSelection || selectValue
: selectValue;
const defaultValue = variableData.defaultValue || selectValue;
const onChange = useMemo(() => {
return variableData.multiSelect ? handleTempChange : handleSingleSelectChange;
}, [variableData.multiSelect, handleTempChange, handleSingleSelectChange]);
return variableData.multiSelect ? handleTempChange : handleChange;
}, [variableData.multiSelect, handleTempChange, handleChange]);
return {
tempSelection,
@@ -247,6 +197,5 @@ export function useDashboardVariableSelectHelper({
value,
defaultValue,
onChange,
applyDefaultIfNeeded,
};
}
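The hook above stages multi-select changes in tempSelection while the dropdown is open and commits them once when it closes. A stripped-down sketch of that commit-on-close pattern with simplified state (illustrative names, not the hook itself):

import { useCallback, useState } from 'react';

// Commit-on-close: intermediate clicks only touch local state; the expensive
// onValueUpdate (and the panel refetches it triggers) runs once, when the dropdown closes.
function useCommitOnClose(
  commit: (value: string[]) => void,
): {
  tempSelection: string[] | undefined;
  onTempChange: (value: string[]) => void;
  onDropdownVisibleChange: (visible: boolean, currentValue: string[]) => void;
} {
  const [tempSelection, setTempSelection] = useState<string[] | undefined>(undefined);

  const onTempChange = useCallback((value: string[]): void => {
    setTempSelection(value);
  }, []);

  const onDropdownVisibleChange = useCallback(
    (visible: boolean, currentValue: string[]): void => {
      if (visible) {
        setTempSelection(currentValue); // seed the buffer with the committed value
      } else if (tempSelection !== undefined) {
        commit(tempSelection); // apply the buffered selection once
        setTempSelection(undefined);
      }
    },
    [commit, tempSelection],
  );

  return { tempSelection, onTempChange, onDropdownVisibleChange };
}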

View File

@@ -363,6 +363,25 @@ export const uniqueOptions = (options: OptionData[]): OptionData[] => {
return uniqueOptions;
};
export const uniqueValues = (values: string[] | string): string[] | string => {
if (Array.isArray(values)) {
const uniqueValues: string[] = [];
const seenValues = new Set<string>();
values.forEach((value) => {
if (seenValues.has(value)) {
return;
}
seenValues.add(value);
uniqueValues.push(value);
});
return uniqueValues;
}
return values;
};
export const getSelectValue = (
selectedValue: IDashboardVariable['selectedValue'],
variableData: IDashboardVariable,
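A quick usage note on the uniqueValues helper added above: it de-duplicates while preserving first-seen order and passes a plain string through unchanged. For example:

uniqueValues(['frontend', 'backend', 'frontend']); // ['frontend', 'backend']
uniqueValues('frontend'); // 'frontend'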

View File

@@ -1,4 +0,0 @@
import { defaultVariableSelectStrategy } from './defaultVariableSelectStrategy';
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const customVariableSelectStrategy: VariableSelectStrategy = defaultVariableSelectStrategy;

View File

@@ -1,20 +0,0 @@
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const defaultVariableSelectStrategy: VariableSelectStrategy = {
handleChange({ value, variableData, optionsData, onValueUpdate }) {
if (!variableData.name) {
return;
}
const isAllSelected =
Array.isArray(value) &&
value.length > 0 &&
optionsData.every((option) => value.includes(option.toString()));
if (isAllSelected && variableData.showALLOption) {
onValueUpdate(variableData.name, variableData.id, optionsData, true);
} else {
onValueUpdate(variableData.name, variableData.id, value, false);
}
},
};

View File

@@ -1,37 +0,0 @@
import { ALL_SELECT_VALUE } from 'container/DashboardContainer/utils';
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const dynamicVariableSelectStrategy: VariableSelectStrategy = {
handleChange({
value,
variableData,
allAvailableOptionStrings,
onValueUpdate,
}) {
if (!variableData.name) {
return;
}
if (
value === ALL_SELECT_VALUE ||
(Array.isArray(value) && value.includes(ALL_SELECT_VALUE))
) {
onValueUpdate(variableData.name, variableData.id, null, true);
} else {
// For ALL selection in dynamic variables, pass null to avoid storing values
// The parent component will handle this appropriately
const haveCustomValuesSelected =
Array.isArray(value) &&
!value.every((v) => allAvailableOptionStrings.includes(v.toString()));
onValueUpdate(
variableData.name,
variableData.id,
value,
allAvailableOptionStrings.every((v) => value.includes(v.toString())),
haveCustomValuesSelected,
);
}
},
};
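One behaviour of the removed dynamic strategy worth spelling out: besides mapping the ALL sentinel to a null value, it flags selections that contain values absent from the fetched options, i.e. typed-in custom values. A reduced sketch of that check (the function name is illustrative):

// True when at least one selected value was typed in by the user rather than
// picked from the fetched option list.
function hasCustomValues(selected: string[], availableOptions: string[]): boolean {
  return !selected.every((value) => availableOptions.includes(value));
}

// With fetched options ['frontend', 'backend']:
// hasCustomValues(['frontend'], ['frontend', 'backend'])              -> false
// hasCustomValues(['frontend', 'my-canary'], ['frontend', 'backend']) -> true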

View File

@@ -1,4 +0,0 @@
import { defaultVariableSelectStrategy } from './defaultVariableSelectStrategy';
import { VariableSelectStrategy } from './variableSelectStrategyTypes';
export const queryVariableSelectStrategy: VariableSelectStrategy = defaultVariableSelectStrategy;

View File

@@ -1,17 +0,0 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: IDashboardVariable;
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}

View File

@@ -1,32 +0,0 @@
import { isEqual } from 'lodash-es';
export const areArraysEqualIgnoreOrder = (
a: (string | number | boolean)[],
b: (string | number | boolean)[],
): boolean => {
if (a.length !== b.length) {
return false;
}
const sortedA = [...a].sort();
const sortedB = [...b].sort();
return isEqual(sortedA, sortedB);
};
export const uniqueValues = (values: string[] | string): string[] | string => {
if (Array.isArray(values)) {
const uniqueValues: string[] = [];
const seenValues = new Set<string>();
values.forEach((value) => {
if (seenValues.has(value)) {
return;
}
seenValues.add(value);
uniqueValues.push(value);
});
return uniqueValues;
}
return values;
};
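For reference, the removed areArraysEqualIgnoreOrder compares by length plus sorted contents, so order is ignored but duplicates still matter:

areArraysEqualIgnoreOrder(['a', 'b'], ['b', 'a']); // true
areArraysEqualIgnoreOrder(['a', 'b'], ['a']); // false - lengths differ
areArraysEqualIgnoreOrder(['a', 'a', 'b'], ['a', 'b', 'b']); // false - sorted contents differ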

View File

@@ -2,7 +2,7 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable react/jsx-props-no-spreading */
import React from 'react';
import { QueryClient, QueryClientProvider, useQuery } from 'react-query';
import { QueryClient, QueryClientProvider } from 'react-query';
import * as ReactRedux from 'react-redux';
import {
act,
@@ -15,22 +15,13 @@ import {
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';
import DynamicVariableSelection from '../DashboardVariablesSelection/DynamicVariableSelection';
// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
getFieldValues: jest.fn(),
}));
// Mock useQuery from react-query
jest.mock('react-query', () => {
const originalModule = jest.requireActual('react-query');
return {
...originalModule,
useQuery: jest.fn(),
};
});
describe('Dynamic Variable Default Behavior', () => {
const mockOnValueUpdate = jest.fn();
const mockApiResponse = {
@@ -68,46 +59,6 @@ describe('Dynamic Variable Default Behavior', () => {
// Mock getFieldValues API to return our test data
(getFieldValues as jest.Mock).mockResolvedValue(mockApiResponse);
// Mock useQuery implementation to avoid infinite re-renders
// and ensure onSuccess is called once
(useQuery as jest.Mock).mockImplementation((key, options) => {
const { onSuccess, enabled, queryFn } = options || {};
const variableName = key[1];
const dynamicVarsKey = key[2];
React.useEffect(() => {
if (enabled !== false) {
if (onSuccess) {
// For 'services' tests:
// 1. "Default to ALL" expectations imply empty options -> [] behavior. This happens when selectedValue is undefined (dynamicVarsKey has 'null').
// 2. "ALL Option Special Value" needs full options to render the "ALL" item in dropdown. This happens when selectedValue is defined.
if (
variableName === 'services' &&
typeof dynamicVarsKey === 'string' &&
dynamicVarsKey.includes('null')
) {
onSuccess({
...mockApiResponse,
data: { ...mockApiResponse.data, normalizedValues: [] },
});
} else {
onSuccess(mockApiResponse);
}
}
if (queryFn) {
queryFn();
}
}
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
return {
isLoading: false,
isError: false,
data: mockApiResponse,
refetch: jest.fn(),
};
});
jest.spyOn(ReactRedux, 'useSelector').mockReturnValue({
minTime: '2023-01-01T00:00:00Z',
maxTime: '2023-01-02T00:00:00Z',
@@ -133,7 +84,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -169,7 +120,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -213,7 +164,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -221,6 +172,9 @@ describe('Dynamic Variable Default Behavior', () => {
);
});
// Component should render without errors
expect(screen.getByText('$service')).toBeInTheDocument();
// Check if the dropdown is present
const selectElement = screen.getByRole('combobox');
expect(selectElement).toBeInTheDocument();
@@ -278,7 +232,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -313,7 +267,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -339,7 +293,7 @@ describe('Dynamic Variable Default Behavior', () => {
expect(screen.queryByText('backend')).not.toBeInTheDocument();
});
it('sahould default to ALL when no default and no previous selection', async () => {
it('should default to ALL when no default and no previous selection', async () => {
const variableData: IDashboardVariable = {
id: 'var21',
name: 'services',
@@ -357,7 +311,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -391,7 +345,7 @@ describe('Dynamic Variable Default Behavior', () => {
expect(mockOnValueUpdate).toHaveBeenCalledWith(
'services',
'var21',
[],
[], // Empty array when allSelected is true
true, // allSelected = true
false,
);
@@ -417,7 +371,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -454,7 +408,7 @@ describe('Dynamic Variable Default Behavior', () => {
await act(async () => {
renderWithQueryClient(
<DynamicVariableInput
<DynamicVariableSelection
variableData={variableData}
existingVariables={{ var1: variableData }}
onValueUpdate={mockOnValueUpdate}
@@ -462,6 +416,9 @@ describe('Dynamic Variable Default Behavior', () => {
);
});
// Component should render without errors
expect(screen.getByText('$services')).toBeInTheDocument();
// Check if ALL is displayed in the UI (in the main selection area)
const allTextElement = screen.getByText('ALL');
expect(allTextElement).toBeInTheDocument();
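The mock deleted in this file used the standard partial-mock pattern for react-query: keep the real module via jest.requireActual and replace only useQuery. A minimal form of that pattern, with the canned response shape kept illustrative:

import { useQuery } from 'react-query';

// Keep QueryClient, QueryClientProvider, etc. real; stub only useQuery.
jest.mock('react-query', () => ({
  ...jest.requireActual('react-query'),
  useQuery: jest.fn(),
}));

// Then, in a test body, drive the component under test with a fixed result:
(useQuery as jest.Mock).mockReturnValue({
  isLoading: false,
  isError: false,
  data: { normalizedValues: ['frontend', 'backend'] }, // shape is illustrative
  refetch: jest.fn(),
});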

View File

@@ -33,8 +33,8 @@ import { useChartMutable } from 'hooks/useChartMutable';
import useComponentPermission from 'hooks/useComponentPermission';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import GetMinMax from 'lib/getMinMax';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';

View File

@@ -8,8 +8,8 @@ import { populateMultipleResults } from 'container/NewWidget/LeftContainer/Widge
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';

View File

@@ -6,7 +6,7 @@ import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { AppState } from 'store/reducers';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

View File

@@ -103,7 +103,7 @@ export const getTotalLogSizeWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.SUM,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -140,7 +140,7 @@ export const getTotalTraceSizeWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.SUM,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -177,7 +177,7 @@ export const getTotalMetricDatapointCountWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.SUM,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -214,7 +214,7 @@ export const getLogCountWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.AVG,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -251,7 +251,7 @@ export const getLogSizeWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.AVG,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -288,7 +288,7 @@ export const getSpanCountWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.AVG,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -325,7 +325,7 @@ export const getSpanSizeWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.AVG,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],
@@ -362,7 +362,7 @@ export const getMetricCountWidgetData = (): Widgets =>
queryName: 'A',
reduceTo: ReduceOperators.AVG,
spaceAggregation: 'sum',
stepInterval: null,
stepInterval: 60,
timeAggregation: 'increase',
},
],

View File

@@ -27,8 +27,8 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';

View File

@@ -323,6 +323,7 @@
"java instrumentation",
"java k8s",
"java logs",
"java metrics",
"java microservices",
"java monitoring",
"java observability",
@@ -331,12 +332,10 @@
"java vm",
"java windows",
"jboss",
"wildfly",
"jdk 11",
"jdk 17",
"jdk 21",
"jdk 8",
"jdbc",
"opentelemetry java",
"quarkus",
"spring boot",
@@ -373,12 +372,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-springboot/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-springboot/"
},
{
"key": "windows",
"label": "Windows",
@@ -410,12 +403,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-tomcat/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-tomcat/"
},
{
"key": "windows",
"label": "Windows",
@@ -427,7 +414,7 @@
},
{
"key": "jboss",
"label": "JBoss/WildFly",
"label": "JBoss",
"imgUrl": "/Logos/jboss.svg",
"link": "/docs/instrumentation/opentelemetry-jboss/",
"question": {
@@ -447,12 +434,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-jboss/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-jboss/"
},
{
"key": "windows",
"label": "Windows",
@@ -484,12 +465,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-quarkus/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-quarkus/"
},
{
"key": "windows",
"label": "Windows",
@@ -520,18 +495,6 @@
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-java/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-java/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-java/"
}
]
}
@@ -567,6 +530,7 @@
"python in containers",
"python instrumentation",
"python k8s",
"python metrics",
"python microservices",
"python observability",
"python on kubernetes",
@@ -619,6 +583,7 @@
],
"module": "apm",
"relatedSearchKeywords": [
"angular",
"apm",
"apm/traces",
"application performance monitoring",
@@ -630,6 +595,7 @@
"next.js",
"nodejs",
"nuxtjs",
"reactjs",
"traces",
"tracing",
"ts",
@@ -644,8 +610,8 @@
"entityID": "framework",
"options": [
{
"key": "nodejs-nestjs",
"label": "NodeJs/NestJS",
"key": "nodejs",
"label": "NodeJs",
"imgUrl": "/Logos/nodejs.svg",
"link": "/docs/instrumentation/opentelemetry-javascript/",
"question": {
@@ -666,16 +632,103 @@
"link": "/docs/instrumentation/opentelemetry-javascript/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-javascript/"
}
]
}
},
{
"key": "express",
"label": "Express",
"imgUrl": "/Logos/express.svg",
"link": "/docs/instrumentation/opentelemetry-express/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-express/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-express/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-javascript/"
"link": "/docs/instrumentation/opentelemetry-express/"
}
]
}
},
{
"key": "nestjs",
"label": "NestJS",
"imgUrl": "/Logos/nestjs.svg",
"link": "/docs/instrumentation/opentelemetry-nestjs/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-nestjs/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-nestjs/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-nestjs/"
}
]
}
},
{
"key": "angular",
"label": "Angular",
"imgUrl": "/Logos/angular.svg",
"link": "/docs/instrumentation/opentelemetry-angular/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-angular/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-angular/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-angular/"
}
]
}
@@ -703,16 +756,41 @@
"link": "/docs/instrumentation/opentelemetry-nextjs/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-nextjs/"
}
]
}
},
{
"key": "reactjs",
"label": "ReactJS",
"imgUrl": "/Logos/reactjs.svg",
"link": "/docs/instrumentation/opentelemetry-reactjs/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-reactjs/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-reactjs/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-nextjs/"
"link": "/docs/instrumentation/opentelemetry-reactjs/"
}
]
}
@@ -739,12 +817,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-nuxtjs/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-nuxtjs/"
},
{
"key": "windows",
"label": "Windows",
@@ -758,7 +830,32 @@
"key": "react-native",
"label": "React Native",
"imgUrl": "/Logos/reactjs.svg",
"link": "/docs/instrumentation/opentelemetry-react-native/"
"link": "/docs/instrumentation/opentelemetry-react-native/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-react-native/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-react-native/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-react-native/"
}
]
}
}
]
}
@@ -792,6 +889,7 @@
"golang",
"golang apm",
"golang instrumentation",
"golang metrics",
"golang observability",
"golang performance monitoring",
"golang tracing",
@@ -882,39 +980,69 @@
"php-fpm monitoring",
"slim php",
"traces",
"tracing",
"wordpress"
"tracing"
],
"id": "php",
"link": "/docs/instrumentation/opentelemetry-php/",
"question": {
"desc": "What is your Environment?",
"desc": "Which PHP framework do you use?",
"type": "select",
"entityID": "environment",
"entityID": "framework",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
"key": "laravel",
"label": "Laravel",
"imgUrl": "/Logos/laravel.svg",
"link": "/docs/instrumentation/laravel/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/laravel/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/laravel/"
}
]
}
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
"key": "Others",
"label": "Others",
"imgUrl": "/Logos/php-others.svg",
"link": "/docs/instrumentation/opentelemetry-php/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/instrumentation/opentelemetry-php/"
}
]
}
}
]
}
@@ -989,12 +1117,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-dotnet/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-dotnet/"
},
{
"key": "windows",
"label": "Windows",
@@ -1006,7 +1128,7 @@
},
{
"dataSource": "ruby-on-rails",
"label": "Ruby",
"label": "Ruby on Rails",
"imgUrl": "/Logos/ruby-on-rails.svg",
"tags": [
"apm/traces"
@@ -1046,12 +1168,9 @@
"ruby tracing",
"ruby-on-rails",
"traces",
"tracing",
"sinatra",
"sidekiq",
"resque"
"tracing"
],
"id": "ruby",
"id": "ruby-on-rails",
"link": "/docs/instrumentation/opentelemetry-ruby-on-rails/",
"question": {
"desc": "What is your Environment?",
@@ -1070,12 +1189,6 @@
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/instrumentation/opentelemetry-ruby-on-rails/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/instrumentation/opentelemetry-ruby-on-rails/"
},
{
"key": "windows",
"label": "Windows",
@@ -1355,6 +1468,41 @@
"id": "nginx-tracing",
"link": "/docs/instrumentation/opentelemetry-nginx/"
},
{
"dataSource": "opentelemetry-wordpress",
"label": "WordPress",
"imgUrl": "/Logos/wordpress.svg",
"tags": [
"apm/traces"
],
"module": "apm",
"relatedSearchKeywords": [
"apm",
"apm/traces",
"application performance monitoring",
"monitor wordpress site",
"opentelemetry",
"opentelemetry wordpress",
"opentelemetry-wordpress",
"otel",
"otel wordpress",
"traces",
"tracing",
"wordpress",
"wordpress apm",
"wordpress instrumentation",
"wordpress metrics",
"wordpress monitoring",
"wordpress observability",
"wordpress performance",
"wordpress php monitoring",
"wordpress plugin monitoring",
"wordpress to signoz",
"wordpress tracing"
],
"id": "opentelemetry-wordpress",
"link": "/docs/instrumentation/opentelemetry-wordpress/"
},
{
"dataSource": "opentelemetry-cloudflare",
"label": "Cloudflare Tracing",
@@ -1419,25 +1567,6 @@
"id": "opentelemetry-cloudflare-logs",
"link": "/docs/logs-management/send-logs/cloudflare-logs/"
},
{
"dataSource": "convex-logs",
"label": "Convex Logs",
"imgUrl": "/Logos/convex-logo.svg",
"tags": [
"logs"
],
"module": "logs",
"relatedSearchKeywords": [
"convex",
"convex log streaming",
"convex logs",
"convex webhook",
"logging",
"logs"
],
"id": "convex-logs",
"link": "/docs/logs-management/send-logs/convex-log-streams-signoz/"
},
{
"dataSource": "kubernetes-pod-logs",
"label": "Kubernetes Pod Logs",
@@ -4439,7 +4568,6 @@
],
"module": "metrics",
"relatedSearchKeywords": [
"browser metrics",
"core web vitals metrics",
"frontend metrics",
"frontend monitoring",
@@ -4459,7 +4587,7 @@
"svelte",
"sveltekit",
"nextjs",
"next.js"
"next.js"
],
"link": "/docs/frontend-monitoring/web-vitals-with-metrics/"
},
@@ -4494,7 +4622,7 @@
"svelte",
"sveltekit",
"nextjs",
"next.js"
"next.js"
],
"link": "/docs/frontend-monitoring/web-vitals-with-traces/"
},
@@ -4536,7 +4664,7 @@
"svelte",
"sveltekit",
"nextjs",
"next.js"
"next.js"
],
"link": "/docs/frontend-monitoring/document-load/"
},
@@ -5188,6 +5316,23 @@
],
"link": "/docs/metrics-management/mysql-metrics/"
},
{
"dataSource": "jvm",
"label": "JVM",
"imgUrl": "/Logos/java.svg",
"tags": [
"metrics"
],
"module": "metrics",
"relatedSearchKeywords": [
"java virtual machine",
"jvm",
"jvm metrics",
"jvm monitoring",
"runtime"
],
"link": "/docs/tutorial/jvm-metrics/"
},
{
"dataSource": "jmx",
"label": "JMX",
@@ -5203,7 +5348,7 @@
"jmx monitoring",
"runtime"
],
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-java/jmx-metrics/"
"link": "/docs/tutorial/jmx-metrics/"
},
{
"dataSource": "prometheus-metrics",
@@ -5429,17 +5574,16 @@
"svelte",
"sveltekit",
"nextjs",
"next.js"
"next.js"
],
"link": "/docs/frontend-monitoring/sending-logs-with-opentelemetry/"
},
{
"dataSource": "frontend-traces",
"label": "Frontend Tracing",
"label": "Frontend Traces",
"imgUrl": "/Logos/traces.svg",
"tags": [
"Frontend Monitoring",
"apm/traces"
"Frontend Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
@@ -5461,7 +5605,7 @@
"svelte",
"sveltekit",
"nextjs",
"next.js"
"next.js"
],
"link": "/docs/frontend-monitoring/sending-traces-with-opentelemetry/"
},
@@ -5492,7 +5636,7 @@
"svelte",
"sveltekit",
"nextjs",
"next.js"
"next.js"
],
"link": "/docs/frontend-monitoring/sending-metrics-with-opentelemetry/"
},
@@ -5539,6 +5683,7 @@
"label": "Golang Metrics",
"imgUrl": "/Logos/go.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",
@@ -5592,64 +5737,12 @@
]
}
},
{
"dataSource": "java-metrics",
"label": "Java Metrics",
"imgUrl": "/Logos/java.svg",
"tags": [
"metrics"
],
"module": "metrics",
"relatedSearchKeywords": [
"java",
"java metrics",
"java monitoring",
"java observability",
"jvm metrics",
"metrics",
"opentelemetry java",
"otel java",
"runtime metrics"
],
"id": "java-metrics",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-java/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-java/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-java/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-java/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-java/"
}
]
}
},
{
"dataSource": "python-metrics",
"label": "Python Metrics",
"imgUrl": "/Logos/python.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",
@@ -5703,6 +5796,7 @@
"label": ".NET Metrics",
"imgUrl": "/Logos/dotnet.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",
@@ -5759,6 +5853,7 @@
"label": "Node.js Metrics",
"imgUrl": "/Logos/nodejs.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",

View File

@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { FilterOutlined, VerticalAlignTopOutlined } from '@ant-design/icons';
import { FilterOutlined } from '@ant-design/icons';
import { Button, Tooltip } from 'antd';
import cx from 'classnames';
import { Atom, Binoculars, SquareMousePointer, Terminal } from 'lucide-react';
@@ -32,7 +32,6 @@ export default function LeftToolbarActions({
<Tooltip title="Show Filters">
<Button onClick={handleFilterVisibilityChange} className="filter-btn">
<FilterOutlined />
<VerticalAlignTopOutlined rotate={90} />
</Button>
</Tooltip>
)}

View File

@@ -7,7 +7,7 @@
align-items: center;
justify-content: center;
box-shadow: none;
width: 40px;
width: 32px;
height: 32px;
margin-right: 12px;
border: 1px solid var(--bg-slate-400);

View File

@@ -95,6 +95,7 @@ function ResourceAttributesFilter({
data-testid="resource-environment-filter"
style={{ minWidth: 200, height: 34 }}
onChange={handleEnvironmentChange}
onBlur={handleBlur}
>
{environments.map((opt) => (
<Select.Option key={opt.value} value={opt.value}>

View File

@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsApplication.factory';
import { updateStepInterval } from 'hooks/queryBuilder/useStepInterval';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { ServicesList } from 'types/api/metrics/getService';
import { QueryDataV3 } from 'types/api/widgets/getQuery';
import { EQueryType } from 'types/common/dashboard';

View File

@@ -13,7 +13,7 @@ import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useNotifications } from 'hooks/useNotifications';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';

View File

@@ -3,8 +3,8 @@ import { useSelector } from 'react-redux';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { AppState } from 'store/reducers';
import { SuccessResponse } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

View File

@@ -1,77 +0,0 @@
import ROUTES from 'constants/routes';
import { whilelistedKeys } from '../config';
import { mappingWithRoutesAndKeys } from '../utils';
describe('useResourceAttribute config', () => {
describe('whilelistedKeys', () => {
it('should include underscore-notation keys (DOT_METRICS_ENABLED=false)', () => {
expect(whilelistedKeys).toContain('resource_deployment_environment');
expect(whilelistedKeys).toContain('resource_k8s_cluster_name');
expect(whilelistedKeys).toContain('resource_k8s_cluster_namespace');
});
it('should include dot-notation keys (DOT_METRICS_ENABLED=true)', () => {
expect(whilelistedKeys).toContain('resource_deployment.environment');
expect(whilelistedKeys).toContain('resource_k8s.cluster.name');
expect(whilelistedKeys).toContain('resource_k8s.cluster.namespace');
});
});
describe('mappingWithRoutesAndKeys', () => {
const dotNotationFilters = [
{
label: 'deployment.environment',
value: 'resource_deployment.environment',
},
{ label: 'k8s.cluster.name', value: 'resource_k8s.cluster.name' },
{ label: 'k8s.cluster.namespace', value: 'resource_k8s.cluster.namespace' },
];
const underscoreNotationFilters = [
{
label: 'deployment.environment',
value: 'resource_deployment_environment',
},
{ label: 'k8s.cluster.name', value: 'resource_k8s_cluster_name' },
{ label: 'k8s.cluster.namespace', value: 'resource_k8s_cluster_namespace' },
];
const nonWhitelistedFilters = [
{ label: 'host.name', value: 'resource_host_name' },
{ label: 'service.name', value: 'resource_service_name' },
];
it('should keep dot-notation filters on the Service Map route', () => {
const result = mappingWithRoutesAndKeys(
ROUTES.SERVICE_MAP,
dotNotationFilters,
);
expect(result).toHaveLength(3);
expect(result).toEqual(dotNotationFilters);
});
it('should keep underscore-notation filters on the Service Map route', () => {
const result = mappingWithRoutesAndKeys(
ROUTES.SERVICE_MAP,
underscoreNotationFilters,
);
expect(result).toHaveLength(3);
expect(result).toEqual(underscoreNotationFilters);
});
it('should filter out non-whitelisted keys on the Service Map route', () => {
const allFilters = [...dotNotationFilters, ...nonWhitelistedFilters];
const result = mappingWithRoutesAndKeys(ROUTES.SERVICE_MAP, allFilters);
expect(result).toHaveLength(3);
expect(result).toEqual(dotNotationFilters);
});
it('should return all filters on non-Service Map routes', () => {
const allFilters = [...dotNotationFilters, ...nonWhitelistedFilters];
const result = mappingWithRoutesAndKeys('/services', allFilters);
expect(result).toHaveLength(5);
expect(result).toEqual(allFilters);
});
});
});

View File

@@ -1,8 +1,5 @@
export const whilelistedKeys = [
'resource_deployment_environment',
'resource_deployment.environment',
'resource_k8s_cluster_name',
'resource_k8s.cluster.name',
'resource_k8s_cluster_namespace',
'resource_k8s.cluster.namespace',
];

View File

@@ -1,412 +0,0 @@
import type { ReactNode } from 'react';
import { render, screen } from '@testing-library/react';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import type { AlignedData } from 'uplot';
import { PlotContextProvider } from '../../context/PlotContext';
import UPlotChart from '../UPlotChart';
// ---------------------------------------------------------------------------
// Mocks
// ---------------------------------------------------------------------------
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
updateSeriesVisibilityToLocalStorage: jest.fn(),
}),
);
jest.mock('@sentry/react', () => ({
ErrorBoundary: ({ children }: { children: ReactNode }): JSX.Element => (
<>{children}</>
),
}));
jest.mock('pages/ErrorBoundaryFallback/ErrorBoundaryFallback', () => ({
__esModule: true,
default: (): JSX.Element => <div>Error Fallback</div>,
}));
interface MockUPlotInstance {
root: HTMLDivElement;
setData: jest.Mock;
setSize: jest.Mock;
destroy: jest.Mock;
}
let instances: MockUPlotInstance[] = [];
const mockUPlotConstructor = jest.fn();
jest.mock('uplot', () => {
function MockUPlot(
opts: Record<string, unknown>,
data: unknown,
target: HTMLElement,
): MockUPlotInstance {
mockUPlotConstructor(opts, data, target);
const rootEl = document.createElement('div');
target.appendChild(rootEl);
const inst: MockUPlotInstance = {
root: rootEl,
setData: jest.fn(),
setSize: jest.fn(),
destroy: jest.fn(),
};
instances.push(inst);
return inst;
}
MockUPlot.paths = {
spline: jest.fn(() => jest.fn()),
bars: jest.fn(() => jest.fn()),
linear: jest.fn(() => jest.fn()),
stepped: jest.fn(() => jest.fn()),
};
MockUPlot.tzDate = jest.fn();
return { __esModule: true, default: MockUPlot };
});
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
const createMockConfig = (): UPlotConfigBuilder => {
return ({
getConfig: jest.fn().mockReturnValue({
series: [{ value: (): string => '' }],
axes: [],
scales: {},
hooks: {},
cursor: {},
}),
getWidgetId: jest.fn().mockReturnValue(undefined),
getShouldSaveSelectionPreference: jest.fn().mockReturnValue(false),
} as unknown) as UPlotConfigBuilder;
};
const validData: AlignedData = [
[1, 2, 3],
[10, 20, 30],
];
const emptyData: AlignedData = [[]];
const Wrapper = ({ children }: { children: ReactNode }): JSX.Element => (
<PlotContextProvider>{children}</PlotContextProvider>
);
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe('UPlotChart', () => {
beforeEach(() => {
instances = [];
mockUPlotConstructor.mockClear();
});
describe('when data is empty', () => {
it('displays "No Data" message instead of the chart container', () => {
render(
<UPlotChart
config={createMockConfig()}
data={emptyData}
width={600}
height={400}
/>,
{ wrapper: Wrapper },
);
expect(screen.getByText('No Data')).toBeInTheDocument();
expect(screen.queryByTestId('uplot-main-div')).not.toBeInTheDocument();
});
it('sizes the empty-state container to the given width and height', () => {
render(
<UPlotChart
config={createMockConfig()}
data={emptyData}
width={750}
height={350}
/>,
{ wrapper: Wrapper },
);
const noDataContainer = screen
.getByText('No Data')
.closest('.uplot-no-data');
expect(noDataContainer).toHaveStyle({ width: '750px', height: '350px' });
});
it('does not create a uPlot instance', () => {
render(
<UPlotChart
config={createMockConfig()}
data={emptyData}
width={600}
height={400}
/>,
{ wrapper: Wrapper },
);
expect(mockUPlotConstructor).not.toHaveBeenCalled();
});
});
describe('chart container', () => {
it('renders children inside the chart wrapper', () => {
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={600}
height={400}
>
<div data-testid="tooltip-plugin">Tooltip</div>
</UPlotChart>,
{ wrapper: Wrapper },
);
expect(screen.getByTestId('tooltip-plugin')).toBeInTheDocument();
});
});
describe('plot creation', () => {
it('instantiates uPlot with floored dimensions and the container element', () => {
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={600.9}
height={400.2}
/>,
{ wrapper: Wrapper },
);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
const [opts, data, target] = mockUPlotConstructor.mock.calls[0];
expect(opts.width).toBe(600);
expect(opts.height).toBe(400);
expect(data).toBe(validData);
expect(target).toBe(screen.getByTestId('uplot-main-div'));
});
it('merges config builder output into the uPlot options', () => {
const config = createMockConfig();
config.getConfig = jest.fn().mockReturnValue({
series: [{ value: (): string => '' }],
axes: [{ scale: 'y' }],
scales: { y: {} },
hooks: {},
cursor: { show: true },
});
render(
<UPlotChart
config={(config as unknown) as UPlotConfigBuilder}
data={validData}
width={500}
height={300}
/>,
{ wrapper: Wrapper },
);
const [opts] = mockUPlotConstructor.mock.calls[0];
expect(opts.width).toBe(500);
expect(opts.height).toBe(300);
expect(opts.axes).toEqual([{ scale: 'y' }]);
expect(opts.cursor).toEqual({ show: true });
});
it('skips creation when width or height is 0', () => {
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={0}
height={0}
/>,
{ wrapper: Wrapper },
);
expect(mockUPlotConstructor).not.toHaveBeenCalled();
});
});
describe('lifecycle callbacks', () => {
it('invokes plotRef with the uPlot instance after creation', () => {
const plotRef = jest.fn();
render(
<UPlotChart
config={createMockConfig()}
data={validData}
width={600}
height={400}
plotRef={plotRef}
/>,
{ wrapper: Wrapper },
);
expect(plotRef).toHaveBeenCalledTimes(1);
expect(plotRef).toHaveBeenCalledWith(instances[0]);
});
it('destroys the instance and notifies callbacks when data becomes empty', () => {
const plotRef = jest.fn();
const onDestroy = jest.fn();
const config = createMockConfig();
const { rerender } = render(
<UPlotChart
config={config}
data={validData}
width={600}
height={400}
plotRef={plotRef}
onDestroy={onDestroy}
/>,
{ wrapper: Wrapper },
);
const firstInstance = instances[0];
plotRef.mockClear();
rerender(
<UPlotChart
config={config}
data={emptyData}
width={600}
height={400}
plotRef={plotRef}
onDestroy={onDestroy}
/>,
);
expect(onDestroy).toHaveBeenCalledWith(firstInstance);
expect(firstInstance.destroy).toHaveBeenCalled();
expect(plotRef).toHaveBeenCalledWith(null);
expect(screen.getByText('No Data')).toBeInTheDocument();
});
it('destroys the previous instance before creating a new one on config change', () => {
const onDestroy = jest.fn();
const config1 = createMockConfig();
const config2 = createMockConfig();
const { rerender } = render(
<UPlotChart
config={config1}
data={validData}
width={600}
height={400}
onDestroy={onDestroy}
/>,
{ wrapper: Wrapper },
);
const firstInstance = instances[0];
rerender(
<UPlotChart
config={config2}
data={validData}
width={600}
height={400}
onDestroy={onDestroy}
/>,
);
expect(onDestroy).toHaveBeenCalledWith(firstInstance);
expect(firstInstance.destroy).toHaveBeenCalled();
expect(instances).toHaveLength(2);
});
});
describe('prop updates', () => {
it('calls setData without recreating the plot when only data changes', () => {
const config = createMockConfig();
const newData: AlignedData = [
[4, 5, 6],
[40, 50, 60],
];
const { rerender } = render(
<UPlotChart config={config} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
const inst = instances[0];
rerender(
<UPlotChart config={config} data={newData} width={600} height={400} />,
);
expect(inst.setData).toHaveBeenCalledWith(newData);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
});
it('calls setSize with floored values when only dimensions change', () => {
const config = createMockConfig();
const { rerender } = render(
<UPlotChart config={config} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
const instance = instances[0];
rerender(
<UPlotChart
config={config}
data={validData}
width={800.7}
height={500.3}
/>,
);
expect(instance.setSize).toHaveBeenCalledWith({ width: 800, height: 500 });
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
});
it('recreates the plot when config changes', () => {
const config1 = createMockConfig();
const config2 = createMockConfig();
const { rerender } = render(
<UPlotChart config={config1} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
rerender(
<UPlotChart config={config2} data={validData} width={600} height={400} />,
);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(2);
});
it('does nothing when all props remain the same', () => {
const config = createMockConfig();
const { rerender } = render(
<UPlotChart config={config} data={validData} width={600} height={400} />,
{ wrapper: Wrapper },
);
const instance = instances[0];
rerender(
<UPlotChart config={config} data={validData} width={600} height={400} />,
);
expect(mockUPlotConstructor).toHaveBeenCalledTimes(1);
expect(instance.setData).not.toHaveBeenCalled();
expect(instance.setSize).not.toHaveBeenCalled();
});
});
});

View File

@@ -412,16 +412,14 @@ describe('Dashboard Provider - URL Variables Integration', () => {
});
// Verify dashboard state contains the variables with default values
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables).toHaveProperty('environment');
expect(parsedVariables).toHaveProperty('services');
// Default allSelected values should be preserved
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
expect(parsedVariables).toHaveProperty('environment');
expect(parsedVariables).toHaveProperty('services');
// Default allSelected values should be preserved
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
it('should merge URL variables with dashboard data and normalize values correctly', async () => {
@@ -468,26 +466,16 @@ describe('Dashboard Provider - URL Variables Integration', () => {
});
// Verify the dashboard state reflects the normalized URL values
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
// First ensure the variables exist
expect(parsedVariables).toHaveProperty('environment');
expect(parsedVariables).toHaveProperty('services');
// The selectedValue should be updated with normalized URL values
expect(parsedVariables.environment.selectedValue).toBe('development');
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
// Then check their properties
expect(parsedVariables.environment).toHaveProperty('selectedValue');
expect(parsedVariables.services).toHaveProperty('selectedValue');
// The selectedValue should be updated with normalized URL values
expect(parsedVariables.environment.selectedValue).toBe('development');
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
// allSelected should be set to false when URL values override
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
// allSelected should be set to false when URL values override
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
it('should handle ALL_SELECTED_VALUE from URL and set allSelected correctly', async () => {
@@ -512,8 +500,8 @@ describe('Dashboard Provider - URL Variables Integration', () => {
);
// Verify that allSelected is set to true for the services variable
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
await waitFor(async () => {
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables.services.allSelected).toBe(true);
@@ -615,8 +603,8 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
});
// Verify that defaultValue is set from textboxValue
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
await waitFor(async () => {
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
@@ -660,8 +648,8 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
});
// Verify that existing defaultValue is preserved
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
await waitFor(async () => {
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
@@ -706,8 +694,8 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
});
// Verify that defaultValue is set to empty string
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
await waitFor(async () => {
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
@@ -751,8 +739,8 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
});
// Verify that defaultValue is NOT set from textboxValue for QUERY type
await waitFor(() => {
const dashboardVariables = screen.getByTestId('dashboard-variables');
await waitFor(async () => {
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables.myQuery.type).toBe('QUERY');

View File

@@ -1,7 +1,7 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { commaValuesParser } from '../../lib/dashboardVariables/customCommaValuesParser';
import { commaValuesParser } from '../../lib/dashbaordVariables/customCommaValuesParser';
interface UrlVariables {
[key: string]: any;

View File

@@ -18,6 +18,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -42,8 +43,9 @@ type provider struct {
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
gatewayHandler gateway.Handler
roleGetter role.Getter
roleHandler role.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
}
func NewFactory(
@@ -61,8 +63,9 @@ func NewFactory(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
@@ -83,8 +86,9 @@ func NewFactory(
dashboardHandler,
metricsExplorerHandler,
gatewayHandler,
roleGetter,
roleHandler,
fieldsHandler,
authzHandler,
)
})
}
@@ -107,8 +111,9 @@ func newProvider(
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
gatewayHandler gateway.Handler,
roleGetter role.Getter,
roleHandler role.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -129,11 +134,12 @@ func newProvider(
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
gatewayHandler: gatewayHandler,
roleGetter: roleGetter,
roleHandler: roleHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz, roleGetter)
if err := provider.AddToRouter(router); err != nil {
return nil, err

View File

@@ -10,7 +10,7 @@ import (
)
func (provider *provider) addRoleRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Create), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Create), handler.OpenAPIDef{
ID: "CreateRole",
Tags: []string{"role"},
Summary: "Create role",
@@ -27,7 +27,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.authzHandler.List), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles", handler.New(provider.authZ.AdminAccess(provider.roleHandler.List), handler.OpenAPIDef{
ID: "ListRoles",
Tags: []string{"role"},
Summary: "List roles",
@@ -44,7 +44,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
Summary: "Get role",
@@ -61,7 +61,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
@@ -78,7 +78,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Delete), handler.OpenAPIDef{
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.roleHandler.Delete), handler.OpenAPIDef{
ID: "DeleteRole",
Tags: []string{"role"},
Summary: "Delete role",

View File

@@ -2,11 +2,9 @@ package authz
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)
@@ -31,76 +29,4 @@ type AuthZ interface {
// Lists the selectors for objects assigned to subject (s) with relation (r) on resource (s)
ListObjects(context.Context, string, authtypes.Relation, authtypes.Typeable) ([]*authtypes.Object, error)
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered from role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in authorization server associated with the given role and relation
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and tuples in authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstrap the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
// Bootstrap managed roles transactions and user assignments
CreateManagedUserRoleTransactions(context.Context, valuer.UUID, valuer.UUID) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"context"

View File

@@ -2,24 +2,35 @@ package openfgaauthz
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type provider struct {
server *openfgaserver.Server
store roletypes.Store
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
@@ -29,194 +40,301 @@ func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtr
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
server, err := openfgaserver.NewOpenfgaServer(ctx, settings, config, sqlstore, openfgaSchema)
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &provider{
server: server,
store: sqlauthzstore.NewSqlAuthzStore(sqlstore),
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.server.Start(ctx)
storeId, err := provider.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := provider.getOrCreateModel(ctx, storeId)
if err != nil {
return err
}
provider.mtx.Lock()
provider.modelID = modelID
provider.storeID = storeId
provider.mtx.Unlock()
<-provider.stopChan
return nil
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.server.Stop(ctx)
provider.openfgaServer.Close()
close(provider.stopChan)
return nil
}
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
return provider.server.Check(ctx, tupleReq)
storeID, modelID := provider.getStoreIDandModelID()
checkResponse, err := provider.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
return provider.server.BatchCheck(ctx, tupleReq)
storeID, modelID := provider.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := provider.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.server.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = provider.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
return provider.server.Write(ctx, additions, deletions)
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := provider.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := provider.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
return provider.server.ListObjects(ctx, subject, relation, typeable)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := provider.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := provider.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := provider.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = provider.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return provider.Write(ctx, nil, tuples)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := provider.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
storeID, modelID := provider.getStoreIDandModelID()
response, err := provider.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return err
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return nil
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := provider.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
func (setter *provider) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (provider *provider) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) getStoreIDandModelID() (string, string) {
provider.mtx.RLock()
defer provider.mtx.RUnlock()
func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
storeID := provider.storeID
modelID := provider.modelID
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return nil
return storeID, modelID
}
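Note (not part of the diff): after this refactor the authz provider exposes only tuple-level primitives (Check, BatchCheck, Write, ListObjects) while role CRUD moves out of the package. Below is a minimal, hypothetical Go sketch of how a caller could drive the Check path; the narrow local interface and the placeholder subject/relation/object strings are assumptions for illustration, not code from this PR.

package authzsketch

import (
	"context"

	openfgav1 "github.com/openfga/api/proto/openfga/v1"
)

// checker captures just the Check method of the AuthZ interface shown in the
// diff; keeping the interface local makes the sketch self-contained.
type checker interface {
	Check(ctx context.Context, tuple *openfgav1.TupleKey) error
}

// canAccess asks the authorization server whether subject holds relation on
// object. Per the diff, Check returns a forbidden error on denial and an
// "authz unavailable" internal error when the embedded OpenFGA server fails.
func canAccess(ctx context.Context, az checker, subject, relation, object string) error {
	return az.Check(ctx, &openfgav1.TupleKey{
		User:     subject,  // e.g. "user:<uuid>" (placeholder format)
		Relation: relation, // e.g. "assignee" (placeholder)
		Object:   object,   // e.g. "role:<org>/<name>" (placeholder format)
	})
}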

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"context"
@@ -20,7 +20,7 @@ func TestProviderStartStop(t *testing.T) {
expectedModel := `module base
type user`
provider, err := NewOpenfgaServer(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
provider, err := newOpenfgaProvider(context.Background(), providerSettings, authz.Config{}, sqlstore, []transformer.ModuleFile{{Name: "test.fga", Contents: expectedModel}})
require.NoError(t, err)
storeRows := sqlstore.Mock().NewRows([]string{"id", "name", "created_at", "updated_at"}).AddRow("01K3V0NTN47MPTMEV1PD5ST6ZC", "signoz", time.Now(), time.Now())

View File

@@ -1,4 +1,4 @@
package openfgaserver
package openfgaauthz
import (
"github.com/SigNoz/signoz/pkg/errors"

View File

@@ -1,334 +0,0 @@
package openfgaserver
import (
"context"
"strconv"
"sync"
authz "github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
openfgapkgserver "github.com/openfga/openfga/pkg/server"
"google.golang.org/protobuf/encoding/protojson"
)
var (
openfgaDefaultStore = valuer.NewString("signoz")
)
type Server struct {
config authz.Config
settings factory.ScopedProviderSettings
openfgaSchema []openfgapkgtransformer.ModuleFile
openfgaServer *openfgapkgserver.Server
storeID string
modelID string
mtx sync.RWMutex
stopChan chan struct{}
}
func NewOpenfgaServer(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (*Server, error) {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")
store, err := NewSQLStore(sqlstore)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
return nil, err
}
// setup the openfga server
opts := []openfgapkgserver.OpenFGAServiceV1Option{
openfgapkgserver.WithDatastore(store),
openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
openfgapkgserver.WithContextPropagationToDatastore(true),
}
openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
if err != nil {
scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
return nil, err
}
return &Server{
config: config,
settings: scopedProviderSettings,
openfgaServer: openfgaServer,
openfgaSchema: openfgaSchema,
mtx: sync.RWMutex{},
stopChan: make(chan struct{}),
}, nil
}
func (server *Server) Start(ctx context.Context) error {
storeID, err := server.getOrCreateStore(ctx, openfgaDefaultStore.StringValue())
if err != nil {
return err
}
modelID, err := server.getOrCreateModel(ctx, storeID)
if err != nil {
return err
}
server.mtx.Lock()
server.modelID = modelID
server.storeID = storeID
server.mtx.Unlock()
<-server.stopChan
return nil
}
func (server *Server) Stop(ctx context.Context) error {
server.openfgaServer.Close()
close(server.stopChan)
return nil
}
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// the batch check response is map[string] keyed by correlationID.
CorrelationId: strconv.Itoa(idx),
})
}
checkResponse, err := server.openfgaServer.BatchCheck(
ctx,
&openfgav1.BatchCheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Checks: batchCheckItems,
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
if err != nil {
return err
}
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
if len(additions) == 0 && len(deletions) == 0 {
return nil
}
storeID, modelID := server.getStoreIDandModelID()
deletionTuplesWithoutCondition := make([]*openfgav1.TupleKeyWithoutCondition, len(deletions))
for idx, tuple := range deletions {
deletionTuplesWithoutCondition[idx] = &openfgav1.TupleKeyWithoutCondition{User: tuple.User, Object: tuple.Object, Relation: tuple.Relation}
}
_, err := server.openfgaServer.Write(ctx, &openfgav1.WriteRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
Writes: func() *openfgav1.WriteRequestWrites {
if len(additions) == 0 {
return nil
}
return &openfgav1.WriteRequestWrites{
TupleKeys: additions,
OnDuplicate: "ignore",
}
}(),
Deletes: func() *openfgav1.WriteRequestDeletes {
if len(deletionTuplesWithoutCondition) == 0 {
return nil
}
return &openfgav1.WriteRequestDeletes{
TupleKeys: deletionTuplesWithoutCondition,
OnMissing: "ignore",
}
}(),
})
return err
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
storeID, modelID := server.getStoreIDandModelID()
response, err := server.openfgaServer.ListObjects(ctx, &openfgav1.ListObjectsRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
User: subject,
Relation: relation.StringValue(),
Type: typeable.Type().StringValue(),
})
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "cannot list objects for subject %s with relation %s for type %s", subject, relation.StringValue(), typeable.Type().StringValue())
}
return authtypes.MustNewObjectsFromStringSlice(response.Objects), nil
}
func (server *Server) getOrCreateStore(ctx context.Context, name string) (string, error) {
stores, err := server.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
if err != nil {
return "", err
}
for _, store := range stores.GetStores() {
if store.GetName() == name {
return store.Id, nil
}
}
store, err := server.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
if err != nil {
return "", err
}
return store.Id, nil
}
func (server *Server) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
schema, err := openfgapkgtransformer.TransformModuleFilesToModel(server.openfgaSchema, "1.1")
if err != nil {
return "", err
}
authorisationModels, err := server.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
if err != nil {
return "", err
}
for _, authModel := range authorisationModels.GetAuthorizationModels() {
equal, err := server.isModelEqual(schema, authModel)
if err != nil {
return "", err
}
if equal {
return authModel.Id, nil
}
}
authorizationModel, err := server.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
StoreId: storeID,
TypeDefinitions: schema.TypeDefinitions,
SchemaVersion: schema.SchemaVersion,
Conditions: schema.Conditions,
})
if err != nil {
return "", err
}
return authorizationModel.AuthorizationModelId, nil
}
// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (server *Server) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
// we need to initialize a new model since the model extracted from schema doesn't have id
expectedAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: expected.SchemaVersion,
TypeDefinitions: expected.TypeDefinitions,
Conditions: expected.Conditions,
}
expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
if err != nil {
return false, err
}
actualAuthModel := openfgav1.AuthorizationModel{
SchemaVersion: actual.SchemaVersion,
TypeDefinitions: actual.TypeDefinitions,
Conditions: actual.Conditions,
}
actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
if err != nil {
return false, err
}
return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
func (server *Server) getStoreIDandModelID() (string, string) {
server.mtx.RLock()
defer server.mtx.RUnlock()
storeID := server.storeID
modelID := server.modelID
return storeID, modelID
}

View File

@@ -8,6 +8,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
@@ -23,14 +24,15 @@ type AuthZ struct {
logger *slog.Logger
orgGetter organization.Getter
authzService authz.AuthZ
roleGetter role.Getter
}
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ) *AuthZ {
func NewAuthZ(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
if logger == nil {
panic("cannot build authz middleware, logger is empty")
}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService}
return &AuthZ{logger: logger, orgGetter: orgGetter, authzService: authzService, roleGetter: roleGetter}
}
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
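Note (not part of the diff): because NewAuthZ now takes a role.Getter, every call site constructing this middleware must supply the extra dependency. A minimal, hypothetical sketch of the updated call, written as if it lived in the same middleware package so no extra import path has to be guessed:

package middleware

import (
	"log/slog"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/role"
)

// newAuthZWithRoles is a hypothetical wrapper, not part of this PR, showing
// the new constructor arity: a role.Getter is passed alongside the org
// getter and the authz service.
func newAuthZWithRoles(logger *slog.Logger, orgGetter organization.Getter, authzService authz.AuthZ, roleGetter role.Getter) *AuthZ {
	return NewAuthZ(logger, orgGetter, authzService, roleGetter)
}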

View File

@@ -1,44 +0,0 @@
package middleware
import (
"log/slog"
"net/http"
"runtime/debug"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
)
// Recovery is a middleware that recovers from panics, logs the panic,
// and returns a 500 Internal Server Error.
type Recovery struct {
logger *slog.Logger
}
// NewRecovery creates a new Recovery middleware.
func NewRecovery(logger *slog.Logger) Wrapper {
return &Recovery{
logger: logger.With("pkg", "http-middleware-recovery"),
}
}
// Wrap is the middleware handler.
func (m *Recovery) Wrap(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
defer func() {
if err := recover(); err != nil {
m.logger.ErrorContext(
r.Context(),
"panic recovered",
"err", err, "stack", string(debug.Stack()),
)
render.Error(w, errors.NewInternalf(
errors.CodeInternal, "internal server error",
))
}
}()
next.ServeHTTP(w, r)
})
}

View File

@@ -4,7 +4,7 @@ import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -51,7 +51,7 @@ type Module interface {
statsreporter.StatsCollector
authz.RegisterTypeable
role.RegisterTypeable
}
type Handler interface {

View File

@@ -206,10 +206,6 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return nil
}
// not supported
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
return errors.Newf(errors.TypeUnsupported, dashboardtypes.ErrCodePublicDashboardUnsupported, "not implemented")

View File

@@ -0,0 +1,63 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type getter struct {
store roletypes.Store
}
func NewGetter(store roletypes.Store) role.Getter {
return &getter{store: store}
}
func (getter *getter) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
storableRole, err := getter.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
storableRole, err := getter.store.GetByOrgIDAndName(ctx, orgID, name)
if err != nil {
return nil, err
}
return roletypes.NewRoleFromStorableRole(storableRole), nil
}
func (getter *getter) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.List(ctx, orgID)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storableRole := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storableRole)
}
return roles, nil
}
func (getter *getter) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
storableRoles, err := getter.store.ListByOrgIDAndNames(ctx, orgID, names)
if err != nil {
return nil, err
}
roles := make([]*roletypes.Role, len(storableRoles))
for idx, storable := range storableRoles {
roles[idx] = roletypes.NewRoleFromStorableRole(storable)
}
return roles, nil
}
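Note (not part of the diff): a brief, hypothetical sketch of consuming the new role.Getter, for example resolving a role by organization and name; the caller-side names are illustrative only.

package rolesketch

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// findRole is a hypothetical caller of the getter introduced above: it looks
// a role up by org ID and name and returns the domain object.
func findRole(ctx context.Context, getter role.Getter, orgID valuer.UUID, name string) (*roletypes.Role, error) {
	return getter.GetByOrgIDAndName(ctx, orgID, name)
}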

View File

@@ -0,0 +1,83 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type granter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewGranter(store roletypes.Store, authz authz.AuthZ) role.Granter {
return &granter{store: store, authz: authz}
}
func (granter *granter) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, tuples, nil)
}
func (granter *granter) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
err := granter.Revoke(ctx, orgID, existingRoleName, subject)
if err != nil {
return err
}
err = granter.Grant(ctx, orgID, updatedRoleName, subject)
if err != nil {
return err
}
return nil
}
func (granter *granter) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
tuples, err := authtypes.TypeableRole.Tuples(
subject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, name),
},
orgID,
)
if err != nil {
return err
}
return granter.authz.Write(ctx, nil, tuples)
}
func (granter *granter) CreateManagedRoles(ctx context.Context, _ valuer.UUID, managedRoles []*roletypes.Role) error {
err := granter.store.RunInTx(ctx, func(ctx context.Context) error {
for _, role := range managedRoles {
err := granter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return err
}
}
return nil
})
if err != nil {
return err
}
return nil
}
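Note (not part of the diff): the granter writes role-assignment tuples through the authz provider instead of mutating roles itself. A hypothetical sketch of granting a role to a user, building the subject the same way the diff does for managed-role bootstrap (authtypes.MustNewSubject with TypeableUser); the role name and helper are placeholders.

package rolesketch

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// grantToUser is a hypothetical helper: it formats the user as a subject and
// asks the granter to write the assignee tuple for the named role.
func grantToUser(ctx context.Context, granter role.Granter, orgID, userID valuer.UUID, roleName string) error {
	subject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
	return granter.Grant(ctx, orgID, roleName, subject)
}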

View File

@@ -1,12 +1,12 @@
package signozauthzapi
package implrole
import (
"net/http"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -14,11 +14,12 @@ import (
)
type handler struct {
authz authz.AuthZ
setter role.Setter
getter role.Getter
}
func NewHandler(authz authz.AuthZ) authz.Handler {
return &handler{authz: authz}
func NewHandler(setter role.Setter, getter role.Getter) role.Handler {
return &handler{setter: setter, getter: getter}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -35,7 +36,7 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
err = handler.setter.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
@@ -63,7 +64,7 @@ func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), roleID)
if err != nil {
render.Error(rw, err)
return
@@ -102,7 +103,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
return
}
objects, err := handler.authz.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
objects, err := handler.setter.GetObjects(ctx, valuer.MustNewUUID(claims.OrgID), roleID, relation)
if err != nil {
render.Error(rw, err)
return
@@ -113,7 +114,7 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
resources := handler.authz.GetResources(ctx)
resources := handler.setter.GetResources(ctx)
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
@@ -133,7 +134,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
return
}
roles, err := handler.authz.List(ctx, valuer.MustNewUUID(claims.OrgID))
roles, err := handler.getter.List(ctx, valuer.MustNewUUID(claims.OrgID))
if err != nil {
render.Error(rw, err)
return
@@ -162,7 +163,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -174,7 +175,7 @@ func (handler *handler) Patch(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.setter.Patch(ctx, valuer.MustNewUUID(claims.OrgID), role)
if err != nil {
render.Error(rw, err)
return
@@ -209,7 +210,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
role, err := handler.authz.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
role, err := handler.getter.Get(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return
@@ -221,7 +222,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
err = handler.setter.PatchObjects(ctx, valuer.MustNewUUID(claims.OrgID), role.Name, relation, patchableObjects.Additions, patchableObjects.Deletions)
if err != nil {
render.Error(rw, err)
return
@@ -244,7 +245,7 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
return
}
err = handler.authz.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
err = handler.setter.Delete(ctx, valuer.MustNewUUID(claims.OrgID), id)
if err != nil {
render.Error(rw, err)
return

View File

@@ -0,0 +1,53 @@
package implrole
import (
"context"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store roletypes.Store
authz authz.AuthZ
}
func NewSetter(store roletypes.Store, authz authz.AuthZ) role.Setter {
return &setter{store: store, authz: authz}
}
func (setter *setter) Create(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetOrCreate(_ context.Context, _ valuer.UUID, _ *roletypes.Role) (*roletypes.Role, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
return nil
}
func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
return nil, errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Patch(_ context.Context, _ valuer.UUID, _ *roletypes.Role) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) PatchObjects(_ context.Context, _ valuer.UUID, _ string, _ authtypes.Relation, _, _ []*authtypes.Object) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) Delete(_ context.Context, _ valuer.UUID, _ valuer.UUID) error {
return errors.Newf(errors.TypeUnsupported, roletypes.ErrCodeRoleUnsupported, "not implemented")
}
func (setter *setter) MustGetTypeables() []authtypes.Typeable {
return nil
}

View File

@@ -1,4 +1,4 @@
package sqlauthzstore
package implrole
import (
"context"
@@ -14,7 +14,7 @@ type store struct {
sqlstore sqlstore.SQLStore
}
func NewSqlAuthzStore(sqlstore sqlstore.SQLStore) roletypes.Store {
func NewStore(sqlstore sqlstore.SQLStore) roletypes.Store {
return &store{sqlstore: sqlstore}
}

85
pkg/modules/role/role.go Normal file
View File

@@ -0,0 +1,85 @@
package role
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Setter interface {
// Creates the role.
Create(context.Context, valuer.UUID, *roletypes.Role) error
// Gets the role if it exists or creates one.
GetOrCreate(context.Context, valuer.UUID, *roletypes.Role) (*roletypes.Role, error)
// Gets the objects associated with the given role and relation.
GetObjects(context.Context, valuer.UUID, valuer.UUID, authtypes.Relation) ([]*authtypes.Object, error)
// Gets all the typeable resources registered in the role registry.
GetResources(context.Context) []*authtypes.Resource
// Patches the role.
Patch(context.Context, valuer.UUID, *roletypes.Role) error
// Patches the objects in the authorization server associated with the given role and relation.
PatchObjects(context.Context, valuer.UUID, string, authtypes.Relation, []*authtypes.Object, []*authtypes.Object) error
// Deletes the role and its tuples in the authorization server.
Delete(context.Context, valuer.UUID, valuer.UUID) error
RegisterTypeable
}
type Getter interface {
// Gets the role
Get(context.Context, valuer.UUID, valuer.UUID) (*roletypes.Role, error)
// Gets the role by org_id and name
GetByOrgIDAndName(context.Context, valuer.UUID, string) (*roletypes.Role, error)
// Lists all the roles for the organization.
List(context.Context, valuer.UUID) ([]*roletypes.Role, error)
// Lists all the roles for the organization filtered by name
ListByOrgIDAndNames(context.Context, valuer.UUID, []string) ([]*roletypes.Role, error)
}
type Granter interface {
// Grants a role to the subject based on role name.
Grant(context.Context, valuer.UUID, string, string) error
// Revokes a granted role from the subject based on role name.
Revoke(context.Context, valuer.UUID, string, string) error
// Changes the granted role for the subject based on role name.
ModifyGrant(context.Context, valuer.UUID, string, string, string) error
// Bootstraps the managed roles.
CreateManagedRoles(context.Context, valuer.UUID, []*roletypes.Role) error
}
type RegisterTypeable interface {
MustGetTypeables() []authtypes.Typeable
}
type Handler interface {
Create(http.ResponseWriter, *http.Request)
Get(http.ResponseWriter, *http.Request)
GetObjects(http.ResponseWriter, *http.Request)
GetResources(http.ResponseWriter, *http.Request)
List(http.ResponseWriter, *http.Request)
Patch(http.ResponseWriter, *http.Request)
PatchObjects(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}
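To see how these interfaces fit together, here is a minimal wiring sketch built only from constructors visible in this diff (implrole.NewStore, NewSetter, NewGranter, NewHandler); buildRoleModule is a hypothetical name, the getter is taken as a parameter since its constructor does not appear here, and the sqlstore/authz/role/implrole imports are assumed.

// buildRoleModule wires the community implementations of the role module; for illustration only.
func buildRoleModule(sqlStore sqlstore.SQLStore, az authz.AuthZ, getter role.Getter) (role.Setter, role.Granter, role.Handler) {
    store := implrole.NewStore(sqlStore)
    setter := implrole.NewSetter(store, az)
    granter := implrole.NewGranter(store, az)
    return setter, granter, implrole.NewHandler(setter, getter)
}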

View File

@@ -13,7 +13,6 @@ import (
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
@@ -463,7 +462,7 @@ func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}
@@ -508,7 +507,7 @@ func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) {
return
}
if slices.Contains(integrationstypes.IntegrationUserEmails, createdByUser.Email) {
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email.String())) {
render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked"))
return
}
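The check repeated in both hunks above can be read as a small predicate; a hypothetical helper restating it (isIntegrationUserEmail is an invented name, and the slices and types imports are assumed):

// isIntegrationUserEmail reports whether the email belongs to a reserved integration user.
func isIntegrationUserEmail(email string) bool {
    return slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(email))
}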

View File

@@ -8,18 +8,17 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/tokenizer"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
@@ -33,13 +32,13 @@ type Module struct {
emailing emailing.Emailing
settings factory.ScopedProviderSettings
orgSetter organization.Setter
authz authz.AuthZ
granter role.Granter
analytics analytics.Analytics
config user.Config
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, granter role.Granter, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -48,7 +47,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
settings: settings,
orgSetter: orgSetter,
analytics: analytics,
authz: authz,
granter: granter,
config: config,
}
}
@@ -172,8 +171,8 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID)
func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
createUserOpts := root.NewCreateUserOptions(opts...)
// since assign is idempotent multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.granter.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
@@ -239,7 +238,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
if user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
err = m.granter.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
@@ -287,7 +286,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
if slices.Contains(integrationstypes.IntegrationUserEmails, user.Email) {
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}
@@ -301,8 +300,8 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}
// since revoke is idempotent multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.granter.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -505,14 +504,14 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
}
managedRoles := roletypes.NewManagedRoles(organization.ID)
err = module.authz.CreateManagedUserRoleTransactions(ctx, organization.ID, user.ID)
err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
if err != nil {
return nil, err
}
if err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.orgSetter.Create(ctx, organization, func(ctx context.Context, orgID valuer.UUID) error {
err = module.authz.CreateManagedRoles(ctx, orgID, managedRoles)
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)
if err != nil {
return err
}
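Because removed and added lines are interleaved above, the role-related pieces of the new first-user flow condense to the following (a simplified restatement of the added lines only, assuming the surrounding function context): the admin role is granted via the granter, and the managed roles are seeded through the granter inside the organization-creation callback.

err = module.granter.Grant(ctx, organization.ID, roletypes.SigNozAdminRoleName,
    authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
if err != nil {
    return nil, err
}
// ... and inside the organization-creation callback:
err = module.granter.CreateManagedRoles(ctx, orgID, managedRoles)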

View File

@@ -208,16 +208,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
event.GroupByApplied = len(spec.GroupBy) > 0
if spec.Source == telemetrytypes.SourceMeter {
if spec.StepInterval.Seconds() == 0 {
spec.StepInterval = qbtypes.Step{Duration: time.Second * time.Duration(querybuilder.RecommendedStepIntervalForMeter(req.Start, req.End))}
}
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepIntervalForMeter(req.Start, req.End)) {
newStep := qbtypes.Step{
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepIntervalForMeter(req.Start, req.End)),
}
spec.StepInterval = newStep
}
spec.StepInterval = qbtypes.Step{Duration: time.Second * time.Duration(querybuilder.RecommendedStepIntervalForMeter(req.Start, req.End))}
} else {
if spec.StepInterval.Seconds() == 0 {
spec.StepInterval = qbtypes.Step{
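Restated cleanly (the hunk above interleaves removed and added lines), the meter branch now always snaps to the recommended step interval, dropping the previous default-plus-minimum-clamp logic; a fragment of the added code, assuming the surrounding QueryRange context:

if spec.Source == telemetrytypes.SourceMeter {
    // the step is always the recommended interval for the requested range
    spec.StepInterval = qbtypes.Step{
        Duration: time.Second * time.Duration(querybuilder.RecommendedStepIntervalForMeter(req.Start, req.End)),
    }
}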

View File

@@ -830,7 +830,7 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU
func (r *ClickHouseReader) GetSpansForTrace(ctx context.Context, traceID string, traceDetailsQuery string) ([]model.SpanItemV2, *model.ApiError) {
var traceSummary model.TraceSummary
summaryQuery := fmt.Sprintf("SELECT trace_id, min(start) AS start, max(end) AS end, sum(num_spans) AS num_spans FROM %s.%s WHERE trace_id=$1 GROUP BY trace_id", r.TraceDB, r.traceSummaryTable)
summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
err := r.db.QueryRow(ctx, summaryQuery, traceID).Scan(&traceSummary.TraceID, &traceSummary.Start, &traceSummary.End, &traceSummary.NumSpans)
if err != nil {
if err == sql.ErrNoRows {
@@ -6458,7 +6458,7 @@ func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.Searc
}
var traceSummary model.TraceSummary
summaryQuery := fmt.Sprintf("SELECT trace_id, min(start) AS start, max(end) AS end, sum(num_spans) AS num_spans FROM %s.%s WHERE trace_id=$1 GROUP BY trace_id", r.TraceDB, r.traceSummaryTable)
summaryQuery := fmt.Sprintf("SELECT * from %s.%s WHERE trace_id=$1", r.TraceDB, r.traceSummaryTable)
err := r.db.QueryRow(ctx, summaryQuery, params.TraceID).Scan(&traceSummary.TraceID, &traceSummary.Start, &traceSummary.End, &traceSummary.NumSpans)
if err != nil {
if err == sql.ErrNoRows {

View File

@@ -1,4 +1,4 @@
package store
package cloudintegrations
import (
"context"
@@ -7,50 +7,49 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
CodeCloudIntegrationAccountNotFound errors.Code = errors.MustNewCode("cloud_integration_account_not_found")
)
type cloudProviderAccountsRepository interface {
listConnected(ctx context.Context, orgId string, provider string) ([]types.CloudIntegration, *model.ApiError)
type CloudProviderAccountsRepository interface {
ListConnected(ctx context.Context, orgId string, provider string) ([]integrationstypes.CloudIntegration, error)
get(ctx context.Context, orgId string, provider string, id string) (*types.CloudIntegration, *model.ApiError)
Get(ctx context.Context, orgId string, provider string, id string) (*integrationstypes.CloudIntegration, error)
GetConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*integrationstypes.CloudIntegration, error)
getConnectedCloudAccount(ctx context.Context, orgId string, provider string, accountID string) (*types.CloudIntegration, *model.ApiError)
// Insert an account or update it by (cloudProvider, id)
// for specified non-empty fields
Upsert(
upsert(
ctx context.Context,
orgId string,
provider string,
id *string,
config []byte,
config *types.AccountConfig,
accountId *string,
agentReport *integrationstypes.AgentReport,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*integrationstypes.CloudIntegration, error)
) (*types.CloudIntegration, *model.ApiError)
}
func NewCloudProviderAccountsRepository(store sqlstore.SQLStore) CloudProviderAccountsRepository {
return &cloudProviderAccountsSQLRepository{store: store}
func newCloudProviderAccountsRepository(store sqlstore.SQLStore) (
*cloudProviderAccountsSQLRepository, error,
) {
return &cloudProviderAccountsSQLRepository{
store: store,
}, nil
}
type cloudProviderAccountsSQLRepository struct {
store sqlstore.SQLStore
}
func (r *cloudProviderAccountsSQLRepository) ListConnected(
func (r *cloudProviderAccountsSQLRepository) listConnected(
ctx context.Context, orgId string, cloudProvider string,
) ([]integrationstypes.CloudIntegration, error) {
accounts := []integrationstypes.CloudIntegration{}
) ([]types.CloudIntegration, *model.ApiError) {
accounts := []types.CloudIntegration{}
err := r.store.BunDB().NewSelect().
Model(&accounts).
@@ -63,16 +62,18 @@ func (r *cloudProviderAccountsSQLRepository) ListConnected(
Scan(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "could not query connected cloud accounts")
return nil, model.InternalError(fmt.Errorf(
"could not query connected cloud accounts: %w", err,
))
}
return accounts, nil
}
func (r *cloudProviderAccountsSQLRepository) Get(
func (r *cloudProviderAccountsSQLRepository) get(
ctx context.Context, orgId string, provider string, id string,
) (*integrationstypes.CloudIntegration, error) {
var result integrationstypes.CloudIntegration
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -81,25 +82,23 @@ func (r *cloudProviderAccountsSQLRepository) Get(
Where("id = ?", id).
Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(
err,
CodeCloudIntegrationAccountNotFound,
"couldn't find account with Id %s", id,
)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find account with Id %s", id,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud provider accounts: %w", err,
))
}
return &result, nil
}
func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
func (r *cloudProviderAccountsSQLRepository) getConnectedCloudAccount(
ctx context.Context, orgId string, provider string, accountId string,
) (*integrationstypes.CloudIntegration, error) {
var result integrationstypes.CloudIntegration
) (*types.CloudIntegration, *model.ApiError) {
var result types.CloudIntegration
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -110,25 +109,29 @@ func (r *cloudProviderAccountsSQLRepository) GetConnectedCloudAccount(
Where("removed_at is NULL").
Scan(ctx)
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(err, CodeCloudIntegrationAccountNotFound, "couldn't find connected cloud account %s", accountId)
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find connected cloud account %s", accountId,
))
} else if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud provider account")
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud provider accounts: %w", err,
))
}
return &result, nil
}
func (r *cloudProviderAccountsSQLRepository) Upsert(
func (r *cloudProviderAccountsSQLRepository) upsert(
ctx context.Context,
orgId string,
provider string,
id *string,
config []byte,
config *types.AccountConfig,
accountId *string,
agentReport *integrationstypes.AgentReport,
agentReport *types.AgentReport,
removedAt *time.Time,
) (*integrationstypes.CloudIntegration, error) {
) (*types.CloudIntegration, *model.ApiError) {
// Insert
if id == nil {
temp := valuer.GenerateUUID().StringValue()
@@ -178,7 +181,7 @@ func (r *cloudProviderAccountsSQLRepository) Upsert(
)
}
integration := integrationstypes.CloudIntegration{
integration := types.CloudIntegration{
OrgID: orgId,
Provider: provider,
Identifiable: types.Identifiable{ID: valuer.MustNewUUID(*id)},
@@ -192,18 +195,22 @@ func (r *cloudProviderAccountsSQLRepository) Upsert(
RemovedAt: removedAt,
}
_, err := r.store.BunDB().NewInsert().
_, dbErr := r.store.BunDB().NewInsert().
Model(&integration).
On(onConflictClause).
Exec(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud integration account")
if dbErr != nil {
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud account record: %w", dbErr,
))
}
upsertedAccount, err := r.Get(ctx, orgId, provider, *id)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't get upserted cloud integration account")
upsertedAccount, apiErr := r.get(ctx, orgId, provider, *id)
if apiErr != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't fetch upserted account by id: %w", apiErr.ToError(),
))
}
return upsertedAccount, nil

View File

@@ -1,4 +1,4 @@
package integrationstypes
package cloudintegrations
import (
"github.com/SigNoz/signoz/pkg/errors"
@@ -41,62 +41,3 @@ var ValidAWSRegions = map[string]bool{
"us-west-1": true, // US West (N. California).
"us-west-2": true, // US West (Oregon).
}
var ValidAzureRegions = map[string]bool{
"australiacentral": true,
"australiacentral2": true,
"australiaeast": true,
"australiasoutheast": true,
"austriaeast": true,
"belgiumcentral": true,
"brazilsouth": true,
"brazilsoutheast": true,
"canadacentral": true,
"canadaeast": true,
"centralindia": true,
"centralus": true,
"chilecentral": true,
"denmarkeast": true,
"eastasia": true,
"eastus": true,
"eastus2": true,
"francecentral": true,
"francesouth": true,
"germanynorth": true,
"germanywestcentral": true,
"indonesiacentral": true,
"israelcentral": true,
"italynorth": true,
"japaneast": true,
"japanwest": true,
"koreacentral": true,
"koreasouth": true,
"malaysiawest": true,
"mexicocentral": true,
"newzealandnorth": true,
"northcentralus": true,
"northeurope": true,
"norwayeast": true,
"norwaywest": true,
"polandcentral": true,
"qatarcentral": true,
"southafricanorth": true,
"southafricawest": true,
"southcentralus": true,
"southindia": true,
"southeastasia": true,
"spaincentral": true,
"swedencentral": true,
"switzerlandnorth": true,
"switzerlandwest": true,
"uaecentral": true,
"uaenorth": true,
"uksouth": true,
"ukwest": true,
"westcentralus": true,
"westeurope": true,
"westindia": true,
"westus": true,
"westus2": true,
"westus3": true,
}

File diff suppressed because it is too large.

View File

@@ -1,796 +0,0 @@
package implawsprovider
import (
"context"
"fmt"
"log/slog"
"net/url"
"slices"
"sort"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)
var (
CodeInvalidAWSRegion = errors.MustNewCode("invalid_aws_region")
CodeDashboardNotFound = errors.MustNewCode("dashboard_not_found")
)
type awsProvider struct {
logger *slog.Logger
querier querier.Querier
accountsRepo integrationstore.CloudProviderAccountsRepository
serviceConfigRepo integrationstore.ServiceConfigDatabase
awsServiceDefinitions *services.AWSServicesProvider
}
func NewAWSCloudProvider(
logger *slog.Logger,
accountsRepo integrationstore.CloudProviderAccountsRepository,
serviceConfigRepo integrationstore.ServiceConfigDatabase,
querier querier.Querier,
) integrationstypes.CloudProvider {
awsServiceDefinitions, err := services.NewAWSCloudProviderServices()
if err != nil {
panic("failed to initialize AWS service definitions: " + err.Error())
}
return &awsProvider{
logger: logger,
querier: querier,
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
awsServiceDefinitions: awsServiceDefinitions,
}
}
func (a *awsProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
accountRecord, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAccountStatus{
Id: accountRecord.ID.String(),
CloudAccountId: accountRecord.AccountID,
Status: accountRecord.Status(),
}, nil
}
func (a *awsProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
if err != nil {
return nil, err
}
connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
for _, r := range accountRecords {
connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
}
return &integrationstypes.GettableConnectedAccountsList{
Accounts: connectedAccounts,
}, nil
}
func (a *awsProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
// agent can't check in unless the account is already created
existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
if err != nil {
return nil, err
}
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
*existingAccount.AccountID,
))
}
existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := integrationstypes.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if err != nil {
return nil, err
}
agentConfig, err := a.getAWSAgentConfig(ctx, account)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAWSAgentCheckIn{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: *agentConfig,
}, nil
}
func (a *awsProvider) getAWSAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AWSAgentIntegrationConfig, error) {
// prepare and return integration config to be consumed by agent
agentConfig := &integrationstypes.AWSAgentIntegrationConfig{
EnabledRegions: []string{},
TelemetryCollectionStrategy: &integrationstypes.AWSCollectionStrategy{
Provider: a.GetName(),
AWSMetrics: &integrationstypes.AWSMetricsStrategy{},
AWSLogs: &integrationstypes.AWSLogsStrategy{},
S3Buckets: map[string][]string{},
},
}
accountConfig := new(integrationstypes.AWSAccountConfig)
err := accountConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}
if accountConfig != nil && accountConfig.EnabledRegions != nil {
agentConfig.EnabledRegions = accountConfig.EnabledRegions
}
svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
ctx, account.OrgID, account.ID.StringValue(),
)
if err != nil {
return nil, err
}
// accumulate config in a fixed order to ensure same config generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)
for _, svcType := range configuredServices {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, svcType)
if err != nil {
continue
}
config := svcConfigs[svcType]
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
continue
}
if serviceConfig.Logs != nil && serviceConfig.Logs.Enabled {
if svcType == integrationstypes.S3Sync {
// S3 bucket sync; No cloudwatch logs are appended for this service type;
// Though definition is populated with a custom cloudwatch group that helps in calculating logs connection status
agentConfig.TelemetryCollectionStrategy.S3Buckets = serviceConfig.Logs.S3Buckets
} else if definition.Strategy.AWSLogs != nil { // services that includes a logs subscription
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions = append(
agentConfig.TelemetryCollectionStrategy.AWSLogs.Subscriptions,
definition.Strategy.AWSLogs.Subscriptions...,
)
}
}
if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled && definition.Strategy.AWSMetrics != nil {
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters = append(
agentConfig.TelemetryCollectionStrategy.AWSMetrics.StreamFilters,
definition.Strategy.AWSMetrics.StreamFilters...,
)
}
}
return agentConfig, nil
}
func (a *awsProvider) GetName() integrationstypes.CloudProviderType {
return integrationstypes.CloudProviderAWS
}
func (a *awsProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
svcConfigs := make(map[string]*integrationstypes.AWSCloudServiceConfig)
if cloudAccountID != nil {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
if err != nil {
return nil, err
}
serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.String())
if err != nil {
return nil, err
}
for svcType, config := range serviceConfigs {
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
svcConfigs[svcType] = serviceConfig
}
}
summaries := make([]integrationstypes.AWSServiceSummary, 0)
definitions, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't list aws service definitions: %w", err))
}
for _, def := range definitions {
summary := integrationstypes.AWSServiceSummary{
DefinitionMetadata: def.DefinitionMetadata,
Config: nil,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
}
sort.Slice(summaries, func(i, j int) bool {
return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
})
return &integrationstypes.GettableAWSServices{
Services: summaries,
}, nil
}
func (a *awsProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
details := new(integrationstypes.GettableAWSServiceDetails)
awsDefinition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
}
details.AWSServiceDefinition = *awsDefinition
details.Strategy.Provider = a.GetName()
if req.CloudAccountID == nil {
return details, nil
}
config, err := a.getServiceConfig(ctx, &details.AWSServiceDefinition, req.OrgID, a.GetName().String(), req.ServiceId, *req.CloudAccountID)
if err != nil {
return nil, err
}
if config == nil {
return details, nil
}
details.Config = config
connectionStatus, err := a.getServiceConnectionStatus(
ctx,
*req.CloudAccountID,
req.OrgID,
&details.AWSServiceDefinition,
config,
)
if err != nil {
return nil, err
}
details.ConnectionStatus = connectionStatus
return details, nil
}
func (a *awsProvider) getServiceConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID string,
def *integrationstypes.AWSServiceDefinition,
serviceConfig *integrationstypes.AWSCloudServiceConfig,
) (*integrationstypes.ServiceConnectionStatus, error) {
if def.Strategy == nil {
return nil, nil
}
resp := new(integrationstypes.ServiceConnectionStatus)
wg := sync.WaitGroup{}
wg.Add(2)
if def.Strategy.AWSMetrics != nil && serviceConfig.Metrics.Enabled {
go func() {
defer func() {
if r := recover(); r != nil {
a.logger.ErrorContext(
ctx, "panic while getting service metrics connection status",
"error", r,
"service", def.DefinitionMetadata.Id,
)
}
}()
defer wg.Done()
status, _ := a.getServiceMetricsConnectionStatus(ctx, cloudAccountID, orgID, def)
resp.Metrics = status
}()
}
if def.Strategy.AWSLogs != nil && serviceConfig.Logs.Enabled {
go func() {
defer func() {
if r := recover(); r != nil {
a.logger.ErrorContext(
ctx, "panic while getting service logs connection status",
"error", r,
"service", def.DefinitionMetadata.Id,
)
}
}()
defer wg.Done()
status, _ := a.getServiceLogsConnectionStatus(ctx, cloudAccountID, orgID, def)
resp.Logs = status
}()
}
wg.Wait()
return resp, nil
}
func (a *awsProvider) getServiceMetricsConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID string,
def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
if def.Strategy == nil ||
len(def.Strategy.AWSMetrics.StreamFilters) < 1 ||
len(def.DataCollected.Metrics) < 1 {
return nil, nil
}
statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)
for _, category := range def.IngestionStatusCheck.Metrics {
queries := make([]qbtypes.QueryEnvelope, 0)
for _, check := range category.Checks {
filterExpression := fmt.Sprintf(`cloud.provider="aws" AND cloud.account.id="%s"`, cloudAccountID)
f := ""
for _, attribute := range check.Attributes {
f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
if attribute.Value != "" {
f = fmt.Sprintf("%s '%s'", f, attribute.Value)
}
filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
}
queries = append(queries, qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: valuer.GenerateUUID().String(),
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{{
MetricName: check.Key,
TimeAggregation: metrictypes.TimeAggregationAvg,
SpaceAggregation: metrictypes.SpaceAggregationAvg,
}},
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
},
})
}
resp, err := a.querier.QueryRange(ctx, valuer.MustNewUUID(orgID), &qbtypes.QueryRangeRequest{
SchemaVersion: "v5",
Start: uint64(time.Now().Add(-time.Hour).UnixMilli()),
End: uint64(time.Now().UnixMilli()),
RequestType: qbtypes.RequestTypeScalar,
CompositeQuery: qbtypes.CompositeQuery{
Queries: queries,
},
})
if err != nil {
a.logger.DebugContext(ctx,
"error querying for service metrics connection status",
"error", err,
"service", def.DefinitionMetadata.Id,
)
continue
}
if resp != nil && len(resp.Data.Results) < 1 {
continue
}
queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
if !ok {
continue
}
if queryResponse == nil ||
len(queryResponse.Aggregations) < 1 ||
len(queryResponse.Aggregations[0].Series) < 1 ||
len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
continue
}
statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
CategoryID: category.Category,
CategoryDisplayName: category.DisplayName,
LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
LastReceivedFrom: "signoz-aws-integration",
})
}
return statusResp, nil
}
func (a *awsProvider) getServiceLogsConnectionStatus(
ctx context.Context,
cloudAccountID string,
orgID string,
def *integrationstypes.AWSServiceDefinition,
) ([]*integrationstypes.SignalConnectionStatus, error) {
if def.Strategy == nil ||
len(def.Strategy.AWSLogs.Subscriptions) < 1 ||
len(def.DataCollected.Logs) < 1 {
return nil, nil
}
statusResp := make([]*integrationstypes.SignalConnectionStatus, 0)
for _, category := range def.IngestionStatusCheck.Logs {
queries := make([]qbtypes.QueryEnvelope, 0)
for _, check := range category.Checks {
filterExpression := fmt.Sprintf(`cloud.account.id="%s"`, cloudAccountID)
f := ""
for _, attribute := range check.Attributes {
f = fmt.Sprintf("%s %s", attribute.Name, attribute.Operator)
if attribute.Value != "" {
f = fmt.Sprintf("%s '%s'", f, attribute.Value)
}
filterExpression = fmt.Sprintf("%s AND %s", filterExpression, f)
}
queries = append(queries, qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Name: valuer.GenerateUUID().String(),
Signal: telemetrytypes.SignalLogs,
Aggregations: []qbtypes.LogAggregation{{
Expression: "count()",
}},
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
Limit: 10,
Offset: 0,
},
})
}
resp, err := a.querier.QueryRange(ctx, valuer.MustNewUUID(orgID), &qbtypes.QueryRangeRequest{
SchemaVersion: "v1",
Start: uint64(time.Now().Add(-time.Hour * 1).UnixMilli()),
End: uint64(time.Now().UnixMilli()),
RequestType: qbtypes.RequestTypeTimeSeries,
CompositeQuery: qbtypes.CompositeQuery{
Queries: queries,
},
})
if err != nil {
a.logger.DebugContext(ctx,
"error querying for service logs connection status",
"error", err,
"service", def.DefinitionMetadata.Id,
)
continue
}
if resp != nil && len(resp.Data.Results) < 1 {
continue
}
queryResponse, ok := resp.Data.Results[0].(*qbtypes.TimeSeriesData)
if !ok {
continue
}
if queryResponse == nil ||
len(queryResponse.Aggregations) < 1 ||
len(queryResponse.Aggregations[0].Series) < 1 ||
len(queryResponse.Aggregations[0].Series[0].Values) < 1 {
continue
}
statusResp = append(statusResp, &integrationstypes.SignalConnectionStatus{
CategoryID: category.Category,
CategoryDisplayName: category.DisplayName,
LastReceivedTsMillis: queryResponse.Aggregations[0].Series[0].Values[0].Timestamp,
LastReceivedFrom: "signoz-aws-integration",
})
}
return statusResp, nil
}
func (a *awsProvider) getServiceConfig(ctx context.Context,
def *integrationstypes.AWSServiceDefinition, orgID, cloudProvider, serviceId, cloudAccountId string,
) (*integrationstypes.AWSCloudServiceConfig, error) {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, cloudProvider, cloudAccountId)
if err != nil {
return nil, err
}
config, err := a.serviceConfigRepo.Get(ctx, orgID, activeAccount.ID.StringValue(), serviceId)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil, nil
}
return nil, err
}
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if config != nil && serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
def.PopulateDashboardURLs(serviceId)
}
return serviceConfig, nil
}
func (a *awsProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
if err != nil {
return nil, err
}
// for now service dashboards are only available when metrics are enabled.
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID != nil {
configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
if err != nil {
return nil, err
}
for svcId, config := range configsBySvcId {
serviceConfig := new(integrationstypes.AWSCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if serviceConfig.Metrics != nil && serviceConfig.Metrics.Enabled {
servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
}
}
}
}
svcDashboards := make([]*dashboardtypes.Dashboard, 0)
allServices, err := a.awsServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list aws service definitions")
}
for _, svc := range allServices {
serviceDashboardsCreatedAt, ok := servicesWithAvailableMetrics[svc.Id]
if ok {
svcDashboards = integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)
servicesWithAvailableMetrics[svc.Id] = nil
}
}
return svcDashboards, nil
}
func (a *awsProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
if err != nil {
return nil, err
}
for _, d := range allDashboards {
if d.ID == req.ID {
return d, nil
}
}
return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}
func (a *awsProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
connection := new(integrationstypes.PostableAWSConnectionUrl)
err := connection.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if connection.AccountConfig != nil {
for _, region := range connection.AccountConfig.EnabledRegions {
if integrationstypes.ValidAWSRegions[region] {
continue
}
return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
}
}
config, err := connection.AccountConfig.Marshal()
if err != nil {
return nil, err
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, integrationstypes.CloudProviderAWS.String(), nil, config,
nil, nil, nil,
)
if err != nil {
return nil, err
}
agentVersion := "v0.0.8"
if connection.AgentConfig.Version != "" {
agentVersion = connection.AgentConfig.Version
}
baseURL := fmt.Sprintf("https://%s.console.aws.amazon.com/cloudformation/home",
connection.AgentConfig.Region)
u, _ := url.Parse(baseURL)
q := u.Query()
q.Set("region", connection.AgentConfig.Region)
u.Fragment = "/stacks/quickcreate"
u.RawQuery = q.Encode()
q = u.Query()
q.Set("stackName", "signoz-integration")
q.Set("templateURL", fmt.Sprintf("https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json", agentVersion))
q.Set("param_SigNozIntegrationAgentVersion", agentVersion)
q.Set("param_SigNozApiUrl", connection.AgentConfig.SigNozAPIUrl)
q.Set("param_SigNozApiKey", connection.AgentConfig.SigNozAPIKey)
q.Set("param_SigNozAccountId", account.ID.StringValue())
q.Set("param_IngestionUrl", connection.AgentConfig.IngestionUrl)
q.Set("param_IngestionKey", connection.AgentConfig.IngestionKey)
return &integrationstypes.GettableAWSConnectionUrl{
AccountId: account.ID.StringValue(),
ConnectionUrl: u.String() + "?&" + q.Encode(), // this format is required by AWS
}, nil
}
func (a *awsProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
definition, err := a.awsServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
}
serviceConfig := new(integrationstypes.PatchableAWSCloudServiceConfig)
err = serviceConfig.Unmarshal(req.Config)
if err != nil {
return nil, err
}
if err = serviceConfig.Config.Validate(definition); err != nil {
return nil, err
}
// can only update config for a connected cloud account id
_, err = a.accountsRepo.GetConnectedCloudAccount(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
)
if err != nil {
return nil, err
}
serviceConfigBytes, err := serviceConfig.Config.Marshal()
if err != nil {
return nil, err
}
updatedConfig, err := a.serviceConfigRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
)
if err != nil {
return nil, err
}
if err = serviceConfig.Unmarshal(updatedConfig); err != nil {
return nil, err
}
return &integrationstypes.PatchServiceConfigResponse{
ServiceId: req.ServiceId,
Config: serviceConfig,
}, nil
}
func (a *awsProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
config := new(integrationstypes.PatchableAWSAccountConfig)
err := config.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if config.Config == nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "account config can't be null")
}
for _, region := range config.Config.EnabledRegions {
if integrationstypes.ValidAWSRegions[region] {
continue
}
return nil, errors.NewInvalidInputf(CodeInvalidAWSRegion, "invalid aws region: %s", region)
}
configBytes, err := config.Config.Marshal()
if err != nil {
return nil, err
}
// account must exist to update config, but it doesn't need to be connected
_, err = a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
if err != nil {
return nil, err
}
accountRecord, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
)
if err != nil {
return nil, err
}
return accountRecord.Account(a.GetName()), nil
}
func (a *awsProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
tsNow := time.Now()
account, err = a.accountsRepo.Upsert(
ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
)
if err != nil {
return nil, err
}
return account, nil
}

View File

@@ -1,584 +0,0 @@
package implazureprovider
import (
"context"
"fmt"
"log/slog"
"slices"
"sort"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/exp/maps"
)
var (
CodeInvalidAzureRegion = errors.MustNewCode("invalid_azure_region")
CodeDashboardNotFound = errors.MustNewCode("dashboard_not_found")
)
type azureProvider struct {
logger *slog.Logger
accountsRepo store.CloudProviderAccountsRepository
serviceConfigRepo store.ServiceConfigDatabase
azureServiceDefinitions *services.AzureServicesProvider
querier querier.Querier
}
func NewAzureCloudProvider(
logger *slog.Logger,
accountsRepo store.CloudProviderAccountsRepository,
serviceConfigRepo store.ServiceConfigDatabase,
querier querier.Querier,
) integrationstypes.CloudProvider {
azureServiceDefinitions, err := services.NewAzureCloudProviderServices()
if err != nil {
panic("failed to initialize Azure service definitions: " + err.Error())
}
return &azureProvider{
logger: logger,
accountsRepo: accountsRepo,
serviceConfigRepo: serviceConfigRepo,
azureServiceDefinitions: azureServiceDefinitions,
querier: querier,
}
}
func (a *azureProvider) GetAccountStatus(ctx context.Context, orgID, accountID string) (*integrationstypes.GettableAccountStatus, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAccountStatus{
Id: account.ID.String(),
CloudAccountId: account.AccountID,
Status: account.Status(),
}, nil
}
func (a *azureProvider) ListConnectedAccounts(ctx context.Context, orgID string) (*integrationstypes.GettableConnectedAccountsList, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID, a.GetName().String())
if err != nil {
return nil, err
}
connectedAccounts := make([]*integrationstypes.Account, 0, len(accountRecords))
for _, r := range accountRecords {
connectedAccounts = append(connectedAccounts, r.Account(a.GetName()))
}
return &integrationstypes.GettableConnectedAccountsList{
Accounts: connectedAccounts,
}, nil
}
func (a *azureProvider) AgentCheckIn(ctx context.Context, req *integrationstypes.PostableAgentCheckInPayload) (any, error) {
existingAccount, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.ID)
if err != nil {
return nil, err
}
if existingAccount != nil && existingAccount.AccountID != nil && *existingAccount.AccountID != req.AccountID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in with new %s account id %s for account %s with existing %s id %s",
a.GetName().String(), req.AccountID, existingAccount.ID.StringValue(), a.GetName().String(),
*existingAccount.AccountID,
))
}
existingAccount, err = a.accountsRepo.GetConnectedCloudAccount(ctx, req.OrgID, a.GetName().String(), req.AccountID)
if existingAccount != nil && existingAccount.ID.StringValue() != req.ID {
return nil, model.BadRequest(fmt.Errorf(
"can't check in to %s account %s with id %s. already connected with id %s",
a.GetName().String(), req.AccountID, req.ID, existingAccount.ID.StringValue(),
))
}
agentReport := integrationstypes.AgentReport{
TimestampMillis: time.Now().UnixMilli(),
Data: req.Data,
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.ID, nil, &req.AccountID, &agentReport, nil,
)
if err != nil {
return nil, err
}
agentConfig, err := a.getAzureAgentConfig(ctx, account)
if err != nil {
return nil, err
}
return &integrationstypes.GettableAzureAgentCheckIn{
AccountId: account.ID.StringValue(),
CloudAccountId: *account.AccountID,
RemovedAt: account.RemovedAt,
IntegrationConfig: *agentConfig,
}, nil
}
func (a *azureProvider) getAzureAgentConfig(ctx context.Context, account *integrationstypes.CloudIntegration) (*integrationstypes.AzureAgentIntegrationConfig, error) {
// prepare and return integration config to be consumed by agent
agentConfig := &integrationstypes.AzureAgentIntegrationConfig{
TelemetryCollectionStrategy: make(map[string]*integrationstypes.AzureCollectionStrategy),
}
accountConfig := new(integrationstypes.AzureAccountConfig)
err := accountConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}
if account.Config != nil {
agentConfig.DeploymentRegion = accountConfig.DeploymentRegion
agentConfig.EnabledResourceGroups = accountConfig.EnabledResourceGroups
}
svcConfigs, err := a.serviceConfigRepo.GetAllForAccount(
ctx, account.OrgID, account.ID.StringValue(),
)
if err != nil {
return nil, err
}
// accumulate config in a fixed order to ensure same config generated across runs
configuredServices := maps.Keys(svcConfigs)
slices.Sort(configuredServices)
metrics := make([]*integrationstypes.AzureMetricsStrategy, 0)
logs := make([]*integrationstypes.AzureLogsStrategy, 0)
for _, svcType := range configuredServices {
definition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, svcType)
if err != nil {
continue
}
config := svcConfigs[svcType]
serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
continue
}
metricsStrategyMap := make(map[string]*integrationstypes.AzureMetricsStrategy)
logsStrategyMap := make(map[string]*integrationstypes.AzureLogsStrategy)
for _, metric := range definition.Strategy.AzureMetrics {
metricsStrategyMap[metric.Name] = metric
}
for _, log := range definition.Strategy.AzureLogs {
logsStrategyMap[log.Name] = log
}
if serviceConfig.Metrics != nil {
for _, metric := range serviceConfig.Metrics {
if metric.Enabled {
metrics = append(metrics, &integrationstypes.AzureMetricsStrategy{
CategoryType: metricsStrategyMap[metric.Name].CategoryType,
Name: metric.Name,
})
}
}
}
if serviceConfig.Logs != nil {
for _, log := range serviceConfig.Logs {
if log.Enabled {
logs = append(logs, &integrationstypes.AzureLogsStrategy{
CategoryType: logsStrategyMap[log.Name].CategoryType,
Name: log.Name,
})
}
}
}
strategy := integrationstypes.AzureCollectionStrategy{}
strategy.AzureMetrics = metrics
strategy.AzureLogs = logs
agentConfig.TelemetryCollectionStrategy[svcType] = &strategy
}
return agentConfig, nil
}
func (a *azureProvider) GetName() valuer.String {
return integrationstypes.CloudProviderAzure
}
func (a *azureProvider) ListServices(ctx context.Context, orgID string, cloudAccountID *string) (any, error) {
svcConfigs := make(map[string]*integrationstypes.AzureCloudServiceConfig)
if cloudAccountID != nil {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), *cloudAccountID)
if err != nil {
return nil, err
}
serviceConfigs, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID, activeAccount.ID.StringValue())
if err != nil {
return nil, err
}
for svcType, config := range serviceConfigs {
serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
svcConfigs[svcType] = serviceConfig
}
}
summaries := make([]integrationstypes.AzureServiceSummary, 0)
definitions, err := a.azureServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't list aws service definitions: %w", err))
}
for _, def := range definitions {
summary := integrationstypes.AzureServiceSummary{
DefinitionMetadata: def.DefinitionMetadata,
Config: nil,
}
summary.Config = svcConfigs[summary.Id]
summaries = append(summaries, summary)
}
sort.Slice(summaries, func(i, j int) bool {
return summaries[i].DefinitionMetadata.Title < summaries[j].DefinitionMetadata.Title
})
return &integrationstypes.GettableAzureServices{
Services: summaries,
}, nil
}
func (a *azureProvider) GetServiceDetails(ctx context.Context, req *integrationstypes.GetServiceDetailsReq) (any, error) {
details := new(integrationstypes.GettableAzureServiceDetails)
azureDefinition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, model.InternalError(fmt.Errorf("couldn't get aws service definition: %w", err))
}
details.AzureServiceDefinition = *azureDefinition
if req.CloudAccountID == nil {
return details, nil
}
config, err := a.getServiceConfig(ctx, &details.AzureServiceDefinition, req.OrgID, req.ServiceId, *req.CloudAccountID)
if err != nil {
return nil, err
}
details.Config = config
// fill default values for config
if details.Config == nil {
cfg := new(integrationstypes.AzureCloudServiceConfig)
logs := make([]*integrationstypes.AzureCloudServiceLogsConfig, 0)
for _, log := range azureDefinition.Strategy.AzureLogs {
logs = append(logs, &integrationstypes.AzureCloudServiceLogsConfig{
Enabled: false,
Name: log.Name,
})
}
metrics := make([]*integrationstypes.AzureCloudServiceMetricsConfig, 0)
for _, metric := range azureDefinition.Strategy.AzureMetrics {
metrics = append(metrics, &integrationstypes.AzureCloudServiceMetricsConfig{
Enabled: false,
Name: metric.Name,
})
}
cfg.Logs = logs
cfg.Metrics = metrics
details.Config = cfg
}
// TODO: write logic for getting connection status
return details, nil
}
func (a *azureProvider) getServiceConfig(
ctx context.Context,
definition *integrationstypes.AzureServiceDefinition,
orgID string,
serviceId string,
cloudAccountId string,
) (*integrationstypes.AzureCloudServiceConfig, error) {
activeAccount, err := a.accountsRepo.GetConnectedCloudAccount(ctx, orgID, a.GetName().String(), cloudAccountId)
if err != nil {
return nil, err
}
configBytes, err := a.serviceConfigRepo.Get(ctx, orgID, activeAccount.ID.String(), serviceId)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil, nil
}
return nil, err
}
config := new(integrationstypes.AzureCloudServiceConfig)
err = config.Unmarshal(configBytes)
if err != nil {
return nil, err
}
for _, metric := range config.Metrics {
if metric.Enabled {
definition.PopulateDashboardURLs(serviceId)
break
}
}
return config, nil
}
func (a *azureProvider) GenerateConnectionArtifact(ctx context.Context, req *integrationstypes.PostableConnectionArtifact) (any, error) {
connection := new(integrationstypes.PostableAzureConnectionCommand)
err := connection.Unmarshal(req.Data)
if err != nil {
return nil, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to unmarshal request data into Azure connection config")
}
// validate connection config; the account config is required to build the deployment command
if connection.AccountConfig == nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "azure account config must be provided")
}
if !integrationstypes.ValidAzureRegions[connection.AccountConfig.DeploymentRegion] {
return nil, errors.NewInvalidInputf(CodeInvalidAzureRegion, "invalid azure region: %s",
connection.AccountConfig.DeploymentRegion,
)
}
config, err := connection.AccountConfig.Marshal()
if err != nil {
return nil, err
}
account, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), nil, config,
nil, nil, nil,
)
if err != nil {
return nil, err
}
agentVersion := "v0.0.1"
if connection.AgentConfig != nil && connection.AgentConfig.Version != "" {
agentVersion = connection.AgentConfig.Version
}
// TODO: improve the command and set url
cliCommand := []string{"az", "stack", "sub", "create", "--name", "SigNozIntegration", "--location",
connection.AccountConfig.DeploymentRegion, "--template-uri", fmt.Sprintf("<url>%s", agentVersion),
"--action-on-unmanage", "deleteAll", "--deny-settings-mode", "denyDelete", "--parameters", fmt.Sprintf("rgName=%s", "signoz-integration-rg"),
fmt.Sprintf("rgLocation=%s", connection.AccountConfig.DeploymentRegion)}
return &integrationstypes.GettableAzureConnectionCommand{
AccountId: account.ID.String(),
AzureShellConnectionCommand: "az create",
AzureCliConnectionCommand: strings.Join(cliCommand, " "),
}, nil
}
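// Example (illustrative sketch only, not part of the provider contract): how a caller
// might obtain the Azure onboarding CLI command from GenerateConnectionArtifact. The
// payload is assumed to carry raw JSON matching PostableAzureConnectionCommand; only
// the types and methods defined above are relied upon.
func exampleAzureConnectionCommand(ctx context.Context, a *azureProvider, orgID string, payload []byte) (string, error) {
resp, err := a.GenerateConnectionArtifact(ctx, &integrationstypes.PostableConnectionArtifact{
OrgID: orgID,
Data: payload, // raw JSON request body, shape assumed
})
if err != nil {
return "", err
}
// the azure provider returns a *GettableAzureConnectionCommand, as seen above
cmd, ok := resp.(*integrationstypes.GettableAzureConnectionCommand)
if !ok {
return "", errors.NewInternalf(errors.CodeInternal, "unexpected response type %T", resp)
}
return cmd.AzureCliConnectionCommand, nil
}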
func (a *azureProvider) UpdateServiceConfig(ctx context.Context, req *integrationstypes.PatchableServiceConfig) (any, error) {
definition, err := a.azureServiceDefinitions.GetServiceDefinition(ctx, req.ServiceId)
if err != nil {
return nil, err
}
serviceConfig := new(integrationstypes.PatchableAzureCloudServiceConfig)
err = serviceConfig.Unmarshal(req.Config)
if err != nil {
return nil, err
}
if err = serviceConfig.Config.Validate(definition); err != nil {
return nil, err
}
// can only update config for a connected cloud account id
_, err = a.accountsRepo.GetConnectedCloudAccount(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId,
)
if err != nil {
return nil, err
}
serviceConfigBytes, err := serviceConfig.Config.Marshal()
if err != nil {
return nil, err
}
updatedConfig, err := a.serviceConfigRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), serviceConfig.CloudAccountId, req.ServiceId, serviceConfigBytes,
)
if err != nil {
return nil, err
}
if err = serviceConfig.Unmarshal(updatedConfig); err != nil {
return nil, err
}
return &integrationstypes.PatchServiceConfigResponse{
ServiceId: req.ServiceId,
Config: serviceConfig,
}, nil
}
func (a *azureProvider) GetAvailableDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
accountRecords, err := a.accountsRepo.ListConnected(ctx, orgID.StringValue(), a.GetName().String())
if err != nil {
return nil, err
}
// for now service dashboards are only available when metrics are enabled.
servicesWithAvailableMetrics := map[string]*time.Time{}
for _, ar := range accountRecords {
if ar.AccountID == nil {
continue
}
configsBySvcId, err := a.serviceConfigRepo.GetAllForAccount(ctx, orgID.StringValue(), ar.ID.StringValue())
if err != nil {
return nil, err
}
for svcId, config := range configsBySvcId {
serviceConfig := new(integrationstypes.AzureCloudServiceConfig)
err = serviceConfig.Unmarshal(config)
if err != nil {
return nil, err
}
if serviceConfig.Metrics != nil {
for _, metric := range serviceConfig.Metrics {
if metric.Enabled {
servicesWithAvailableMetrics[svcId] = &ar.CreatedAt
break
}
}
}
}
}
svcDashboards := make([]*dashboardtypes.Dashboard, 0)
allServices, err := a.azureServiceDefinitions.ListServiceDefinitions(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list azure service definitions")
}
for _, svc := range allServices {
serviceDashboardsCreatedAt := servicesWithAvailableMetrics[svc.Id]
if serviceDashboardsCreatedAt != nil {
svcDashboards = append(svcDashboards, integrationstypes.GetDashboardsFromAssets(svc.Id, a.GetName(), serviceDashboardsCreatedAt, svc.Assets)...)
servicesWithAvailableMetrics[svc.Id] = nil
}
}
return svcDashboards, nil
}
func (a *azureProvider) GetDashboard(ctx context.Context, req *integrationstypes.GettableDashboard) (*dashboardtypes.Dashboard, error) {
allDashboards, err := a.GetAvailableDashboards(ctx, req.OrgID)
if err != nil {
return nil, err
}
for _, dashboard := range allDashboards {
if dashboard.ID == req.ID {
return dashboard, nil
}
}
return nil, errors.NewNotFoundf(CodeDashboardNotFound, "dashboard with id %s not found", req.ID)
}
func (a *azureProvider) UpdateAccountConfig(ctx context.Context, req *integrationstypes.PatchableAccountConfig) (any, error) {
config := new(integrationstypes.PatchableAzureAccountConfig)
err := config.Unmarshal(req.Data)
if err != nil {
return nil, err
}
if config.Config == nil || len(config.Config.EnabledResourceGroups) == 0 {
return nil, errors.NewInvalidInputf(CodeInvalidAzureRegion, "account config with at least one enabled resource group must be provided")
}
// for Azure, preserve the deployment region if it was already set
account, err := a.accountsRepo.Get(ctx, req.OrgID, a.GetName().String(), req.AccountId)
if err != nil {
return nil, err
}
if account.Config != nil {
storedConfig := new(integrationstypes.AzureAccountConfig)
err = storedConfig.Unmarshal(account.Config)
if err != nil {
return nil, err
}
config.Config.DeploymentRegion = storedConfig.DeploymentRegion
}
configBytes, err := config.Config.Marshal()
if err != nil {
return nil, err
}
accountRecord, err := a.accountsRepo.Upsert(
ctx, req.OrgID, a.GetName().String(), &req.AccountId, configBytes, nil, nil, nil,
)
if err != nil {
return nil, err
}
return accountRecord.Account(a.GetName()), nil
}
func (a *azureProvider) DisconnectAccount(ctx context.Context, orgID, accountID string) (*integrationstypes.CloudIntegration, error) {
account, err := a.accountsRepo.Get(ctx, orgID, a.GetName().String(), accountID)
if err != nil {
return nil, err
}
tsNow := time.Now()
account, err = a.accountsRepo.Upsert(
ctx, orgID, a.GetName().String(), &accountID, nil, nil, nil, &tsNow,
)
if err != nil {
return nil, err
}
return account, nil
}

View File

@@ -1 +1,94 @@
package cloudintegrations
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
"github.com/SigNoz/signoz/pkg/types"
)
type ServiceSummary struct {
services.Metadata
Config *types.CloudServiceConfig `json:"config"`
}
type ServiceDetails struct {
services.Definition
Config *types.CloudServiceConfig `json:"config"`
ConnectionStatus *ServiceConnectionStatus `json:"status,omitempty"`
}
type AccountStatus struct {
Integration AccountIntegrationStatus `json:"integration"`
}
type AccountIntegrationStatus struct {
LastHeartbeatTsMillis *int64 `json:"last_heartbeat_ts_ms"`
}
type LogsConfig struct {
Enabled bool `json:"enabled"`
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
}
type MetricsConfig struct {
Enabled bool `json:"enabled"`
}
type ServiceConnectionStatus struct {
Logs *SignalConnectionStatus `json:"logs"`
Metrics *SignalConnectionStatus `json:"metrics"`
}
type SignalConnectionStatus struct {
LastReceivedTsMillis int64 `json:"last_received_ts_ms"` // epoch milliseconds
LastReceivedFrom string `json:"last_received_from"` // resource identifier
}
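// Example (illustrative sketch): the wire shape produced by the JSON tags declared above.
// Assumes "encoding/json" is imported in this file; the timestamp and resource values are made up.
func exampleServiceConnectionStatusJSON() (string, error) {
status := ServiceConnectionStatus{
Metrics: &SignalConnectionStatus{
LastReceivedTsMillis: 1700000000000,
LastReceivedFrom: "arn:aws:cloudwatch:us-east-1:123456789012:metric-stream/signoz",
},
}
out, err := json.Marshal(status)
if err != nil {
return "", err
}
// yields: {"logs":null,"metrics":{"last_received_ts_ms":1700000000000,"last_received_from":"arn:aws:cloudwatch:..."}}
return string(out), nil
}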
type CompiledCollectionStrategy = services.CollectionStrategy
func NewCompiledCollectionStrategy(provider string) (*CompiledCollectionStrategy, error) {
if provider == "aws" {
return &CompiledCollectionStrategy{
Provider: "aws",
AWSMetrics: &services.AWSMetricsStrategy{},
AWSLogs: &services.AWSLogsStrategy{},
}, nil
}
return nil, errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", provider)
}
// Helper for accumulating strategies for enabled services.
func AddServiceStrategy(serviceType string, cs *CompiledCollectionStrategy,
definitionStrat *services.CollectionStrategy, config *types.CloudServiceConfig) error {
if definitionStrat.Provider != cs.Provider {
return errors.NewInternalf(CodeMismatchCloudProvider, "can't add %s service strategy to compiled strategy for %s",
definitionStrat.Provider, cs.Provider)
}
if cs.Provider == "aws" {
if config.Logs != nil && config.Logs.Enabled {
if serviceType == services.S3Sync {
// S3 bucket sync: no CloudWatch log subscriptions are appended for this service type.
// The definition still carries a custom CloudWatch log group, which is used when
// calculating the logs connection status.
cs.S3Buckets = config.Logs.S3Buckets
} else if definitionStrat.AWSLogs != nil { // services that include a logs subscription
cs.AWSLogs.Subscriptions = append(
cs.AWSLogs.Subscriptions,
definitionStrat.AWSLogs.Subscriptions...,
)
}
}
if config.Metrics != nil && config.Metrics.Enabled && definitionStrat.AWSMetrics != nil {
cs.AWSMetrics.StreamFilters = append(
cs.AWSMetrics.StreamFilters,
definitionStrat.AWSMetrics.StreamFilters...,
)
}
return nil
}
return errors.NewNotFoundf(services.CodeUnsupportedCloudProvider, "unsupported cloud provider: %s", cs.Provider)
}
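// Example (illustrative sketch): accumulating a compiled AWS strategy across enabled
// services. The per-service strategy and config maps are assumed input shapes; only
// NewCompiledCollectionStrategy and AddServiceStrategy come from this file.
func exampleCompileAWSStrategy(
definitionStrategies map[string]*services.CollectionStrategy, // keyed by service type, assumed
configs map[string]*types.CloudServiceConfig, // keyed by service type, assumed
) (*CompiledCollectionStrategy, error) {
compiled, err := NewCompiledCollectionStrategy("aws")
if err != nil {
return nil, err
}
for svcType, strat := range definitionStrategies {
config := configs[svcType]
if config == nil {
// a service with no stored config contributes nothing to the compiled strategy
continue
}
if err := AddServiceStrategy(svcType, compiled, strat, config); err != nil {
return nil, err
}
}
return compiled, nil
}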

View File

@@ -1,26 +0,0 @@
package cloudintegrations
import (
"log/slog"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implawsprovider"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/implazureprovider"
integrationstore "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/store"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
)
func NewCloudProviderRegistry(logger *slog.Logger, store sqlstore.SQLStore, querier querier.Querier) map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider {
registry := make(map[integrationstypes.CloudProviderType]integrationstypes.CloudProvider)
accountsRepo := integrationstore.NewCloudProviderAccountsRepository(store)
serviceConfigRepo := integrationstore.NewServiceConfigRepository(store)
awsProviderImpl := implawsprovider.NewAWSCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
registry[integrationstypes.CloudProviderAWS] = awsProviderImpl
azureProviderImpl := implazureprovider.NewAzureCloudProvider(logger, accountsRepo, serviceConfigRepo, querier)
registry[integrationstypes.CloudProviderAzure] = azureProviderImpl
return registry
}

View File

@@ -1,63 +1,64 @@
package store
package cloudintegrations
import (
"context"
"database/sql"
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/integrationstypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
CodeServiceConfigNotFound = errors.MustNewCode("service_config_not_found")
)
type ServiceConfigDatabase interface {
Get(
get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) ([]byte, error)
) (*types.CloudServiceConfig, *model.ApiError)
Upsert(
upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config []byte,
) ([]byte, error)
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError)
GetAllForAccount(
getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (
map[string][]byte,
error,
configsBySvcId map[string]*types.CloudServiceConfig,
apiErr *model.ApiError,
)
}
func NewServiceConfigRepository(store sqlstore.SQLStore) ServiceConfigDatabase {
return &serviceConfigSQLRepository{store: store}
func newServiceConfigRepository(store sqlstore.SQLStore) (
*serviceConfigSQLRepository, error,
) {
return &serviceConfigSQLRepository{
store: store,
}, nil
}
type serviceConfigSQLRepository struct {
store sqlstore.SQLStore
}
func (r *serviceConfigSQLRepository) Get(
func (r *serviceConfigSQLRepository) get(
ctx context.Context,
orgID string,
cloudAccountId string,
serviceType string,
) ([]byte, error) {
var result integrationstypes.CloudIntegrationService
) (*types.CloudServiceConfig, *model.ApiError) {
var result types.CloudIntegrationService
err := r.store.BunDB().NewSelect().
Model(&result).
@@ -66,30 +67,36 @@ func (r *serviceConfigSQLRepository) Get(
Where("ci.id = ?", cloudAccountId).
Where("cis.type = ?", serviceType).
Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(err, CodeServiceConfigNotFound, "couldn't find config for cloud account %s", cloudAccountId)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud service config")
if err == sql.ErrNoRows {
return nil, model.NotFoundError(fmt.Errorf(
"couldn't find config for cloud account %s",
cloudAccountId,
))
} else if err != nil {
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud service config: %w", err,
))
}
return result.Config, nil
return &result.Config, nil
}
func (r *serviceConfigSQLRepository) Upsert(
func (r *serviceConfigSQLRepository) upsert(
ctx context.Context,
orgID string,
cloudProvider string,
cloudAccountId string,
serviceId string,
config []byte,
) ([]byte, error) {
config types.CloudServiceConfig,
) (*types.CloudServiceConfig, *model.ApiError) {
// get cloud integration id from account id
// if the account is not connected, we don't need to upsert the config
var cloudIntegrationId string
err := r.store.BunDB().NewSelect().
Model((*integrationstypes.CloudIntegration)(nil)).
Model((*types.CloudIntegration)(nil)).
Column("id").
Where("provider = ?", cloudProvider).
Where("account_id = ?", cloudAccountId).
@@ -97,18 +104,14 @@ func (r *serviceConfigSQLRepository) Upsert(
Where("removed_at is NULL").
Where("last_agent_report is not NULL").
Scan(ctx, &cloudIntegrationId)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, errors.WrapNotFoundf(
err,
CodeCloudIntegrationAccountNotFound,
"couldn't find active cloud integration account",
)
}
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query cloud integration id")
return nil, model.InternalError(fmt.Errorf(
"couldn't query cloud integration id: %w", err,
))
}
serviceConfig := integrationstypes.CloudIntegrationService{
serviceConfig := types.CloudIntegrationService{
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
TimeAuditable: types.TimeAuditable{
CreatedAt: time.Now(),
@@ -123,18 +126,21 @@ func (r *serviceConfigSQLRepository) Upsert(
On("conflict(cloud_integration_id, type) do update set config=excluded.config, updated_at=excluded.updated_at").
Exec(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't upsert cloud service config")
return nil, model.InternalError(fmt.Errorf(
"could not upsert cloud service config: %w", err,
))
}
return config, nil
return &serviceConfig.Config, nil
}
func (r *serviceConfigSQLRepository) GetAllForAccount(
func (r *serviceConfigSQLRepository) getAllForAccount(
ctx context.Context,
orgID string,
cloudAccountId string,
) (map[string][]byte, error) {
var serviceConfigs []integrationstypes.CloudIntegrationService
) (map[string]*types.CloudServiceConfig, *model.ApiError) {
serviceConfigs := []types.CloudIntegrationService{}
err := r.store.BunDB().NewSelect().
Model(&serviceConfigs).
@@ -143,13 +149,15 @@ func (r *serviceConfigSQLRepository) GetAllForAccount(
Where("ci.org_id = ?", orgID).
Scan(ctx)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't query service configs from db")
return nil, model.InternalError(fmt.Errorf(
"could not query service configs from db: %w", err,
))
}
result := make(map[string][]byte)
result := map[string]*types.CloudServiceConfig{}
for _, r := range serviceConfigs {
result[r.Type] = r.Config
result[r.Type] = &r.Config
}
return result, nil

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_ApplicationELB_ConsumedLCUs_count",
"attributes": []
},
{
"key": "aws_ApplicationELB_ProcessedBytes_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,75 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "rest_api",
"display_name": "REST API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiName",
"operator": "EXISTS",
"value": ""
}
]
}
]
},
{
"category": "http_api",
"display_name": "HTTP API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiId",
"operator": "EXISTS",
"value": ""
}
]
}
]
},
{
"category": "websocket_api",
"display_name": "Websocket API Metrics",
"checks": [
{
"key": "aws_ApiGateway_Count_count",
"attributes": [
{
"name": "ApiId",
"operator": "EXISTS",
"value": ""
}
]
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "API-Gateway%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -217,146 +148,6 @@
"name": "aws_ApiGateway_Latency_sum",
"unit": "Milliseconds",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_4xx_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_5xx_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_sum",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_max",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_min",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_DataProcessed_count",
"unit": "Bytes",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ExecutionError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ClientError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_IntegrationError_count",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_sum",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_max",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_min",
"unit": "Count",
"type": "Gauge"
},
{
"name": "aws_ApiGateway_ConnectCount_count",
"unit": "Count",
"type": "Gauge"
}
],
"logs": [

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_DynamoDB_AccountMaxReads_max",
"attributes": []
},
{
"key": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -409,4 +391,4 @@
}
]
}
}
}

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_EC2_CPUUtilization_max",
"attributes": []
},
{
"key": "aws_EC2_NetworkIn_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -533,4 +515,4 @@
}
]
}
}
}

View File

@@ -7,81 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "overview",
"display_name": "Overview",
"checks": [
{
"key": "aws_ECS_CPUUtilization_max",
"attributes": []
},
{
"key": "aws_ECS_MemoryUtilization_max",
"attributes": []
}
]
},
{
"category": "containerinsights",
"display_name": "Container Insights",
"checks": [
{
"key": "aws_ECS_ContainerInsights_NetworkRxBytes_max",
"attributes": []
},
{
"key": "aws_ECS_ContainerInsights_StorageReadBytes_max",
"attributes": []
}
]
},
{
"category": "enhanced_containerinsights",
"display_name": "Enhanced Container Insights",
"checks": [
{
"key": "aws_ECS_ContainerInsights_ContainerCpuUtilization_max",
"attributes": [
{
"name": "TaskId",
"operator": "EXISTS",
"value": ""
}
]
},
{
"key": "aws_ECS_ContainerInsights_TaskMemoryUtilization_max",
"attributes": [
{
"name": "TaskId",
"operator": "EXISTS",
"value": ""
}
]
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "%/ecs/%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,20 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_ElastiCache_CacheHitRate_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics":[
{
@@ -1942,7 +1928,7 @@
"unit": "Percent",
"type": "Gauge",
"description": ""
}
}
]
},
"telemetry_collection_strategy": {
@@ -1965,4 +1951,4 @@
}
]
}
}
}

View File

@@ -7,37 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_Lambda_Invocations_sum",
"attributes": []
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "/aws/lambda%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{

View File

@@ -7,20 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_Kafka_KafkaDataLogsDiskUsed_max",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -1102,3 +1088,4 @@
]
}
}

View File

@@ -7,37 +7,6 @@
"metrics": true,
"logs": true
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_RDS_CPUUtilization_max",
"attributes": []
}
]
}
],
"logs": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"attributes": [
{
"name": "resources.aws.cloudwatch.log_group_name",
"operator": "ILIKE",
"value": "/aws/rds%"
}
]
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -831,4 +800,4 @@
}
]
}
}
}

View File

@@ -7,20 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_SNS_NumberOfMessagesPublished_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -141,4 +127,4 @@
}
]
}
}
}

View File

@@ -7,24 +7,6 @@
"metrics": true,
"logs": false
},
"ingestion_status_check": {
"metrics": [
{
"category": "$default",
"display_name": "Default",
"checks": [
{
"key": "aws_SQS_SentMessageSize_max",
"attributes": []
},
{
"key": "aws_SQS_NumberOfMessagesSent_sum",
"attributes": []
}
]
}
]
},
"data_collected": {
"metrics": [
{
@@ -265,4 +247,4 @@
}
]
}
}
}

View File

@@ -1 +0,0 @@
<svg id="f2f04349-8aee-4413-84c9-a9053611b319" xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 18 18"><defs><linearGradient id="ad4c4f96-09aa-4f91-ba10-5cb8ad530f74" x1="9" y1="15.83" x2="9" y2="5.79" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#b3b3b3" /><stop offset="0.26" stop-color="#c1c1c1" /><stop offset="1" stop-color="#e6e6e6" /></linearGradient></defs><title>Icon-storage-86</title><path d="M.5,5.79h17a0,0,0,0,1,0,0v9.48a.57.57,0,0,1-.57.57H1.07a.57.57,0,0,1-.57-.57V5.79A0,0,0,0,1,.5,5.79Z" fill="url(#ad4c4f96-09aa-4f91-ba10-5cb8ad530f74)" /><path d="M1.07,2.17H16.93a.57.57,0,0,1,.57.57V5.79a0,0,0,0,1,0,0H.5a0,0,0,0,1,0,0V2.73A.57.57,0,0,1,1.07,2.17Z" fill="#37c2b1" /><path d="M2.81,6.89H15.18a.27.27,0,0,1,.26.27v1.4a.27.27,0,0,1-.26.27H2.81a.27.27,0,0,1-.26-.27V7.16A.27.27,0,0,1,2.81,6.89Z" fill="#fff" /><path d="M2.82,9.68H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V10A.27.27,0,0,1,2.82,9.68Z" fill="#37c2b1" /><path d="M2.82,12.5H15.19a.27.27,0,0,1,.26.27v1.41a.27.27,0,0,1-.26.27H2.82a.27.27,0,0,1-.26-.27V12.77A.27.27,0,0,1,2.82,12.5Z" fill="#258277" /></svg>


Some files were not shown because too many files have changed in this diff.