mirror of
https://github.com/SigNoz/signoz.git
synced 2026-03-06 13:52:02 +00:00
Compare commits
21 Commits
refactor/c
...
SIG-3733
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
10d198f305 | ||
|
|
3051d442c0 | ||
|
|
ea15ce4e04 | ||
|
|
865a7a5a31 | ||
|
|
de4ca50a40 | ||
|
|
8cabaafc58 | ||
|
|
e9d66b8094 | ||
|
|
26d3d6b1e4 | ||
|
|
36d6debeab | ||
|
|
445b0cace8 | ||
|
|
132f10f8a3 | ||
|
|
14011bc277 | ||
|
|
f17a332c23 | ||
|
|
5ae7a464e6 | ||
|
|
51c3628f6e | ||
|
|
6a69076828 | ||
|
|
edd04e2f07 | ||
|
|
ee734cf78c | ||
|
|
6d137bcdff | ||
|
|
444161671d | ||
|
|
31e9e896ec |
9
.github/CODEOWNERS
vendored
9
.github/CODEOWNERS
vendored
@@ -55,7 +55,6 @@
|
||||
/pkg/telemetrymetrics/ @srikanthccv
|
||||
/pkg/telemetrytraces/ @srikanthccv
|
||||
|
||||
|
||||
# Metrics
|
||||
|
||||
/pkg/types/metrictypes/ @srikanthccv
|
||||
@@ -91,6 +90,14 @@
|
||||
# AuthN / AuthZ Owners
|
||||
|
||||
/pkg/authz/ @vikrantgupta25
|
||||
/ee/authz/ @vikrantgupta25
|
||||
/pkg/authn/ @vikrantgupta25
|
||||
/ee/authn/ @vikrantgupta25
|
||||
/pkg/modules/user/ @vikrantgupta25
|
||||
/pkg/modules/session/ @vikrantgupta25
|
||||
/pkg/modules/organization/ @vikrantgupta25
|
||||
/pkg/modules/authdomain/ @vikrantgupta25
|
||||
/pkg/modules/role/ @vikrantgupta25
|
||||
|
||||
# Integration tests
|
||||
|
||||
|
||||
7
.github/workflows/commitci.yaml
vendored
7
.github/workflows/commitci.yaml
vendored
@@ -25,3 +25,10 @@ jobs:
|
||||
else
|
||||
echo "No references to 'ee' packages found in 'pkg' directory"
|
||||
fi
|
||||
|
||||
if grep -R --include="*.go" '.*/ee/.*' cmd/community/; then
|
||||
echo "Error: Found references to 'ee' packages in 'cmd/community' directory"
|
||||
exit 1
|
||||
else
|
||||
echo "No references to 'ee' packages found in 'cmd/community' directory"
|
||||
fi
|
||||
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,6 +1,9 @@
|
||||
|
||||
node_modules
|
||||
|
||||
.vscode
|
||||
!.vscode/settings.json
|
||||
|
||||
deploy/docker/environment_tiny/common_test
|
||||
frontend/node_modules
|
||||
frontend/.pnp
|
||||
@@ -104,7 +107,6 @@ dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
|
||||
4
Makefile
4
Makefile
@@ -86,7 +86,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
|
||||
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
|
||||
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
|
||||
SIGNOZ_WEB_ENABLED=false \
|
||||
SIGNOZ_JWT_SECRET=secret \
|
||||
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
|
||||
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
|
||||
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
|
||||
@@ -103,7 +103,7 @@ go-run-community: ## Runs the community go backend server
|
||||
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
|
||||
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
|
||||
SIGNOZ_WEB_ENABLED=false \
|
||||
SIGNOZ_JWT_SECRET=secret \
|
||||
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
|
||||
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
|
||||
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
|
||||
|
||||
@@ -5,13 +5,14 @@ import (
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
@@ -24,7 +25,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
@@ -57,13 +57,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
// print the version
|
||||
version.Info.PrettyPrint(config.Version)
|
||||
|
||||
// add enterprise sqlstore factories to the community sqlstore factories
|
||||
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
|
||||
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
signoz, err := signoz.New(
|
||||
ctx,
|
||||
config,
|
||||
@@ -90,6 +83,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Module, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
|
||||
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
|
||||
},
|
||||
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return noopgateway.NewProviderFactory()
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
|
||||
@@ -22,6 +23,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
@@ -120,6 +122,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, role, queryParser, querier, licensing)
|
||||
},
|
||||
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return httpgateway.NewProviderFactory(licensing)
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
|
||||
@@ -195,7 +195,7 @@ services:
|
||||
- GODEBUG=netdns=go
|
||||
- TELEMETRY_ENABLED=true
|
||||
- DEPLOYMENT_TYPE=docker-swarm
|
||||
- SIGNOZ_JWT_SECRET=secret
|
||||
- SIGNOZ_TOKENIZER_JWT_SECRET=secret
|
||||
- DOT_METRICS_ENABLED=true
|
||||
healthcheck:
|
||||
test:
|
||||
|
||||
@@ -2067,6 +2067,361 @@ paths:
|
||||
summary: Get features
|
||||
tags:
|
||||
- features
|
||||
/api/v2/gateway/ingestion_keys:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint returns the ingestion keys for a workspace
|
||||
operationId: GetIngestionKeys
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/GatewaytypesGettableIngestionKeys'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Get ingestion keys for workspace
|
||||
tags:
|
||||
- gateway
|
||||
post:
|
||||
deprecated: false
|
||||
description: This endpoint creates an ingestion key for the workspace
|
||||
operationId: CreateIngestionKey
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/GatewaytypesPostableIngestionKey'
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/GatewaytypesGettableCreatedIngestionKey'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Create ingestion key for workspace
|
||||
tags:
|
||||
- gateway
|
||||
/api/v2/gateway/ingestion_keys/{keyId}:
|
||||
delete:
|
||||
deprecated: false
|
||||
description: This endpoint deletes an ingestion key for the workspace
|
||||
operationId: DeleteIngestionKey
|
||||
parameters:
|
||||
- in: path
|
||||
name: keyId
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Delete ingestion key for workspace
|
||||
tags:
|
||||
- gateway
|
||||
patch:
|
||||
deprecated: false
|
||||
description: This endpoint updates an ingestion key for the workspace
|
||||
operationId: UpdateIngestionKey
|
||||
parameters:
|
||||
- in: path
|
||||
name: keyId
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/GatewaytypesPostableIngestionKey'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Update ingestion key for workspace
|
||||
tags:
|
||||
- gateway
|
||||
/api/v2/gateway/ingestion_keys/{keyId}/limits:
|
||||
post:
|
||||
deprecated: false
|
||||
description: This endpoint creates an ingestion key limit
|
||||
operationId: CreateIngestionKeyLimit
|
||||
parameters:
|
||||
- in: path
|
||||
name: keyId
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/GatewaytypesPostableIngestionKeyLimit'
|
||||
responses:
|
||||
"201":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/GatewaytypesGettableCreatedIngestionKeyLimit'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: Created
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Create limit for the ingestion key
|
||||
tags:
|
||||
- gateway
|
||||
/api/v2/gateway/ingestion_keys/limits/{limitId}:
|
||||
delete:
|
||||
deprecated: false
|
||||
description: This endpoint deletes an ingestion key limit
|
||||
operationId: DeleteIngestionKeyLimit
|
||||
parameters:
|
||||
- in: path
|
||||
name: limitId
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Delete limit for the ingestion key
|
||||
tags:
|
||||
- gateway
|
||||
patch:
|
||||
deprecated: false
|
||||
description: This endpoint updates an ingestion key limit
|
||||
operationId: UpdateIngestionKeyLimit
|
||||
parameters:
|
||||
- in: path
|
||||
name: limitId
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/GatewaytypesUpdatableIngestionKeyLimit'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Update limit for the ingestion key
|
||||
tags:
|
||||
- gateway
|
||||
/api/v2/gateway/ingestion_keys/search:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint returns the ingestion keys for a workspace
|
||||
operationId: SearchIngestionKeys
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/GatewaytypesGettableIngestionKeys'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Search ingestion keys for workspace
|
||||
tags:
|
||||
- gateway
|
||||
/api/v2/metric/alerts:
|
||||
get:
|
||||
deprecated: false
|
||||
@@ -2736,12 +3091,25 @@ paths:
|
||||
- sessions
|
||||
components:
|
||||
schemas:
|
||||
AuthtypesAttributeMapping:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
groups:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
role:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesAuthDomainConfig:
|
||||
properties:
|
||||
googleAuthConfig:
|
||||
$ref: '#/components/schemas/AuthtypesGoogleConfig'
|
||||
oidcConfig:
|
||||
$ref: '#/components/schemas/AuthtypesOIDCConfig'
|
||||
roleMapping:
|
||||
$ref: '#/components/schemas/AuthtypesRoleMapping'
|
||||
samlConfig:
|
||||
$ref: '#/components/schemas/AuthtypesSamlConfig'
|
||||
ssoEnabled:
|
||||
@@ -2775,11 +3143,6 @@ components:
|
||||
url:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesClaimMapping:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesDeprecatedGettableLogin:
|
||||
properties:
|
||||
accessJwt:
|
||||
@@ -2811,6 +3174,8 @@ components:
|
||||
$ref: '#/components/schemas/AuthtypesOIDCConfig'
|
||||
orgId:
|
||||
type: string
|
||||
roleMapping:
|
||||
$ref: '#/components/schemas/AuthtypesRoleMapping'
|
||||
samlConfig:
|
||||
$ref: '#/components/schemas/AuthtypesSamlConfig'
|
||||
ssoEnabled:
|
||||
@@ -2834,17 +3199,33 @@ components:
|
||||
type: object
|
||||
AuthtypesGoogleConfig:
|
||||
properties:
|
||||
allowedGroups:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
clientId:
|
||||
type: string
|
||||
clientSecret:
|
||||
type: string
|
||||
domainToAdminEmail:
|
||||
additionalProperties:
|
||||
type: string
|
||||
type: object
|
||||
fetchGroups:
|
||||
type: boolean
|
||||
fetchTransitiveGroupMembership:
|
||||
type: boolean
|
||||
insecureSkipEmailVerified:
|
||||
type: boolean
|
||||
redirectURI:
|
||||
type: string
|
||||
serviceAccountJson:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesOIDCConfig:
|
||||
properties:
|
||||
claimMapping:
|
||||
$ref: '#/components/schemas/AuthtypesClaimMapping'
|
||||
$ref: '#/components/schemas/AuthtypesAttributeMapping'
|
||||
clientId:
|
||||
type: string
|
||||
clientSecret:
|
||||
@@ -2895,8 +3276,22 @@ components:
|
||||
refreshToken:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesRoleMapping:
|
||||
properties:
|
||||
defaultRole:
|
||||
type: string
|
||||
groupMappings:
|
||||
additionalProperties:
|
||||
type: string
|
||||
nullable: true
|
||||
type: object
|
||||
useRoleAttribute:
|
||||
type: boolean
|
||||
type: object
|
||||
AuthtypesSamlConfig:
|
||||
properties:
|
||||
attributeMapping:
|
||||
$ref: '#/components/schemas/AuthtypesAttributeMapping'
|
||||
insecureSkipAuthNRequestsSigned:
|
||||
type: boolean
|
||||
samlCert:
|
||||
@@ -3011,6 +3406,160 @@ components:
|
||||
nullable: true
|
||||
type: object
|
||||
type: object
|
||||
GatewaytypesGettableCreatedIngestionKey:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
value:
|
||||
type: string
|
||||
type: object
|
||||
GatewaytypesGettableCreatedIngestionKeyLimit:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
type: object
|
||||
GatewaytypesGettableIngestionKeys:
|
||||
properties:
|
||||
_pagination:
|
||||
$ref: '#/components/schemas/GatewaytypesPagination'
|
||||
keys:
|
||||
items:
|
||||
$ref: '#/components/schemas/GatewaytypesIngestionKey'
|
||||
nullable: true
|
||||
type: array
|
||||
type: object
|
||||
GatewaytypesIngestionKey:
|
||||
properties:
|
||||
created_at:
|
||||
format: date-time
|
||||
type: string
|
||||
expires_at:
|
||||
format: date-time
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
limits:
|
||||
items:
|
||||
$ref: '#/components/schemas/GatewaytypesLimit'
|
||||
nullable: true
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
tags:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
updated_at:
|
||||
format: date-time
|
||||
type: string
|
||||
value:
|
||||
type: string
|
||||
workspace_id:
|
||||
type: string
|
||||
type: object
|
||||
GatewaytypesLimit:
|
||||
properties:
|
||||
config:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitConfig'
|
||||
created_at:
|
||||
format: date-time
|
||||
type: string
|
||||
id:
|
||||
type: string
|
||||
key_id:
|
||||
type: string
|
||||
metric:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitMetric'
|
||||
signal:
|
||||
type: string
|
||||
tags:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
updated_at:
|
||||
format: date-time
|
||||
type: string
|
||||
type: object
|
||||
GatewaytypesLimitConfig:
|
||||
properties:
|
||||
day:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitValue'
|
||||
second:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitValue'
|
||||
type: object
|
||||
GatewaytypesLimitMetric:
|
||||
properties:
|
||||
day:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitMetricValue'
|
||||
second:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitMetricValue'
|
||||
type: object
|
||||
GatewaytypesLimitMetricValue:
|
||||
properties:
|
||||
count:
|
||||
format: int64
|
||||
type: integer
|
||||
size:
|
||||
format: int64
|
||||
type: integer
|
||||
type: object
|
||||
GatewaytypesLimitValue:
|
||||
properties:
|
||||
count:
|
||||
format: int64
|
||||
type: integer
|
||||
size:
|
||||
format: int64
|
||||
type: integer
|
||||
type: object
|
||||
GatewaytypesPagination:
|
||||
properties:
|
||||
page:
|
||||
type: integer
|
||||
pages:
|
||||
type: integer
|
||||
per_page:
|
||||
type: integer
|
||||
total:
|
||||
type: integer
|
||||
type: object
|
||||
GatewaytypesPostableIngestionKey:
|
||||
properties:
|
||||
expires_at:
|
||||
format: date-time
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
tags:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
type: object
|
||||
GatewaytypesPostableIngestionKeyLimit:
|
||||
properties:
|
||||
config:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitConfig'
|
||||
signal:
|
||||
type: string
|
||||
tags:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
type: object
|
||||
GatewaytypesUpdatableIngestionKeyLimit:
|
||||
properties:
|
||||
config:
|
||||
$ref: '#/components/schemas/GatewaytypesLimitConfig'
|
||||
tags:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
type: object
|
||||
MetricsexplorertypesMetricAlert:
|
||||
properties:
|
||||
alertId:
|
||||
|
||||
@@ -2,6 +2,7 @@ package oidccallbackauthn
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
@@ -19,25 +20,27 @@ const (
|
||||
redirectPath string = "/api/v1/complete/oidc"
|
||||
)
|
||||
|
||||
var (
|
||||
scopes []string = []string{"email", oidc.ScopeOpenID}
|
||||
)
|
||||
var defaultScopes []string = []string{"email", "profile", oidc.ScopeOpenID}
|
||||
|
||||
var _ authn.CallbackAuthN = (*AuthN)(nil)
|
||||
|
||||
type AuthN struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
store authtypes.AuthNStore
|
||||
licensing licensing.Licensing
|
||||
httpClient *client.Client
|
||||
}
|
||||
|
||||
func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn")
|
||||
|
||||
httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &AuthN{
|
||||
settings: settings,
|
||||
store: store,
|
||||
licensing: licensing,
|
||||
httpClient: httpClient,
|
||||
@@ -126,7 +129,40 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
|
||||
name := ""
|
||||
if nameClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Name; nameClaim != "" {
|
||||
if n, ok := claims[nameClaim].(string); ok {
|
||||
name = n
|
||||
}
|
||||
}
|
||||
|
||||
var groups []string
|
||||
if groupsClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Groups; groupsClaim != "" {
|
||||
if claimValue, exists := claims[groupsClaim]; exists {
|
||||
switch g := claimValue.(type) {
|
||||
case []any:
|
||||
for _, group := range g {
|
||||
if gs, ok := group.(string); ok {
|
||||
groups = append(groups, gs)
|
||||
}
|
||||
}
|
||||
case string:
|
||||
// Some IDPs return a single group as a string instead of an array
|
||||
groups = append(groups, g)
|
||||
default:
|
||||
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
role := ""
|
||||
if roleClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Role; roleClaim != "" {
|
||||
if r, ok := claims[roleClaim].(string); ok {
|
||||
role = r
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
@@ -145,6 +181,13 @@ func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.UR
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
scopes := make([]string, len(defaultScopes))
|
||||
copy(scopes, defaultScopes)
|
||||
|
||||
if authDomain.AuthDomainConfig().RoleMapping != nil && len(authDomain.AuthDomainConfig().RoleMapping.GroupMappings) > 0 {
|
||||
scopes = append(scopes, "groups")
|
||||
}
|
||||
|
||||
return oidcProvider, &oauth2.Config{
|
||||
ClientID: authDomain.AuthDomainConfig().OIDC.ClientID,
|
||||
ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret,
|
||||
|
||||
@@ -96,7 +96,26 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.")
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
|
||||
name := ""
|
||||
if nameAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Name; nameAttribute != "" {
|
||||
if val := assertionInfo.Values.Get(nameAttribute); val != "" {
|
||||
name = val
|
||||
}
|
||||
}
|
||||
|
||||
var groups []string
|
||||
if groupAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Groups; groupAttribute != "" {
|
||||
groups = assertionInfo.Values.GetAll(groupAttribute)
|
||||
}
|
||||
|
||||
role := ""
|
||||
if roleAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Role; roleAttribute != "" {
|
||||
if val := assertionInfo.Values.Get(roleAttribute); val != "" {
|
||||
role = val
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
|
||||
282
ee/gateway/httpgateway/provider.go
Normal file
282
ee/gateway/httpgateway/provider.go
Normal file
@@ -0,0 +1,282 @@
|
||||
package httpgateway
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
type Provider struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
config gateway.Config
|
||||
httpClient *client.Client
|
||||
licensing licensing.Licensing
|
||||
}
|
||||
|
||||
func NewProviderFactory(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, ps factory.ProviderSettings, c gateway.Config) (gateway.Gateway, error) {
|
||||
return New(ctx, ps, c, licensing)
|
||||
})
|
||||
}
|
||||
|
||||
func New(ctx context.Context, providerSettings factory.ProviderSettings, config gateway.Config, licensing licensing.Licensing) (gateway.Gateway, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/gateway/httpgateway")
|
||||
|
||||
httpClient, err := client.New(
|
||||
settings.Logger(),
|
||||
providerSettings.TracerProvider,
|
||||
providerSettings.MeterProvider,
|
||||
client.WithRequestResponseLog(true),
|
||||
client.WithRetryCount(3),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &Provider{
|
||||
settings: settings,
|
||||
config: config,
|
||||
httpClient: httpClient,
|
||||
licensing: licensing,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) GetIngestionKeys(ctx context.Context, orgID valuer.UUID, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
|
||||
qParams := url.Values{}
|
||||
qParams.Add("page", strconv.Itoa(page))
|
||||
qParams.Add("per_page", strconv.Itoa(perPage))
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys", qParams, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var ingestionKeys []gatewaytypes.IngestionKey
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pagination gatewaytypes.Pagination
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &gatewaytypes.GettableIngestionKeys{
|
||||
Keys: ingestionKeys,
|
||||
Pagination: pagination,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) SearchIngestionKeysByName(ctx context.Context, orgID valuer.UUID, name string, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
|
||||
qParams := url.Values{}
|
||||
qParams.Add("name", name)
|
||||
qParams.Add("page", strconv.Itoa(page))
|
||||
qParams.Add("per_page", strconv.Itoa(perPage))
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys/search", qParams, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var ingestionKeys []gatewaytypes.IngestionKey
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pagination gatewaytypes.Pagination
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &gatewaytypes.GettableIngestionKeys{
|
||||
Keys: ingestionKeys,
|
||||
Pagination: pagination,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) CreateIngestionKey(ctx context.Context, orgID valuer.UUID, name string, tags []string, expiresAt time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error) {
|
||||
requestBody := gatewaytypes.PostableIngestionKey{
|
||||
Name: name,
|
||||
Tags: tags,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys", nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var createdKeyResponse gatewaytypes.GettableCreatedIngestionKey
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdKeyResponse); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &createdKeyResponse, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) UpdateIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string, name string, tags []string, expiresAt time.Time) error {
|
||||
requestBody := gatewaytypes.PostableIngestionKey{
|
||||
Name: name,
|
||||
Tags: tags,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/keys/"+keyID, nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) DeleteIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string) error {
|
||||
_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/keys/"+keyID, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) CreateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, keyID string, signal string, limitConfig gatewaytypes.LimitConfig, tags []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error) {
|
||||
requestBody := gatewaytypes.PostableIngestionKeyLimit{
|
||||
Signal: signal,
|
||||
Config: limitConfig,
|
||||
Tags: tags,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys/"+keyID+"/limits", nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var createdIngestionKeyLimitResponse gatewaytypes.GettableCreatedIngestionKeyLimit
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdIngestionKeyLimitResponse); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &createdIngestionKeyLimitResponse, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) UpdateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string, limitConfig gatewaytypes.LimitConfig, tags []string) error {
|
||||
requestBody := gatewaytypes.UpdatableIngestionKeyLimit{
|
||||
Config: limitConfig,
|
||||
Tags: tags,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/limits/"+limitID, nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) DeleteIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string) error {
|
||||
_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/limits/"+limitID, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// do executes an authenticated HTTP request against the gateway on behalf of
// the given org and returns the raw response body for 2XX responses.
//
// The org's active license key is sent as the cloud API key; when no active
// license is found the call fails with a license-unavailable error. Non-2XX
// responses are converted into typed errors via errFromStatusCode, using the
// "error" field of the response body as the message when present.
//
// body may be nil for requests without a payload (e.g. GET/DELETE).
func (provider *Provider) do(ctx context.Context, orgID valuer.UUID, method string, path string, queryParams url.Values, body []byte) ([]byte, error) {
	license, err := provider.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "no valid license found").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	// build url by appending path to the configured gateway base URL
	requestURL := provider.config.URL.JoinPath(path)

	// add query params to the url
	if queryParams != nil {
		requestURL.RawQuery = queryParams.Encode()
	}

	// build request; the context carries cancellation/deadline through to the HTTP call
	request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), bytes.NewBuffer(body))
	if err != nil {
		return nil, err
	}

	// add headers needed to call gateway; the consumer username/groups values
	// are fixed identity headers the gateway expects
	request.Header.Set("Content-Type", "application/json")
	request.Header.Set("X-Signoz-Cloud-Api-Key", license.Key)
	request.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
	request.Header.Set("X-Consumer-Groups", "ns:default")

	// execute request
	response, err := provider.httpClient.Do(request)
	if err != nil {
		return nil, err
	}

	// read response fully; the body is always closed on return
	defer response.Body.Close()
	responseBody, err := io.ReadAll(response.Body)
	if err != nil {
		return nil, err
	}

	// only 2XX status codes are treated as success
	if response.StatusCode/100 == 2 {
		return responseBody, nil
	}

	// pull the gateway's error message out of the body, if any
	errorMessage := gjson.GetBytes(responseBody, "error").String()
	if errorMessage == "" {
		errorMessage = "an unknown error occurred"
	}

	// return error for non 2XX
	return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
}
|
||||
|
||||
func (provider *Provider) errFromStatusCode(code int, errorMessage string) error {
|
||||
switch code {
|
||||
case http.StatusBadRequest:
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
|
||||
case http.StatusUnauthorized:
|
||||
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
|
||||
case http.StatusForbidden:
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
|
||||
case http.StatusNotFound:
|
||||
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
|
||||
case http.StatusConflict:
|
||||
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
|
||||
}
|
||||
|
||||
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
|
||||
}
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
querierAPI "github.com/SigNoz/signoz/pkg/querier"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
@@ -36,6 +37,7 @@ type APIHandlerOptions struct {
|
||||
GatewayUrl string
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
GlobalConfig global.Config
|
||||
}
|
||||
|
||||
type APIHandler struct {
|
||||
|
||||
@@ -76,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
return
|
||||
}
|
||||
|
||||
ingestionUrl, signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
|
||||
signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
|
||||
if apiErr != nil {
|
||||
RespondError(w, basemodel.WrapApiError(
|
||||
apiErr, "couldn't deduce ingestion url and signoz api url",
|
||||
@@ -84,7 +84,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
return
|
||||
}
|
||||
|
||||
result.IngestionUrl = ingestionUrl
|
||||
result.IngestionUrl = ah.opts.GlobalConfig.IngestionURL.String()
|
||||
result.SigNozAPIUrl = signozApiUrl
|
||||
|
||||
gatewayUrl := ah.opts.GatewayUrl
|
||||
@@ -186,7 +186,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
|
||||
string, string, *basemodel.ApiError,
|
||||
string, *basemodel.ApiError,
|
||||
) {
|
||||
// TODO: remove this struct from here
|
||||
type deploymentResponse struct {
|
||||
@@ -200,7 +200,7 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
|
||||
|
||||
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
|
||||
if err != nil {
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't query for deployment info: error: %w", err,
|
||||
))
|
||||
}
|
||||
@@ -209,7 +209,7 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
|
||||
|
||||
err = json.Unmarshal(respBytes, resp)
|
||||
if err != nil {
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't unmarshal deployment info response: error: %w", err,
|
||||
))
|
||||
}
|
||||
@@ -219,16 +219,14 @@ func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licens
|
||||
|
||||
if len(regionDns) < 1 || len(deploymentName) < 1 {
|
||||
// Fail early if actual response structure and expectation here ever diverge
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
|
||||
))
|
||||
}
|
||||
|
||||
ingestionUrl := fmt.Sprintf("https://ingest.%s", regionDns)
|
||||
|
||||
signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
|
||||
|
||||
return ingestionUrl, signozApiUrl, nil
|
||||
return signozApiUrl, nil
|
||||
}
|
||||
|
||||
type ingestionKey struct {
|
||||
|
||||
@@ -172,6 +172,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
FluxInterval: config.Querier.FluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: config.Gateway.URL.String(),
|
||||
GlobalConfig: config.Global,
|
||||
}
|
||||
|
||||
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
|
||||
|
||||
@@ -211,7 +211,10 @@ describe('VariableItem Integration Tests', () => {
|
||||
await user.clear(textInput);
|
||||
await user.type(textInput, 'new-text-value');
|
||||
|
||||
// Should call onValueUpdate after debounce
|
||||
// Blur the input to trigger the value update
|
||||
await user.tab();
|
||||
|
||||
// Should call onValueUpdate after blur
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(mockOnValueUpdate).toHaveBeenCalledWith(
|
||||
|
||||
@@ -4,7 +4,7 @@ import { OPERATORS, PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { Formula } from 'container/QueryBuilder/components/Formula';
|
||||
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { memo, useEffect, useMemo, useRef } from 'react';
|
||||
import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
|
||||
import { IBuilderTraceOperator } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
@@ -33,6 +33,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
|
||||
addTraceOperator,
|
||||
panelType,
|
||||
initialDataSource,
|
||||
handleRunQuery,
|
||||
} = useQueryBuilder();
|
||||
|
||||
const containerRef = useRef(null);
|
||||
@@ -157,10 +158,29 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
|
||||
[showTraceOperator, traceOperator, hasAtLeastOneTraceQuery],
|
||||
);
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent<HTMLDivElement>): void => {
|
||||
const target = e.target as HTMLElement | null;
|
||||
const tagName = target?.tagName || '';
|
||||
|
||||
const isInputElement =
|
||||
['INPUT', 'TEXTAREA', 'SELECT'].includes(tagName) ||
|
||||
(target?.getAttribute('contenteditable') || '').toLowerCase() === 'true';
|
||||
|
||||
// Allow input elements in qb to run the query when Cmd/Ctrl + Enter is pressed
|
||||
if (isInputElement && (e.metaKey || e.ctrlKey) && e.key === 'Enter') {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
handleRunQuery();
|
||||
}
|
||||
},
|
||||
[handleRunQuery],
|
||||
);
|
||||
|
||||
return (
|
||||
<QueryBuilderV2Provider>
|
||||
<div className="query-builder-v2">
|
||||
<div className="qb-content-container">
|
||||
<div className="qb-content-container" onKeyDownCapture={handleKeyDown}>
|
||||
{!isMultiQueryAllowed ? (
|
||||
<QueryV2
|
||||
ref={containerRef}
|
||||
|
||||
@@ -11,7 +11,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { get, isEmpty } from 'lodash-es';
|
||||
import { BarChart2, ChevronUp, ExternalLink, ScrollText } from 'lucide-react';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { MetricAggregation } from 'types/api/v5/queryRange';
|
||||
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
|
||||
@@ -171,6 +171,9 @@ function QueryAddOns({
|
||||
|
||||
const [selectedViews, setSelectedViews] = useState<AddOn[]>([]);
|
||||
|
||||
const initializedRef = useRef(false);
|
||||
const prevAvailableKeysRef = useRef<Set<string> | null>(null);
|
||||
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index,
|
||||
query,
|
||||
@@ -213,23 +216,41 @@ function QueryAddOns({
|
||||
}
|
||||
setAddOns(filteredAddOns);
|
||||
|
||||
const activeAddOnKeys = new Set(
|
||||
Object.entries(ADD_ONS_KEYS_TO_QUERY_PATH)
|
||||
.filter(([, path]) => hasValue(get(query, path)))
|
||||
.map(([key]) => key),
|
||||
);
|
||||
const availableAddOnKeys = new Set(filteredAddOns.map((a) => a.key));
|
||||
const previousKeys = prevAvailableKeysRef.current;
|
||||
const hasAvailabilityItemsChanged =
|
||||
previousKeys !== null &&
|
||||
(previousKeys.size !== availableAddOnKeys.size ||
|
||||
[...availableAddOnKeys].some((key) => !previousKeys.has(key)));
|
||||
prevAvailableKeysRef.current = availableAddOnKeys;
|
||||
|
||||
const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
|
||||
// Filter and set selected views: add-ons that are both active and available
|
||||
setSelectedViews(
|
||||
filteredAddOns.filter(
|
||||
(addOn) =>
|
||||
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
|
||||
if (!initializedRef.current || hasAvailabilityItemsChanged) {
|
||||
initializedRef.current = true;
|
||||
|
||||
const activeAddOnKeys = new Set(
|
||||
Object.entries(ADD_ONS_KEYS_TO_QUERY_PATH)
|
||||
.filter(([, path]) => hasValue(get(query, path)))
|
||||
.map(([key]) => key),
|
||||
);
|
||||
|
||||
// Initial seeding from query values on mount
|
||||
setSelectedViews(
|
||||
filteredAddOns.filter(
|
||||
(addOn) =>
|
||||
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
setSelectedViews((prev) =>
|
||||
prev.filter((view) =>
|
||||
filteredAddOns.some((addOn) => addOn.key === view.key),
|
||||
),
|
||||
);
|
||||
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [panelType, isListViewPanel, query]);
|
||||
}, [panelType, isListViewPanel, query, showReduceTo]);
|
||||
|
||||
const handleOptionClick = (e: RadioChangeEvent): void => {
|
||||
if (selectedViews.find((view) => view.key === e.target.value.key)) {
|
||||
|
||||
@@ -1379,8 +1379,6 @@ function QuerySearch({
|
||||
run: (): boolean => {
|
||||
if (onRun && typeof onRun === 'function') {
|
||||
onRun(getCurrentExpression());
|
||||
} else {
|
||||
handleRunQuery();
|
||||
}
|
||||
return true;
|
||||
},
|
||||
|
||||
@@ -410,8 +410,6 @@ function TraceOperatorEditor({
|
||||
run: (): boolean => {
|
||||
if (onRun && typeof onRun === 'function') {
|
||||
onRun(value);
|
||||
} else {
|
||||
handleRunQuery();
|
||||
}
|
||||
return true;
|
||||
},
|
||||
|
||||
@@ -270,44 +270,6 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
await waitFor(() => expect(onRun).toHaveBeenCalled(), { timeout: 2000 });
|
||||
});
|
||||
|
||||
it('calls handleRunQuery when Mod-Enter without onRun', async () => {
|
||||
const mockedHandleRunQuery = handleRunQueryMock as jest.MockedFunction<
|
||||
() => void
|
||||
>;
|
||||
mockedHandleRunQuery.mockClear();
|
||||
|
||||
render(
|
||||
<QuerySearch
|
||||
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
queryData={initialQueriesMap.logs.builder.queryData[0]}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Wait for CodeMirror to initialize
|
||||
await waitFor(() => {
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR);
|
||||
expect(editor).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
|
||||
await userEvent.click(editor);
|
||||
await userEvent.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
|
||||
|
||||
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
|
||||
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
|
||||
fireEvent.keyDown(editor, {
|
||||
key: 'Enter',
|
||||
code: 'Enter',
|
||||
[modKey]: true,
|
||||
keyCode: 13,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(mockedHandleRunQuery).toHaveBeenCalled(), {
|
||||
timeout: 2000,
|
||||
});
|
||||
});
|
||||
|
||||
it('initializes CodeMirror with expression from queryData.filter.expression on mount', async () => {
|
||||
const testExpression =
|
||||
"http.status_code >= 500 AND service.name = 'frontend'";
|
||||
|
||||
@@ -3,14 +3,21 @@
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
import { jest } from '@jest/globals';
|
||||
import { fireEvent, waitFor } from '@testing-library/react';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
import { Having, IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { render, screen, userEvent } from 'tests/test-utils';
|
||||
import {
|
||||
Having,
|
||||
IBuilderQuery,
|
||||
Query,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { UseQueryOperations } from 'types/common/operations.types';
|
||||
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
|
||||
|
||||
import { QueryV2 } from '../QueryV2';
|
||||
import { QueryBuilderV2 } from '../../QueryBuilderV2';
|
||||
|
||||
// Local mocks for domain-specific heavy child components
|
||||
jest.mock(
|
||||
@@ -36,16 +43,87 @@ const mockedUseQueryOperations = jest.mocked(
|
||||
useQueryOperations,
|
||||
) as jest.MockedFunction<UseQueryOperations>;
|
||||
|
||||
describe('QueryV2 - base render', () => {
|
||||
describe('QueryBuilderV2 + QueryV2 - base render', () => {
|
||||
let handleRunQueryMock: jest.MockedFunction<() => void>;
|
||||
|
||||
beforeEach(() => {
|
||||
const mockCloneQuery = jest.fn() as jest.MockedFunction<
|
||||
(type: string, q: IBuilderQuery) => void
|
||||
>;
|
||||
handleRunQueryMock = jest.fn() as jest.MockedFunction<() => void>;
|
||||
const baseQuery: IBuilderQuery = {
|
||||
queryName: 'A',
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: '',
|
||||
aggregations: [],
|
||||
timeAggregation: '',
|
||||
spaceAggregation: '',
|
||||
temporality: '',
|
||||
functions: [],
|
||||
filter: undefined,
|
||||
filters: { items: [], op: 'AND' },
|
||||
groupBy: [],
|
||||
expression: '',
|
||||
disabled: false,
|
||||
having: [] as Having[],
|
||||
limit: 10,
|
||||
stepInterval: null,
|
||||
orderBy: [],
|
||||
legend: 'A',
|
||||
};
|
||||
|
||||
const currentQueryObj: Query = {
|
||||
id: 'test',
|
||||
unit: undefined,
|
||||
queryType: EQueryType.CLICKHOUSE,
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
builder: {
|
||||
queryData: [baseQuery],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
|
||||
const updateAllQueriesOperators: QueryBuilderContextType['updateAllQueriesOperators'] = (
|
||||
q,
|
||||
) => q;
|
||||
const updateQueriesData: QueryBuilderContextType['updateQueriesData'] = (q) =>
|
||||
q;
|
||||
|
||||
mockedUseQueryBuilder.mockReturnValue(({
|
||||
// Only fields used by QueryV2
|
||||
currentQuery: currentQueryObj,
|
||||
stagedQuery: null,
|
||||
lastUsedQuery: null,
|
||||
setLastUsedQuery: jest.fn(),
|
||||
supersetQuery: currentQueryObj,
|
||||
setSupersetQuery: jest.fn(),
|
||||
initialDataSource: null,
|
||||
panelType: PANEL_TYPES.TABLE,
|
||||
isEnabledQuery: true,
|
||||
handleSetQueryData: jest.fn(),
|
||||
handleSetTraceOperatorData: jest.fn(),
|
||||
handleSetFormulaData: jest.fn(),
|
||||
handleSetQueryItemData: jest.fn(),
|
||||
handleSetConfig: jest.fn(),
|
||||
removeQueryBuilderEntityByIndex: jest.fn(),
|
||||
removeAllQueryBuilderEntities: jest.fn(),
|
||||
removeQueryTypeItemByIndex: jest.fn(),
|
||||
addNewBuilderQuery: jest.fn(),
|
||||
addNewFormula: jest.fn(),
|
||||
removeTraceOperator: jest.fn(),
|
||||
addTraceOperator: jest.fn(),
|
||||
cloneQuery: mockCloneQuery,
|
||||
panelType: null,
|
||||
addNewQueryItem: jest.fn(),
|
||||
redirectWithQueryBuilderData: jest.fn(),
|
||||
handleRunQuery: handleRunQueryMock,
|
||||
resetQuery: jest.fn(),
|
||||
handleOnUnitsChange: jest.fn(),
|
||||
updateAllQueriesOperators,
|
||||
updateQueriesData,
|
||||
initQueryBuilderData: jest.fn(),
|
||||
isStagedQueryUpdated: jest.fn(() => false),
|
||||
isDefaultQuery: jest.fn(() => false),
|
||||
} as unknown) as QueryBuilderContextType);
|
||||
|
||||
mockedUseQueryOperations.mockReturnValue({
|
||||
@@ -71,40 +149,7 @@ describe('QueryV2 - base render', () => {
|
||||
});
|
||||
|
||||
it('renders limit input when dataSource is logs', () => {
|
||||
const baseQuery: IBuilderQuery = {
|
||||
queryName: 'A',
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: '',
|
||||
aggregations: [],
|
||||
timeAggregation: '',
|
||||
spaceAggregation: '',
|
||||
temporality: '',
|
||||
functions: [],
|
||||
filter: undefined,
|
||||
filters: { items: [], op: 'AND' },
|
||||
groupBy: [],
|
||||
expression: '',
|
||||
disabled: false,
|
||||
having: [] as Having[],
|
||||
limit: 10,
|
||||
stepInterval: null,
|
||||
orderBy: [],
|
||||
legend: 'A',
|
||||
};
|
||||
|
||||
render(
|
||||
<QueryV2
|
||||
index={0}
|
||||
isAvailableToDisable
|
||||
query={baseQuery}
|
||||
version="v4"
|
||||
onSignalSourceChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
signalSourceChangeEnabled={false}
|
||||
queriesCount={1}
|
||||
showTraceOperator={false}
|
||||
hasTraceOperator={false}
|
||||
/>,
|
||||
);
|
||||
render(<QueryBuilderV2 panelType={PANEL_TYPES.TABLE} version="v4" />);
|
||||
|
||||
// Ensure the Limit add-on input is present and is of type number
|
||||
const limitInput = screen.getByPlaceholderText(
|
||||
@@ -115,4 +160,43 @@ describe('QueryV2 - base render', () => {
|
||||
expect(limitInput).toHaveAttribute('name', 'limit');
|
||||
expect(limitInput).toHaveAttribute('data-testid', 'input-Limit');
|
||||
});
|
||||
|
||||
it('Cmd+Enter on an input triggers handleRunQuery via container handler', async () => {
|
||||
render(<QueryBuilderV2 panelType={PANEL_TYPES.TABLE} version="v4" />);
|
||||
|
||||
const limitInput = screen.getByPlaceholderText('Enter limit');
|
||||
fireEvent.keyDown(limitInput, {
|
||||
key: 'Enter',
|
||||
code: 'Enter',
|
||||
metaKey: true,
|
||||
});
|
||||
|
||||
expect(handleRunQueryMock).toHaveBeenCalled();
|
||||
|
||||
const legendInput = screen.getByPlaceholderText('Write legend format');
|
||||
fireEvent.keyDown(legendInput, {
|
||||
key: 'Enter',
|
||||
code: 'Enter',
|
||||
metaKey: true,
|
||||
});
|
||||
|
||||
expect(handleRunQueryMock).toHaveBeenCalled();
|
||||
|
||||
const CM_EDITOR_SELECTOR = '.cm-editor .cm-content';
|
||||
// Wait for CodeMirror to initialize
|
||||
await waitFor(() => {
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR);
|
||||
expect(editor).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
|
||||
await userEvent.click(editor);
|
||||
fireEvent.keyDown(editor, {
|
||||
key: 'Enter',
|
||||
code: 'Enter',
|
||||
metaKey: true,
|
||||
});
|
||||
|
||||
expect(handleRunQueryMock).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -37,10 +37,7 @@ function ThresholdItem({
|
||||
);
|
||||
if (units.length === 0) {
|
||||
component = (
|
||||
<Tooltip
|
||||
trigger="hover"
|
||||
title="Please select a Y-axis unit for the query first"
|
||||
>
|
||||
<Tooltip trigger="hover" title="No compatible units available">
|
||||
<Select
|
||||
placeholder="Unit"
|
||||
value={threshold.unit ? threshold.unit : null}
|
||||
|
||||
@@ -47,9 +47,17 @@ export function getCategoryByOptionId(id: string): string | undefined {
|
||||
}
|
||||
|
||||
export function getCategorySelectOptionByName(
|
||||
name: string,
|
||||
name: string | undefined,
|
||||
): DefaultOptionType[] {
|
||||
if (!name) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const categories = getYAxisCategories(YAxisSource.ALERTS);
|
||||
if (!categories.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return (
|
||||
categories
|
||||
.find((category) => category.name === name)
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import YAxisUnitSelector from 'components/YAxisUnitSelector';
|
||||
import { YAxisSource } from 'components/YAxisUnitSelector/types';
|
||||
import {
|
||||
UniversalYAxisUnit,
|
||||
YAxisSource,
|
||||
} from 'components/YAxisUnitSelector/types';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { useCreateAlertState } from 'container/CreateAlertV2/context';
|
||||
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
|
||||
import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import useGetYAxisUnit from 'hooks/useGetYAxisUnit';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { AlertTypes } from 'types/api/alerts/alertTypes';
|
||||
@@ -26,6 +29,7 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
|
||||
alertState,
|
||||
setAlertState,
|
||||
isEditMode,
|
||||
setThresholdState,
|
||||
} = useCreateAlertState();
|
||||
const { selectedTime: globalSelectedInterval } = useSelector<
|
||||
AppState,
|
||||
@@ -51,6 +55,18 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
|
||||
}
|
||||
}, [initialYAxisUnit, setAlertState, shouldUpdateYAxisUnit]);
|
||||
|
||||
const handleYAxisUnitChange = useCallback(
|
||||
(value: UniversalYAxisUnit): void => {
|
||||
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
|
||||
// Reset all threshold units when the y-axis unit changes
|
||||
setThresholdState({
|
||||
type: 'SET_THRESHOLDS',
|
||||
payload: thresholdState.thresholds.map((t) => ({ ...t, unit: '' })),
|
||||
});
|
||||
},
|
||||
[setAlertState, setThresholdState, thresholdState.thresholds],
|
||||
);
|
||||
|
||||
const headline = (
|
||||
<div className="chart-preview-headline">
|
||||
<PlotTag
|
||||
@@ -60,9 +76,7 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
|
||||
<YAxisUnitSelector
|
||||
value={yAxisUnit}
|
||||
initialValue={initialYAxisUnit}
|
||||
onChange={(value): void => {
|
||||
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
|
||||
}}
|
||||
onChange={handleYAxisUnitChange}
|
||||
source={YAxisSource.ALERTS}
|
||||
loading={isLoading}
|
||||
/>
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
|
||||
import {
|
||||
fireEvent,
|
||||
render,
|
||||
screen,
|
||||
userEvent,
|
||||
waitFor,
|
||||
} from 'tests/test-utils';
|
||||
import {
|
||||
IDashboardVariable,
|
||||
TSortVariableValuesType,
|
||||
@@ -639,4 +645,186 @@ describe('VariableItem Component', () => {
|
||||
await expectCircularDependencyError();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Textbox Variable Default Value Handling', () => {
|
||||
test('saves textbox variable with defaultValue and selectedValue set to textboxValue', async () => {
|
||||
const user = userEvent.setup();
|
||||
const textboxVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Textbox Variable',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'my-default-value',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(textboxVariable);
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with defaultValue and selectedValue equal to textboxValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'my-default-value',
|
||||
defaultValue: 'my-default-value',
|
||||
selectedValue: 'my-default-value',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
test('saves textbox variable with empty values when textboxValue is empty', async () => {
|
||||
const user = userEvent.setup();
|
||||
const textboxVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Textbox Variable',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: '',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(textboxVariable);
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with empty defaultValue and selectedValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: '',
|
||||
defaultValue: '',
|
||||
selectedValue: '',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
test('updates textbox defaultValue and selectedValue when user changes textboxValue input', async () => {
|
||||
const user = userEvent.setup();
|
||||
const textboxVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Textbox Variable',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'initial-value',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(textboxVariable);
|
||||
|
||||
// Change the textbox value
|
||||
const textboxInput = screen.getByPlaceholderText(
|
||||
'Enter a default value (if any)...',
|
||||
);
|
||||
await user.clear(textboxInput);
|
||||
await user.type(textboxInput, 'updated-value');
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with the updated defaultValue and selectedValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'updated-value',
|
||||
defaultValue: 'updated-value',
|
||||
selectedValue: 'updated-value',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
test('non-textbox variables use variableDefaultValue instead of textboxValue', async () => {
|
||||
const user = userEvent.setup();
|
||||
const queryVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Query Variable',
|
||||
type: 'QUERY',
|
||||
queryValue: 'SELECT * FROM test',
|
||||
textboxValue: 'should-not-be-used',
|
||||
defaultValue: 'query-default-value',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(queryVariable);
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with defaultValue not being textboxValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'QUERY',
|
||||
defaultValue: 'query-default-value',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
|
||||
// Verify that defaultValue is NOT the textboxValue
|
||||
const savedVariable = onSave.mock.calls[0][1];
|
||||
expect(savedVariable.defaultValue).not.toBe('should-not-be-used');
|
||||
});
|
||||
|
||||
test('switching to textbox type sets defaultValue and selectedValue correctly on save', async () => {
|
||||
const user = userEvent.setup();
|
||||
// Start with a QUERY variable
|
||||
const queryVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Variable',
|
||||
type: 'QUERY',
|
||||
queryValue: 'SELECT * FROM test',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(queryVariable);
|
||||
|
||||
// Switch to TEXTBOX type
|
||||
const textboxButton = findButtonByText(TEXT.TEXTBOX);
|
||||
expect(textboxButton).toBeInTheDocument();
|
||||
if (textboxButton) {
|
||||
await user.click(textboxButton);
|
||||
}
|
||||
|
||||
// Enter a default value in the textbox input
|
||||
const textboxInput = screen.getByPlaceholderText(
|
||||
'Enter a default value (if any)...',
|
||||
);
|
||||
await user.type(textboxInput, 'new-textbox-default');
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with type TEXTBOX and correct defaultValue and selectedValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'new-textbox-default',
|
||||
defaultValue: 'new-textbox-default',
|
||||
selectedValue: 'new-textbox-default',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -320,6 +320,10 @@ function VariableItem({
|
||||
]);
|
||||
|
||||
const variableValue = useMemo(() => {
|
||||
if (queryType === 'TEXTBOX') {
|
||||
return variableTextboxValue;
|
||||
}
|
||||
|
||||
if (variableMultiSelect) {
|
||||
let value = variableData.selectedValue;
|
||||
if (isEmpty(value)) {
|
||||
@@ -352,6 +356,8 @@ function VariableItem({
|
||||
variableData.selectedValue,
|
||||
variableData.showALLOption,
|
||||
variableDefaultValue,
|
||||
variableTextboxValue,
|
||||
queryType,
|
||||
previewValues,
|
||||
]);
|
||||
|
||||
@@ -367,13 +373,10 @@ function VariableItem({
|
||||
multiSelect: variableMultiSelect,
|
||||
showALLOption: queryType === 'DYNAMIC' ? true : variableShowALLOption,
|
||||
sort: variableSortType,
|
||||
...(queryType === 'TEXTBOX' && {
|
||||
selectedValue: (variableData.selectedValue ||
|
||||
variableTextboxValue) as never,
|
||||
}),
|
||||
...(queryType !== 'TEXTBOX' && {
|
||||
defaultValue: variableDefaultValue as never,
|
||||
}),
|
||||
// the reason we need to do this is because defaultValues are treated differently in case of textbox type
|
||||
// They are the exact same and not like the other types where defaultValue is a separate field
|
||||
defaultValue:
|
||||
queryType === 'TEXTBOX' ? variableTextboxValue : variableDefaultValue,
|
||||
modificationUUID: generateUUID(),
|
||||
id: variableData.id || generateUUID(),
|
||||
order: variableData.order,
|
||||
|
||||
@@ -25,6 +25,12 @@
|
||||
}
|
||||
}
|
||||
|
||||
&.focused {
|
||||
.variable-value {
|
||||
outline: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-value {
|
||||
display: flex;
|
||||
min-width: 120px;
|
||||
@@ -93,6 +99,12 @@
|
||||
|
||||
.lightMode {
|
||||
.variable-item {
|
||||
&.focused {
|
||||
.variable-value {
|
||||
border: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-name {
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
|
||||
@@ -94,7 +94,7 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
cycleNodes,
|
||||
});
|
||||
}
|
||||
}, [setVariablesToGetUpdated, variables, variablesTableData]);
|
||||
}, [variables, variablesTableData]);
|
||||
|
||||
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
|
||||
// also trigger when the global time changes
|
||||
|
||||
@@ -80,10 +80,12 @@ describe('VariableItem', () => {
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
expect(screen.getByPlaceholderText('Enter value')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByTestId('variable-textbox-test_variable'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('calls onChange event handler when Input value changes', async () => {
|
||||
test('calls onValueUpdate when Input value changes and blurs', async () => {
|
||||
render(
|
||||
<MockQueryClientProvider>
|
||||
<VariableItem
|
||||
@@ -102,13 +104,19 @@ describe('VariableItem', () => {
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
|
||||
const inputElement = screen.getByTestId('variable-textbox-test_variable');
|
||||
|
||||
// Change the value
|
||||
act(() => {
|
||||
const inputElement = screen.getByPlaceholderText('Enter value');
|
||||
fireEvent.change(inputElement, { target: { value: 'newValue' } });
|
||||
});
|
||||
|
||||
// Blur the input to trigger the update
|
||||
act(() => {
|
||||
fireEvent.blur(inputElement);
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
// expect(mockOnValueUpdate).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnValueUpdate).toHaveBeenCalledWith(
|
||||
'testVariable',
|
||||
'test_variable',
|
||||
|
||||
@@ -8,14 +8,14 @@ import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
|
||||
import { Input, Popover, Tooltip, Typography } from 'antd';
|
||||
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
|
||||
import { memo, useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -71,6 +71,15 @@ function VariableItem({
|
||||
string | string[] | undefined
|
||||
>(undefined);
|
||||
|
||||
// Local state for textbox input to ensure smooth editing experience
|
||||
const [textboxInputValue, setTextboxInputValue] = useState<string>(
|
||||
(variableData.selectedValue?.toString() ||
|
||||
variableData.defaultValue?.toString()) ??
|
||||
'',
|
||||
);
|
||||
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
|
||||
const textboxInputRef = useRef<InputRef>(null);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
@@ -371,7 +380,7 @@ function VariableItem({
|
||||
}, [variableData.type, variableData.customValue]);
|
||||
|
||||
return (
|
||||
<div className="variable-item">
|
||||
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
|
||||
<Typography.Text className="variable-name" ellipsis>
|
||||
${variableData.name}
|
||||
{variableData.description && (
|
||||
@@ -384,16 +393,40 @@ function VariableItem({
|
||||
<div className="variable-value">
|
||||
{variableData.type === 'TEXTBOX' ? (
|
||||
<Input
|
||||
ref={textboxInputRef}
|
||||
placeholder="Enter value"
|
||||
data-testid={`variable-textbox-${variableData.id}`}
|
||||
bordered={false}
|
||||
key={variableData.selectedValue?.toString()}
|
||||
defaultValue={variableData.selectedValue?.toString()}
|
||||
value={textboxInputValue}
|
||||
title={textboxInputValue}
|
||||
onChange={(e): void => {
|
||||
debouncedHandleChange(e.target.value || '');
|
||||
setTextboxInputValue(e.target.value);
|
||||
}}
|
||||
style={{
|
||||
width:
|
||||
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
|
||||
onFocus={(): void => {
|
||||
setIsTextboxFocused(true);
|
||||
}}
|
||||
onBlur={(e): void => {
|
||||
setIsTextboxFocused(false);
|
||||
const value = e.target.value.trim();
|
||||
// If empty, reset to default value
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
debouncedHandleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
debouncedHandleChange(value);
|
||||
}
|
||||
}}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
const value = textboxInputValue.trim();
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
debouncedHandleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
debouncedHandleChange(value);
|
||||
}
|
||||
textboxInputRef.current?.blur();
|
||||
}
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
|
||||
@@ -257,6 +257,15 @@ export const onUpdateVariableNode = (
|
||||
): void => {
|
||||
const visited = new Set<string>();
|
||||
|
||||
// If nodeToUpdate is not in topologicalOrder (e.g., CUSTOM variable),
|
||||
// we still need to mark its children as needing updates
|
||||
if (!topologicalOrder.includes(nodeToUpdate)) {
|
||||
// Mark direct children of the node as visited so they get processed
|
||||
(graph[nodeToUpdate] || []).forEach((child) => {
|
||||
visited.add(child);
|
||||
});
|
||||
}
|
||||
|
||||
// Start processing from the node to update
|
||||
topologicalOrder.forEach((node) => {
|
||||
if (node === nodeToUpdate || visited.has(node)) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { areArraysEqual } from './util';
|
||||
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
|
||||
|
||||
describe('areArraysEqual', () => {
|
||||
it('should return true for equal arrays with same order', () => {
|
||||
@@ -31,3 +31,121 @@ describe('areArraysEqual', () => {
|
||||
expect(areArraysEqual(array1, array2)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onUpdateVariableNode', () => {
|
||||
// Graph structure:
|
||||
// deployment -> namespace -> service -> pod
|
||||
// deployment has no parents, namespace depends on deployment, etc.
|
||||
const graph: VariableGraph = {
|
||||
deployment: ['namespace'],
|
||||
namespace: ['service'],
|
||||
service: ['pod'],
|
||||
pod: [],
|
||||
customVar: ['namespace'], // CUSTOM variable that affects namespace
|
||||
};
|
||||
|
||||
const topologicalOrder = ['deployment', 'namespace', 'service', 'pod'];
|
||||
|
||||
it('should call callback for the node and all its descendants', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('deployment', graph, topologicalOrder, callback);
|
||||
|
||||
expect(visited).toEqual(['deployment', 'namespace', 'service', 'pod']);
|
||||
});
|
||||
|
||||
it('should call callback starting from a middle node', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('namespace', graph, topologicalOrder, callback);
|
||||
|
||||
expect(visited).toEqual(['namespace', 'service', 'pod']);
|
||||
});
|
||||
|
||||
it('should only call callback for the leaf node when updating leaf', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('pod', graph, topologicalOrder, callback);
|
||||
|
||||
expect(visited).toEqual(['pod']);
|
||||
});
|
||||
|
||||
it('should handle CUSTOM variable not in topologicalOrder by updating its children', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
// customVar is not in topologicalOrder but has namespace as a child
|
||||
onUpdateVariableNode('customVar', graph, topologicalOrder, callback);
|
||||
|
||||
// Should process namespace and its descendants (service, pod)
|
||||
expect(visited).toEqual(['namespace', 'service', 'pod']);
|
||||
});
|
||||
|
||||
it('should handle node not in graph gracefully', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('unknownNode', graph, topologicalOrder, callback);
|
||||
|
||||
// Should not call callback for any node since unknownNode has no children
|
||||
expect(visited).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle empty graph', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('deployment', {}, topologicalOrder, callback);
|
||||
|
||||
// deployment is in topologicalOrder, so callback is called for it
|
||||
expect(visited).toEqual(['deployment']);
|
||||
});
|
||||
|
||||
it('should handle empty topologicalOrder', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('deployment', graph, [], callback);
|
||||
|
||||
expect(visited).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle CUSTOM variable with multiple children', () => {
|
||||
const graphWithMultipleChildren: VariableGraph = {
|
||||
...graph,
|
||||
customMulti: ['namespace', 'service'], // CUSTOM variable affecting multiple nodes
|
||||
};
|
||||
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode(
|
||||
'customMulti',
|
||||
graphWithMultipleChildren,
|
||||
topologicalOrder,
|
||||
callback,
|
||||
);
|
||||
|
||||
// Should process namespace, service, and pod (descendants)
|
||||
expect(visited).toEqual(['namespace', 'service', 'pod']);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,3 +4,7 @@
|
||||
gap: 8px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.rule-unit-selector {
|
||||
width: 150px;
|
||||
}
|
||||
|
||||
@@ -15,8 +15,7 @@ import { DefaultOptionType } from 'antd/es/select';
|
||||
import {
|
||||
getCategoryByOptionId,
|
||||
getCategorySelectOptionByName,
|
||||
} from 'container/NewWidget/RightContainer/alertFomatCategories';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
} from 'container/CreateAlertV2/AlertCondition/utils';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import {
|
||||
AlertDef,
|
||||
@@ -43,10 +42,10 @@ function RuleOptions({
|
||||
setAlertDef,
|
||||
queryCategory,
|
||||
queryOptions,
|
||||
yAxisUnit,
|
||||
}: RuleOptionsProps): JSX.Element {
|
||||
// init namespace for translations
|
||||
const { t } = useTranslation('alerts');
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
const { ruleType } = alertDef;
|
||||
|
||||
@@ -365,11 +364,9 @@ function RuleOptions({
|
||||
</InlineSelect>
|
||||
);
|
||||
|
||||
const selectedCategory = getCategoryByOptionId(currentQuery?.unit || '');
|
||||
const selectedCategory = getCategoryByOptionId(yAxisUnit);
|
||||
|
||||
const categorySelectOptions = getCategorySelectOptionByName(
|
||||
selectedCategory?.name,
|
||||
);
|
||||
const categorySelectOptions = getCategorySelectOptionByName(selectedCategory);
|
||||
|
||||
const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
|
||||
|
||||
@@ -402,6 +399,7 @@ function RuleOptions({
|
||||
|
||||
<Form.Item noStyle>
|
||||
<Select
|
||||
className="rule-unit-selector"
|
||||
getPopupContainer={popupContainer}
|
||||
allowClear
|
||||
showSearch
|
||||
@@ -515,5 +513,6 @@ interface RuleOptionsProps {
|
||||
setAlertDef: (a: AlertDef) => void;
|
||||
queryCategory: EQueryType;
|
||||
queryOptions: DefaultOptionType[];
|
||||
yAxisUnit: string;
|
||||
}
|
||||
export default RuleOptions;
|
||||
|
||||
@@ -914,6 +914,7 @@ function FormAlertRules({
|
||||
alertDef={alertDef}
|
||||
setAlertDef={setAlertDef}
|
||||
queryOptions={queryOptions}
|
||||
yAxisUnit={yAxisUnit || ''}
|
||||
/>
|
||||
|
||||
{renderBasicInfo()}
|
||||
|
||||
@@ -318,7 +318,9 @@ function GridCardGraph({
|
||||
version={version}
|
||||
threshold={threshold}
|
||||
headerMenuList={menuList}
|
||||
isFetchingResponse={queryResponse.isFetching}
|
||||
isFetchingResponse={
|
||||
queryResponse.isFetching || variablesToGetUpdated.length > 0
|
||||
}
|
||||
setRequestData={setRequestData}
|
||||
onClickHandler={onClickHandler}
|
||||
onDragSelect={onDragSelect}
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
.license-section {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
|
||||
.license-section-header {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
|
||||
.license-section-title {
|
||||
color: #fff;
|
||||
font-family: Inter;
|
||||
font-size: 16px;
|
||||
font-style: normal;
|
||||
line-height: 24px;
|
||||
letter-spacing: -0.08px;
|
||||
}
|
||||
}
|
||||
|
||||
.license-section-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
|
||||
.license-section-content-item {
|
||||
padding: 16px;
|
||||
border: 1px solid var(--Slate-500, #161922);
|
||||
background: var(--Ink-400, #121317);
|
||||
border-radius: 3px;
|
||||
|
||||
.license-section-content-item-title-action {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 8px;
|
||||
|
||||
color: var(--Vanilla-300, #eee);
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: normal;
|
||||
letter-spacing: -0.07px;
|
||||
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.license-section-content-item-description {
|
||||
color: var(--Vanilla-400, #c0c1c3);
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
font-style: normal;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.license-section {
|
||||
.license-section-header {
|
||||
.license-section-title {
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
}
|
||||
|
||||
.license-section-content {
|
||||
.license-section-content-item {
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
|
||||
.license-section-content-item-title-action {
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
|
||||
.license-section-content-item-description {
|
||||
color: var(--bg-ink-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
import './LicenseSection.styles.scss';
|
||||
|
||||
import { Button } from '@signozhq/button';
|
||||
import { Typography } from 'antd';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { Copy } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
|
||||
function LicenseSection(): JSX.Element | null {
|
||||
const { activeLicense } = useAppContext();
|
||||
const { notifications } = useNotifications();
|
||||
const [, handleCopyToClipboard] = useCopyToClipboard();
|
||||
|
||||
const getMaskedKey = (key: string): string => {
|
||||
if (!key || key.length < 4) return key || 'N/A';
|
||||
return `${key.substring(0, 2)}********${key
|
||||
.substring(key.length - 2)
|
||||
.trim()}`;
|
||||
};
|
||||
|
||||
const handleCopyKey = (text: string): void => {
|
||||
handleCopyToClipboard(text);
|
||||
notifications.success({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
};
|
||||
|
||||
if (!activeLicense?.key) {
|
||||
return <></>;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="license-section">
|
||||
<div className="license-section-header">
|
||||
<div className="license-section-title">License</div>
|
||||
</div>
|
||||
|
||||
<div className="license-section-content">
|
||||
<div className="license-section-content-item">
|
||||
<div className="license-section-content-item-title-action">
|
||||
<span>License key</span>
|
||||
<span style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
|
||||
<Typography.Text code>{getMaskedKey(activeLicense.key)}</Typography.Text>
|
||||
<Button
|
||||
variant="ghost"
|
||||
aria-label="Copy license key"
|
||||
data-testid="license-key-copy-btn"
|
||||
onClick={(): void => handleCopyKey(activeLicense.key)}
|
||||
>
|
||||
<Copy size={14} />
|
||||
</Button>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="license-section-content-item-description">
|
||||
Your SigNoz license key.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default LicenseSection;
|
||||
@@ -0,0 +1 @@
|
||||
export { default } from './LicenseSection';
|
||||
@@ -1,8 +1,31 @@
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import MySettingsContainer from 'container/MySettings';
|
||||
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
|
||||
import {
|
||||
act,
|
||||
fireEvent,
|
||||
render,
|
||||
screen,
|
||||
waitFor,
|
||||
within,
|
||||
} from 'tests/test-utils';
|
||||
|
||||
const toggleThemeFunction = jest.fn();
|
||||
const logEventFunction = jest.fn();
|
||||
const copyToClipboardFn = jest.fn();
|
||||
const editUserFn = jest.fn();
|
||||
|
||||
jest.mock('react-use', () => ({
|
||||
__esModule: true,
|
||||
useCopyToClipboard: (): [unknown, (text: string) => void] => [
|
||||
null,
|
||||
copyToClipboardFn,
|
||||
],
|
||||
}));
|
||||
|
||||
jest.mock('api/v1/user/id/update', () => ({
|
||||
__esModule: true,
|
||||
default: (...args: unknown[]): Promise<unknown> => editUserFn(...args),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useDarkMode', () => ({
|
||||
__esModule: true,
|
||||
@@ -44,6 +67,7 @@ const PASSWORD_VALIDATION_MESSAGE_TEST_ID = 'password-validation-message';
|
||||
describe('MySettings Flows', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
editUserFn.mockResolvedValue({});
|
||||
render(<MySettingsContainer />);
|
||||
});
|
||||
|
||||
@@ -215,4 +239,71 @@ describe('MySettings Flows', () => {
|
||||
expect(submitButton).not.toBeDisabled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('License section', () => {
|
||||
it('Should render license section content when license key exists', () => {
|
||||
expect(screen.getByText('License')).toBeInTheDocument();
|
||||
expect(screen.getByText('License key')).toBeInTheDocument();
|
||||
expect(screen.getByText('Your SigNoz license key.')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('Should not render license section when license key is missing', () => {
|
||||
const { container } = render(<MySettingsContainer />, undefined, {
|
||||
appContextOverrides: {
|
||||
activeLicense: null,
|
||||
},
|
||||
});
|
||||
|
||||
const scoped = within(container);
|
||||
expect(scoped.queryByText('License')).not.toBeInTheDocument();
|
||||
expect(scoped.queryByText('License key')).not.toBeInTheDocument();
|
||||
expect(
|
||||
scoped.queryByText('Your SigNoz license key.'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('Should mask license key in the UI', () => {
|
||||
const { container } = render(<MySettingsContainer />, undefined, {
|
||||
appContextOverrides: {
|
||||
activeLicense: {
|
||||
key: 'abcd',
|
||||
} as any,
|
||||
},
|
||||
});
|
||||
|
||||
expect(within(container).getByText('ab********cd')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('Should not mask license key if it is too short', () => {
|
||||
const { container } = render(<MySettingsContainer />, undefined, {
|
||||
appContextOverrides: {
|
||||
activeLicense: {
|
||||
key: 'abc',
|
||||
} as any,
|
||||
},
|
||||
});
|
||||
|
||||
expect(within(container).getByText('abc')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('Should copy license key and show success toast', async () => {
|
||||
const user = userEvent.setup();
|
||||
const { container } = render(<MySettingsContainer />, undefined, {
|
||||
appContextOverrides: {
|
||||
activeLicense: {
|
||||
key: 'test-license-key-12345',
|
||||
} as any,
|
||||
},
|
||||
});
|
||||
|
||||
await user.click(within(container).getByTestId('license-key-copy-btn'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(copyToClipboardFn).toHaveBeenCalledWith('test-license-key-12345');
|
||||
expect(successNotification).toHaveBeenCalledWith({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -15,6 +15,7 @@ import { useMutation } from 'react-query';
|
||||
import { UserPreference } from 'types/api/preferences/preference';
|
||||
import { showErrorNotification } from 'utils/error';
|
||||
|
||||
import LicenseSection from './LicenseSection';
|
||||
import TimezoneAdaptation from './TimezoneAdaptation/TimezoneAdaptation';
|
||||
import UserInfo from './UserInfo';
|
||||
|
||||
@@ -230,6 +231,8 @@ function MySettings(): JSX.Element {
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<LicenseSection />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -20,13 +20,17 @@ interface AttributeRecord {
|
||||
interface AttributeActionsProps {
|
||||
record: AttributeRecord;
|
||||
isPinned?: boolean;
|
||||
onTogglePin: (fieldKey: string) => void;
|
||||
onTogglePin?: (fieldKey: string) => void;
|
||||
showPinned?: boolean;
|
||||
showCopyOptions?: boolean;
|
||||
}
|
||||
|
||||
export default function AttributeActions({
|
||||
record,
|
||||
isPinned,
|
||||
onTogglePin,
|
||||
showPinned = true,
|
||||
showCopyOptions = true,
|
||||
}: AttributeActionsProps): JSX.Element {
|
||||
const [isOpen, setIsOpen] = useState<boolean>(false);
|
||||
const [isFilterInLoading, setIsFilterInLoading] = useState<boolean>(false);
|
||||
@@ -91,7 +95,7 @@ export default function AttributeActions({
|
||||
}, [onCopyFieldValue, textToCopy]);
|
||||
|
||||
const handleTogglePin = useCallback((): void => {
|
||||
onTogglePin(record.field);
|
||||
onTogglePin?.(record.field);
|
||||
}, [onTogglePin, record.field]);
|
||||
|
||||
const moreActionsContent = (
|
||||
@@ -105,35 +109,41 @@ export default function AttributeActions({
|
||||
>
|
||||
Group By Attribute
|
||||
</Button>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<Copy size={14} />}
|
||||
onClick={handleCopyFieldName}
|
||||
block
|
||||
>
|
||||
Copy Field Name
|
||||
</Button>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<Copy size={14} />}
|
||||
onClick={handleCopyFieldValue}
|
||||
block
|
||||
>
|
||||
Copy Field Value
|
||||
</Button>
|
||||
{showCopyOptions && (
|
||||
<>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<Copy size={14} />}
|
||||
onClick={handleCopyFieldName}
|
||||
block
|
||||
>
|
||||
Copy Field Name
|
||||
</Button>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<Copy size={14} />}
|
||||
onClick={handleCopyFieldValue}
|
||||
block
|
||||
>
|
||||
Copy Field Value
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className={cx('action-btn', { 'action-btn--is-open': isOpen })}>
|
||||
<Tooltip title={isPinned ? 'Unpin attribute' : 'Pin attribute'}>
|
||||
<Button
|
||||
className={`filter-btn periscope-btn ${isPinned ? 'pinned' : ''}`}
|
||||
aria-label={isPinned ? 'Unpin attribute' : 'Pin attribute'}
|
||||
icon={<Pin size={14} fill={isPinned ? 'currentColor' : 'none'} />}
|
||||
onClick={handleTogglePin}
|
||||
/>
|
||||
</Tooltip>
|
||||
{showPinned && (
|
||||
<Tooltip title={isPinned ? 'Unpin attribute' : 'Pin attribute'}>
|
||||
<Button
|
||||
className={`filter-btn periscope-btn ${isPinned ? 'pinned' : ''}`}
|
||||
aria-label={isPinned ? 'Unpin attribute' : 'Pin attribute'}
|
||||
icon={<Pin size={14} fill={isPinned ? 'currentColor' : 'none'} />}
|
||||
onClick={handleTogglePin}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
<Tooltip title="Filter for value">
|
||||
<Button
|
||||
className="filter-btn periscope-btn"
|
||||
@@ -184,4 +194,7 @@ export default function AttributeActions({
|
||||
|
||||
AttributeActions.defaultProps = {
|
||||
isPinned: false,
|
||||
showPinned: true,
|
||||
showCopyOptions: true,
|
||||
onTogglePin: undefined,
|
||||
};
|
||||
|
||||
@@ -47,15 +47,56 @@
|
||||
.description {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
padding: 10px 12px;
|
||||
padding: 10px 0px;
|
||||
|
||||
.item {
|
||||
padding: 8px 12px;
|
||||
&,
|
||||
.attribute-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
position: relative; // ensure absolutely-positioned children anchor to the row
|
||||
}
|
||||
|
||||
// Show attribute actions on hover for hardcoded rows
|
||||
.attribute-actions-wrapper {
|
||||
display: none;
|
||||
gap: 8px;
|
||||
position: absolute;
|
||||
right: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
border-radius: 4px;
|
||||
padding: 2px;
|
||||
|
||||
// style the action button group
|
||||
.action-btn {
|
||||
display: flex;
|
||||
gap: 4px;
|
||||
}
|
||||
.filter-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
border: none;
|
||||
box-shadow: none;
|
||||
border-radius: 2px;
|
||||
background: var(--bg-slate-400);
|
||||
padding: 4px;
|
||||
gap: 3px;
|
||||
height: 24px;
|
||||
width: 24px;
|
||||
|
||||
&:hover {
|
||||
background: var(--bg-slate-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
&:hover {
|
||||
background-color: var(--bg-slate-500);
|
||||
.attribute-actions-wrapper {
|
||||
display: flex;
|
||||
}
|
||||
}
|
||||
|
||||
.span-name-wrapper {
|
||||
@@ -646,6 +687,29 @@
|
||||
|
||||
.description {
|
||||
.item {
|
||||
.attribute-actions-wrapper {
|
||||
display: none;
|
||||
gap: 8px;
|
||||
position: absolute;
|
||||
right: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
border-radius: 4px;
|
||||
padding: 2px;
|
||||
|
||||
.filter-btn {
|
||||
background: var(--bg-vanilla-200);
|
||||
&:hover {
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
}
|
||||
}
|
||||
&:hover {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
.attribute-actions-wrapper {
|
||||
display: flex;
|
||||
}
|
||||
}
|
||||
.span-name-wrapper {
|
||||
.span-percentile-value-container {
|
||||
&.span-percentile-value-container-open {
|
||||
|
||||
@@ -21,6 +21,7 @@ import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import { USER_PREFERENCES } from 'constants/userPreferences';
|
||||
import AttributeActions from 'container/SpanDetailsDrawer/Attributes/AttributeActions';
|
||||
import dayjs from 'dayjs';
|
||||
import useClickOutside from 'hooks/useClickOutside';
|
||||
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
|
||||
@@ -103,6 +104,10 @@ interface IResourceAttribute {
|
||||
const DEFAULT_RESOURCE_ATTRIBUTES = {
|
||||
serviceName: 'service.name',
|
||||
name: 'name',
|
||||
spanId: 'span_id',
|
||||
spanKind: 'kind_string',
|
||||
statusCodeString: 'status_code_string',
|
||||
statusMessage: 'status_message',
|
||||
};
|
||||
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
@@ -835,6 +840,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
{selectedSpan.spanId}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<div className="attribute-actions-wrapper">
|
||||
<AttributeActions
|
||||
record={{
|
||||
field: DEFAULT_RESOURCE_ATTRIBUTES.spanId,
|
||||
value: selectedSpan.spanId,
|
||||
}}
|
||||
showPinned={false}
|
||||
showCopyOptions={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="item">
|
||||
<Typography.Text className="attribute-key">start time</Typography.Text>
|
||||
@@ -863,6 +878,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
</Typography.Text>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div className="attribute-actions-wrapper">
|
||||
<AttributeActions
|
||||
record={{
|
||||
field: DEFAULT_RESOURCE_ATTRIBUTES.serviceName,
|
||||
value: selectedSpan.serviceName,
|
||||
}}
|
||||
showPinned={false}
|
||||
showCopyOptions={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="item">
|
||||
@@ -872,6 +897,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
{selectedSpan.spanKind}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<div className="attribute-actions-wrapper">
|
||||
<AttributeActions
|
||||
record={{
|
||||
field: DEFAULT_RESOURCE_ATTRIBUTES.spanKind,
|
||||
value: selectedSpan.spanKind,
|
||||
}}
|
||||
showPinned={false}
|
||||
showCopyOptions={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="item">
|
||||
<Typography.Text className="attribute-key">
|
||||
@@ -882,6 +917,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
{selectedSpan.statusCodeString}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<div className="attribute-actions-wrapper">
|
||||
<AttributeActions
|
||||
record={{
|
||||
field: DEFAULT_RESOURCE_ATTRIBUTES.statusCodeString,
|
||||
value: selectedSpan.statusCodeString,
|
||||
}}
|
||||
showPinned={false}
|
||||
showCopyOptions={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{selectedSpan.statusMessage && (
|
||||
@@ -891,6 +936,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
attributeValue={selectedSpan.statusMessage}
|
||||
onExpand={showStatusMessageModal}
|
||||
/>
|
||||
<div className="attribute-actions-wrapper">
|
||||
<AttributeActions
|
||||
record={{
|
||||
field: DEFAULT_RESOURCE_ATTRIBUTES.statusMessage,
|
||||
value: selectedSpan.statusMessage,
|
||||
}}
|
||||
showPinned={false}
|
||||
showCopyOptions={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<div className="item">
|
||||
|
||||
@@ -0,0 +1,247 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import {
|
||||
fireEvent,
|
||||
render,
|
||||
screen,
|
||||
userEvent,
|
||||
waitFor,
|
||||
} from 'tests/test-utils';
|
||||
|
||||
import AttributeActions from '../Attributes/AttributeActions';
|
||||
|
||||
// Mock only Popover from antd to simplify hover/open behavior while keeping other components real
|
||||
jest.mock('antd', () => {
|
||||
const actual = jest.requireActual('antd');
|
||||
const MockPopover = ({
|
||||
content,
|
||||
children,
|
||||
open,
|
||||
onOpenChange,
|
||||
...rest
|
||||
}: any): JSX.Element => (
|
||||
<div
|
||||
data-testid="mock-popover-wrapper"
|
||||
onMouseEnter={(): void => onOpenChange?.(true)}
|
||||
{...rest}
|
||||
>
|
||||
{children}
|
||||
{open ? <div data-testid="mock-popover-content">{content}</div> : null}
|
||||
</div>
|
||||
);
|
||||
return { ...actual, Popover: MockPopover };
|
||||
});
|
||||
|
||||
// Mock getAggregateKeys API used inside useTraceActions to resolve autocomplete keys
|
||||
jest.mock('api/queryBuilder/getAttributeKeys', () => ({
|
||||
getAggregateKeys: jest.fn().mockResolvedValue({
|
||||
payload: {
|
||||
attributeKeys: [
|
||||
{
|
||||
key: 'http.method',
|
||||
dataType: 'string',
|
||||
type: 'tag',
|
||||
isColumn: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
const record = { field: 'http.method', value: 'GET' };
|
||||
|
||||
describe('AttributeActions (unit)', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders core action buttons (pin, filter in/out, more)', async () => {
|
||||
render(<AttributeActions record={record} isPinned={false} />);
|
||||
|
||||
expect(
|
||||
screen.getByRole('button', { name: 'Pin attribute' }),
|
||||
).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByRole('button', { name: 'Filter for value' }),
|
||||
).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByRole('button', { name: 'Filter out value' }),
|
||||
).toBeInTheDocument();
|
||||
// more actions (ellipsis) button
|
||||
expect(
|
||||
document.querySelector('.lucide-ellipsis')?.closest('button'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('applies "Filter for" and calls redirectWithQueryBuilderData with correct query', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
const currentQuery = {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateOperator: 'count',
|
||||
aggregateAttribute: { key: 'signoz_span_duration' },
|
||||
filters: { items: [], op: 'AND' },
|
||||
filter: { expression: '' },
|
||||
groupBy: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
} as any;
|
||||
|
||||
render(<AttributeActions record={record} />, undefined, {
|
||||
queryBuilderOverrides: { currentQuery, redirectWithQueryBuilderData },
|
||||
});
|
||||
|
||||
const filterForBtn = screen.getByRole('button', { name: 'Filter for value' });
|
||||
|
||||
await userEvent.click(filterForBtn);
|
||||
await waitFor(() => {
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
builder: expect.objectContaining({
|
||||
queryData: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
filters: expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: 'http.method' }),
|
||||
op: '=',
|
||||
value: 'GET',
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
{},
|
||||
expect.any(String),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('applies "Filter out" and calls redirectWithQueryBuilderData with correct query', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
const currentQuery = {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateOperator: 'count',
|
||||
aggregateAttribute: { key: 'signoz_span_duration' },
|
||||
filters: { items: [], op: 'AND' },
|
||||
filter: { expression: '' },
|
||||
groupBy: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
} as any;
|
||||
|
||||
render(<AttributeActions record={record} />, undefined, {
|
||||
queryBuilderOverrides: { currentQuery, redirectWithQueryBuilderData },
|
||||
});
|
||||
|
||||
const filterOutBtn = screen.getByRole('button', { name: 'Filter out value' });
|
||||
|
||||
await userEvent.click(filterOutBtn);
|
||||
await waitFor(() => {
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
builder: expect.objectContaining({
|
||||
queryData: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
filters: expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: 'http.method' }),
|
||||
op: '!=',
|
||||
value: 'GET',
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
{},
|
||||
expect.any(String),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('opens more actions on hover and calls Group By handler; closes after click', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
const currentQuery = {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateOperator: 'count',
|
||||
aggregateAttribute: { key: 'signoz_span_duration' },
|
||||
filters: { items: [], op: 'AND' },
|
||||
filter: { expression: '' },
|
||||
groupBy: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
} as any;
|
||||
render(<AttributeActions record={record} />, undefined, {
|
||||
queryBuilderOverrides: { currentQuery, redirectWithQueryBuilderData },
|
||||
});
|
||||
|
||||
const ellipsisBtn = document
|
||||
.querySelector('.lucide-ellipsis')
|
||||
?.closest('button') as HTMLElement;
|
||||
expect(ellipsisBtn).toBeInTheDocument();
|
||||
|
||||
// hover to trigger Popover open via mock
|
||||
fireEvent.mouseEnter(ellipsisBtn.parentElement as Element);
|
||||
|
||||
// content appears
|
||||
await waitFor(() =>
|
||||
expect(screen.getByText('Group By Attribute')).toBeInTheDocument(),
|
||||
);
|
||||
|
||||
await userEvent.click(screen.getByText('Group By Attribute'));
|
||||
await waitFor(() => {
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
builder: expect.objectContaining({
|
||||
queryData: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
groupBy: expect.arrayContaining([
|
||||
expect.objectContaining({ key: 'http.method' }),
|
||||
]),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
{},
|
||||
expect.any(String),
|
||||
);
|
||||
});
|
||||
|
||||
// After clicking group by, popover should close
|
||||
await waitFor(() =>
|
||||
expect(screen.queryByTestId('mock-popover-content')).not.toBeInTheDocument(),
|
||||
);
|
||||
});
|
||||
|
||||
it('hides pin button when showPinned=false', async () => {
|
||||
render(<AttributeActions record={record} showPinned={false} />);
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /pin attribute/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('hides copy options when showCopyOptions=false', async () => {
|
||||
render(<AttributeActions record={record} showCopyOptions={false} />);
|
||||
const ellipsisBtn = document
|
||||
.querySelector('.lucide-ellipsis')
|
||||
?.closest('button') as HTMLElement;
|
||||
fireEvent.mouseEnter(ellipsisBtn.parentElement as Element);
|
||||
|
||||
await waitFor(() =>
|
||||
expect(screen.queryByText('Copy Field Name')).not.toBeInTheDocument(),
|
||||
);
|
||||
expect(screen.queryByText('Copy Field Value')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
183
frontend/src/hooks/dashboard/utils.test.ts
Normal file
183
frontend/src/hooks/dashboard/utils.test.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import {
|
||||
initialClickHouseData,
|
||||
initialQueryBuilderFormValuesMap,
|
||||
initialQueryPromQLData,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { IDashboardVariable, Widgets } from 'types/api/dashboard/getAll';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import { createDynamicVariableToWidgetsMap } from './utils';
|
||||
|
||||
const createMockDynamicVariable = (
|
||||
overrides: Partial<IDashboardVariable> = {},
|
||||
): IDashboardVariable => ({
|
||||
id: 'var-1',
|
||||
name: 'testVar',
|
||||
description: '',
|
||||
type: 'DYNAMIC',
|
||||
sort: 'DISABLED',
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
dynamicVariablesAttribute: 'service.name',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createBaseWidget = (id: string, query: Query): Widgets => ({
|
||||
id,
|
||||
title: 'Test Widget',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.TIME_SERIES,
|
||||
opacity: '1',
|
||||
nullZeroValues: '',
|
||||
timePreferance: 'GLOBAL_TIME',
|
||||
softMin: null,
|
||||
softMax: null,
|
||||
selectedLogFields: null,
|
||||
selectedTracesFields: null,
|
||||
query,
|
||||
});
|
||||
|
||||
const createMockPromQLWidget = (
|
||||
id: string,
|
||||
queries: {
|
||||
query: string;
|
||||
name?: string;
|
||||
legend?: string;
|
||||
disabled?: boolean;
|
||||
}[],
|
||||
): Widgets => {
|
||||
const promqlQueries = queries.map((q) => ({
|
||||
...initialQueryPromQLData,
|
||||
query: q.query,
|
||||
name: q.name || 'A',
|
||||
legend: q.legend || '',
|
||||
disabled: q.disabled ?? false,
|
||||
}));
|
||||
|
||||
const query: Query = {
|
||||
queryType: EQueryType.PROM,
|
||||
promql: promqlQueries,
|
||||
builder: {
|
||||
queryData: [],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: 'query-1',
|
||||
};
|
||||
|
||||
return createBaseWidget(id, query);
|
||||
};
|
||||
|
||||
const createMockClickHouseWidget = (
|
||||
id: string,
|
||||
queries: {
|
||||
query: string;
|
||||
name?: string;
|
||||
legend?: string;
|
||||
disabled?: boolean;
|
||||
}[],
|
||||
): Widgets => {
|
||||
const clickhouseQueries = queries.map((q) => ({
|
||||
...initialClickHouseData,
|
||||
query: q.query,
|
||||
name: q.name || 'A',
|
||||
legend: q.legend || '',
|
||||
disabled: q.disabled ?? false,
|
||||
}));
|
||||
|
||||
const query: Query = {
|
||||
queryType: EQueryType.CLICKHOUSE,
|
||||
promql: [],
|
||||
builder: {
|
||||
queryData: [],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: clickhouseQueries,
|
||||
id: 'query-1',
|
||||
};
|
||||
|
||||
return createBaseWidget(id, query);
|
||||
};
|
||||
|
||||
const createMockQueryBuilderWidget = (
|
||||
id: string,
|
||||
filters: { key: string; value: string | string[]; op?: string }[],
|
||||
): Widgets => {
|
||||
const queryData = {
|
||||
...initialQueryBuilderFormValuesMap[DataSource.LOGS],
|
||||
queryName: 'A',
|
||||
filters: {
|
||||
items: filters.map((f, index) => ({
|
||||
id: `filter-${index}`,
|
||||
key: { key: f.key, dataType: DataTypes.String, type: '', id: f.key },
|
||||
op: f.op || '=',
|
||||
value: f.value,
|
||||
})),
|
||||
op: 'AND',
|
||||
},
|
||||
};
|
||||
|
||||
const query: Query = {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
promql: [],
|
||||
builder: {
|
||||
queryData: [queryData],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: 'query-1',
|
||||
};
|
||||
|
||||
return createBaseWidget(id, query);
|
||||
};
|
||||
|
||||
describe('createDynamicVariableToWidgetsMap', () => {
|
||||
it('should handle widgets with different query types', () => {
|
||||
const dynamicVariables = [
|
||||
createMockDynamicVariable({
|
||||
id: 'var-1',
|
||||
name: 'service.name123',
|
||||
dynamicVariablesAttribute: 'service.name',
|
||||
}),
|
||||
];
|
||||
|
||||
const widgets = [
|
||||
createMockPromQLWidget('widget-promql-pass', [
|
||||
{ query: 'up{service="$service.name123"}' },
|
||||
]),
|
||||
createMockPromQLWidget('widget-promql-fail', [
|
||||
{ query: 'up{service="$service.name"}' },
|
||||
]),
|
||||
createMockClickHouseWidget('widget-clickhouse-pass', [
|
||||
{ query: "SELECT * FROM logs WHERE service_name = '$service.name123'" },
|
||||
]),
|
||||
createMockClickHouseWidget('widget-clickhouse-fail', [
|
||||
{ query: "SELECT * FROM logs WHERE service_name = '$service.name'" },
|
||||
]),
|
||||
createMockQueryBuilderWidget('widget-builder-pass', [
|
||||
{ key: 'service.name', value: '$service.name123' },
|
||||
]),
|
||||
createMockQueryBuilderWidget('widget-builder-fail', [
|
||||
{ key: 'service.name', value: '$service.name' },
|
||||
]),
|
||||
];
|
||||
|
||||
const result = createDynamicVariableToWidgetsMap(dynamicVariables, widgets);
|
||||
|
||||
expect(result['var-1']).toContain('widget-promql-pass');
|
||||
expect(result['var-1']).toContain('widget-clickhouse-pass');
|
||||
expect(result['var-1']).toContain('widget-builder-pass');
|
||||
|
||||
expect(result['var-1']).not.toContain('widget-promql-fail');
|
||||
expect(result['var-1']).not.toContain('widget-clickhouse-fail');
|
||||
expect(result['var-1']).not.toContain('widget-builder-fail');
|
||||
});
|
||||
});
|
||||
@@ -104,10 +104,9 @@ export const createDynamicVariableToWidgetsMap = (
|
||||
// Check each widget for usage of dynamic variables
|
||||
if (Array.isArray(widgets)) {
|
||||
widgets.forEach((widget) => {
|
||||
if (
|
||||
widget.query?.builder?.queryData &&
|
||||
widget.query?.queryType === EQueryType.QUERY_BUILDER
|
||||
) {
|
||||
if (widget.query?.queryType === EQueryType.QUERY_BUILDER) {
|
||||
if (!Array.isArray(widget.query.builder.queryData)) return;
|
||||
|
||||
widget.query.builder.queryData.forEach((queryData: IBuilderQuery) => {
|
||||
// Check filter items for dynamic variables
|
||||
queryData.filters?.items?.forEach((filter: TagFilterItem) => {
|
||||
@@ -139,6 +138,34 @@ export const createDynamicVariableToWidgetsMap = (
|
||||
});
|
||||
}
|
||||
});
|
||||
} else if (widget.query?.queryType === EQueryType.PROM) {
|
||||
if (!Array.isArray(widget.query.promql)) return;
|
||||
|
||||
widget.query.promql.forEach((promqlQuery) => {
|
||||
dynamicVariables.forEach((variable) => {
|
||||
if (
|
||||
variable.dynamicVariablesAttribute &&
|
||||
promqlQuery.query?.includes(`$${variable.name}`) &&
|
||||
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
|
||||
) {
|
||||
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
|
||||
}
|
||||
});
|
||||
});
|
||||
} else if (widget.query?.queryType === EQueryType.CLICKHOUSE) {
|
||||
if (!Array.isArray(widget.query.clickhouse_sql)) return;
|
||||
|
||||
widget.query.clickhouse_sql.forEach((clickhouseQuery) => {
|
||||
dynamicVariables.forEach((variable) => {
|
||||
if (
|
||||
variable.dynamicVariablesAttribute &&
|
||||
clickhouseQuery.query?.includes(`$${variable.name}`) &&
|
||||
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
|
||||
) {
|
||||
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
132
frontend/src/lib/__tests__/getConvertedValue.test.ts
Normal file
132
frontend/src/lib/__tests__/getConvertedValue.test.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
|
||||
import { convertValue, getFormattedUnit } from 'lib/getConvertedValue';
|
||||
|
||||
describe('getFormattedUnit', () => {
|
||||
it('should return the grafana unit for universal unit if it exists', () => {
|
||||
const formattedUnit = getFormattedUnit(UniversalYAxisUnit.KILOBYTES);
|
||||
expect(formattedUnit).toBe('deckbytes');
|
||||
});
|
||||
|
||||
it('should return the unit directly if it is not a universal unit', () => {
|
||||
const formattedUnit = getFormattedUnit('{reason}');
|
||||
expect(formattedUnit).toBe('{reason}');
|
||||
});
|
||||
|
||||
it('should return the universal unit directly if it does not have a grafana equivalent', () => {
|
||||
const formattedUnit = getFormattedUnit(UniversalYAxisUnit.EXABYTES);
|
||||
expect(formattedUnit).toBe(UniversalYAxisUnit.EXABYTES);
|
||||
});
|
||||
});
|
||||
|
||||
describe('convertValue', () => {
|
||||
describe('data', () => {
|
||||
it('should convert bytes (IEC) to kilobytes', () => {
|
||||
expect(
|
||||
convertValue(
|
||||
1000,
|
||||
UniversalYAxisUnit.BYTES_IEC,
|
||||
UniversalYAxisUnit.KILOBYTES,
|
||||
),
|
||||
).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert bytes (SI) to kilobytes', () => {
|
||||
expect(
|
||||
convertValue(1000, UniversalYAxisUnit.BYTES, UniversalYAxisUnit.KILOBYTES),
|
||||
).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert kilobytes to bytes', () => {
|
||||
expect(
|
||||
convertValue(1, UniversalYAxisUnit.KILOBYTES, UniversalYAxisUnit.BYTES),
|
||||
).toBe(1000);
|
||||
});
|
||||
|
||||
it('should convert megabytes to kilobytes', () => {
|
||||
expect(convertValue(1, 'mbytes', 'kbytes')).toBe(1024);
|
||||
});
|
||||
|
||||
it('should convert gigabytes to megabytes', () => {
|
||||
expect(convertValue(1, 'gbytes', 'mbytes')).toBe(1024);
|
||||
});
|
||||
|
||||
it('should convert kilobytes to megabytes', () => {
|
||||
expect(convertValue(1024, 'kbytes', 'mbytes')).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert bits to gigabytes', () => {
|
||||
// 12 GB = 103079215104 bits
|
||||
expect(convertValue(103079215104, 'bits', 'gbytes')).toBe(12);
|
||||
});
|
||||
});
|
||||
|
||||
describe('time', () => {
|
||||
it('should convert milliseconds to seconds', () => {
|
||||
expect(convertValue(1000, 'ms', 's')).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert seconds to milliseconds', () => {
|
||||
expect(convertValue(1, 's', 'ms')).toBe(1000);
|
||||
});
|
||||
|
||||
it('should convert nanoseconds to milliseconds', () => {
|
||||
expect(convertValue(1000000, 'ns', 'ms')).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert seconds to minutes', () => {
|
||||
expect(convertValue(60, 's', 'm')).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert minutes to hours', () => {
|
||||
expect(convertValue(60, 'm', 'h')).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('data rate', () => {
|
||||
it('should convert bytes/sec to kibibytes/sec', () => {
|
||||
expect(convertValue(1024, 'binBps', 'KiBs')).toBe(1);
|
||||
});
|
||||
|
||||
it('should convert kibibytes/sec to bytes/sec', () => {
|
||||
expect(convertValue(1, 'KiBs', 'binBps')).toBe(1024);
|
||||
});
|
||||
});
|
||||
|
||||
describe('throughput', () => {
|
||||
it('should convert counts per second to counts per minute', () => {
|
||||
expect(convertValue(1, 'cps', 'cpm')).toBe(1 / 60);
|
||||
});
|
||||
|
||||
it('should convert operations per second to operations per minute', () => {
|
||||
expect(convertValue(1, 'ops', 'opm')).toBe(1 / 60);
|
||||
});
|
||||
|
||||
it('should convert counts per minute to counts per second', () => {
|
||||
expect(convertValue(1, 'cpm', 'cps')).toBe(60);
|
||||
});
|
||||
|
||||
it('should convert operations per minute to operations per second', () => {
|
||||
expect(convertValue(1, 'opm', 'ops')).toBe(60);
|
||||
});
|
||||
});
|
||||
|
||||
describe('percent', () => {
|
||||
it('should convert percentunit to percent', () => {
|
||||
expect(convertValue(0.5, 'percentunit', 'percent')).toBe(50);
|
||||
});
|
||||
|
||||
it('should convert percent to percentunit', () => {
|
||||
expect(convertValue(50, 'percent', 'percentunit')).toBe(0.5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('invalid values', () => {
|
||||
it('should return null when currentUnit is invalid', () => {
|
||||
expect(convertValue(100, 'invalidUnit', 'bytes')).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null when targetUnit is invalid', () => {
|
||||
expect(convertValue(100, 'bytes', 'invalidUnit')).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,3 +1,17 @@
|
||||
import {
|
||||
UniversalUnitToGrafanaUnit,
|
||||
Y_AXIS_UNIT_NAMES,
|
||||
} from 'components/YAxisUnitSelector/constants';
|
||||
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
|
||||
import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
|
||||
|
||||
// 1 byte = 8 bits
|
||||
// Or 1 bit = 1/8 bytes
|
||||
const BIT_FACTOR = 1 / 8;
|
||||
|
||||
const DECIMAL_FACTOR = 1000;
|
||||
const BINARY_FACTOR = 1024;
|
||||
|
||||
const unitsMapping = [
|
||||
{
|
||||
label: 'Data',
|
||||
@@ -15,62 +29,132 @@ const unitsMapping = [
|
||||
{
|
||||
label: 'bits(IEC)',
|
||||
value: 'bits',
|
||||
factor: 8, // 1 byte = 8 bits
|
||||
factor: BIT_FACTOR,
|
||||
},
|
||||
{
|
||||
label: 'bits(SI)',
|
||||
value: 'decbits',
|
||||
factor: 8, // 1 byte = 8 bits
|
||||
factor: BIT_FACTOR,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBITS],
|
||||
value: UniversalYAxisUnit.KILOBITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABITS],
|
||||
value: UniversalYAxisUnit.MEGABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 2,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABITS],
|
||||
value: UniversalYAxisUnit.GIGABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 3,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABITS],
|
||||
value: UniversalYAxisUnit.TERABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 4,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABITS],
|
||||
value: UniversalYAxisUnit.PETABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 5,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS],
|
||||
value: UniversalYAxisUnit.EXABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS],
|
||||
value: UniversalYAxisUnit.ZETTABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS],
|
||||
value: UniversalYAxisUnit.YOTTABITS,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 8,
|
||||
},
|
||||
{
|
||||
label: 'kibibytes',
|
||||
value: 'kbytes',
|
||||
factor: 1024,
|
||||
factor: BINARY_FACTOR,
|
||||
},
|
||||
{
|
||||
label: 'kilobytes',
|
||||
value: 'deckbytes',
|
||||
factor: 1000,
|
||||
factor: DECIMAL_FACTOR,
|
||||
},
|
||||
{
|
||||
label: 'mebibytes',
|
||||
value: 'mbytes',
|
||||
factor: 1024 * 1024,
|
||||
factor: BINARY_FACTOR ** 2,
|
||||
},
|
||||
{
|
||||
label: 'megabytes',
|
||||
value: 'decmbytes',
|
||||
factor: 1000 * 1000,
|
||||
factor: DECIMAL_FACTOR ** 2,
|
||||
},
|
||||
{
|
||||
label: 'gibibytes',
|
||||
value: 'gbytes',
|
||||
factor: 1024 * 1024 * 1024,
|
||||
factor: BINARY_FACTOR ** 3,
|
||||
},
|
||||
{
|
||||
label: 'gigabytes',
|
||||
value: 'decgbytes',
|
||||
factor: 1000 * 1000 * 1000,
|
||||
factor: DECIMAL_FACTOR ** 3,
|
||||
},
|
||||
{
|
||||
label: 'tebibytes',
|
||||
value: 'tbytes',
|
||||
factor: 1024 * 1024 * 1024 * 1024,
|
||||
factor: BINARY_FACTOR ** 4,
|
||||
},
|
||||
{
|
||||
label: 'terabytes',
|
||||
value: 'dectbytes',
|
||||
factor: 1000 * 1000 * 1000 * 1000,
|
||||
factor: DECIMAL_FACTOR ** 4,
|
||||
},
|
||||
{
|
||||
label: 'pebibytes',
|
||||
value: 'pbytes',
|
||||
factor: 1024 * 1024 * 1024 * 1024 * 1024,
|
||||
factor: BINARY_FACTOR ** 5,
|
||||
},
|
||||
{
|
||||
label: 'petabytes',
|
||||
value: 'decpbytes',
|
||||
factor: 1000 * 1000 * 1000 * 1000 * 1000,
|
||||
factor: DECIMAL_FACTOR ** 5,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES],
|
||||
value: UniversalYAxisUnit.EXABYTES,
|
||||
factor: DECIMAL_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXBIBYTES],
|
||||
value: UniversalYAxisUnit.EXBIBYTES,
|
||||
factor: BINARY_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES],
|
||||
value: UniversalYAxisUnit.ZETTABYTES,
|
||||
factor: DECIMAL_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZEBIBYTES],
|
||||
value: UniversalYAxisUnit.ZEBIBYTES,
|
||||
factor: BINARY_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES],
|
||||
value: UniversalYAxisUnit.YOTTABYTES,
|
||||
factor: DECIMAL_FACTOR ** 8,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOBIBYTES],
|
||||
value: UniversalYAxisUnit.YOBIBYTES,
|
||||
factor: BINARY_FACTOR ** 8,
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -90,44 +174,103 @@ const unitsMapping = [
|
||||
{
|
||||
label: 'bits/sec(IEC)',
|
||||
value: 'binbps',
|
||||
factor: 8, // 1 byte = 8 bits
|
||||
factor: BIT_FACTOR, // 1 byte = 8 bits
|
||||
},
|
||||
{
|
||||
label: 'bits/sec(SI)',
|
||||
value: 'bps',
|
||||
factor: 8, // 1 byte = 8 bits
|
||||
factor: BIT_FACTOR, // 1 byte = 8 bits
|
||||
},
|
||||
{
|
||||
label: 'kibibytes/sec',
|
||||
value: 'KiBs',
|
||||
factor: 1024,
|
||||
factor: BINARY_FACTOR,
|
||||
},
|
||||
{
|
||||
label: 'kibibits/sec',
|
||||
value: 'Kibits',
|
||||
factor: 8 * 1024, // 1 KiB = 8 Kibits
|
||||
factor: BIT_FACTOR * BINARY_FACTOR, // 1 KiB = 8 Kibits
|
||||
},
|
||||
{
|
||||
label: 'kilobytes/sec',
|
||||
value: 'KBs',
|
||||
factor: 1000,
|
||||
factor: DECIMAL_FACTOR,
|
||||
},
|
||||
{
|
||||
label: 'kilobits/sec',
|
||||
value: 'Kbits',
|
||||
factor: 8 * 1000, // 1 KB = 8 Kbits
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR, // 1 KB = 8 Kbits
|
||||
},
|
||||
{
|
||||
label: 'mebibytes/sec',
|
||||
value: 'MiBs',
|
||||
factor: 1024 * 1024,
|
||||
factor: BINARY_FACTOR ** 2,
|
||||
},
|
||||
{
|
||||
label: 'mebibits/sec',
|
||||
value: 'Mibits',
|
||||
factor: 8 * 1024 * 1024, // 1 MiB = 8 Mibits
|
||||
factor: BIT_FACTOR * BINARY_FACTOR ** 2, // 1 MiB = 8 Mibits
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES_SECOND],
|
||||
value: UniversalYAxisUnit.EXABYTES_SECOND,
|
||||
factor: DECIMAL_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES_SECOND],
|
||||
value: UniversalYAxisUnit.ZETTABYTES_SECOND,
|
||||
factor: DECIMAL_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES_SECOND],
|
||||
value: UniversalYAxisUnit.YOTTABYTES_SECOND,
|
||||
factor: DECIMAL_FACTOR ** 8,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXBIBYTES_SECOND],
|
||||
value: UniversalYAxisUnit.EXBIBYTES_SECOND,
|
||||
factor: BINARY_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZEBIBYTES_SECOND],
|
||||
value: UniversalYAxisUnit.ZEBIBYTES_SECOND,
|
||||
factor: BINARY_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOBIBYTES_SECOND],
|
||||
value: UniversalYAxisUnit.YOBIBYTES_SECOND,
|
||||
factor: BINARY_FACTOR ** 8,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS_SECOND],
|
||||
value: UniversalYAxisUnit.EXABITS_SECOND,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS_SECOND],
|
||||
value: UniversalYAxisUnit.ZETTABITS_SECOND,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS_SECOND],
|
||||
value: UniversalYAxisUnit.YOTTABITS_SECOND,
|
||||
factor: BIT_FACTOR * DECIMAL_FACTOR ** 8,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXBIBITS_SECOND],
|
||||
value: UniversalYAxisUnit.EXBIBITS_SECOND,
|
||||
factor: BIT_FACTOR * BINARY_FACTOR ** 6,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZEBIBITS_SECOND],
|
||||
value: UniversalYAxisUnit.ZEBIBITS_SECOND,
|
||||
factor: BIT_FACTOR * BINARY_FACTOR ** 7,
|
||||
},
|
||||
{
|
||||
label: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOBIBITS_SECOND],
|
||||
value: UniversalYAxisUnit.YOBIBITS_SECOND,
|
||||
factor: BIT_FACTOR * BINARY_FACTOR ** 8,
|
||||
},
|
||||
// ... (other options)
|
||||
],
|
||||
},
|
||||
{
|
||||
@@ -268,6 +411,14 @@ function findUnitObject(
|
||||
return unitObj || null;
|
||||
}
|
||||
|
||||
export function getFormattedUnit(unit: string): string {
|
||||
const isUniversalYAxisUnit = isUniversalUnit(unit);
|
||||
if (isUniversalYAxisUnit) {
|
||||
return UniversalUnitToGrafanaUnit[unit as UniversalYAxisUnit] || unit;
|
||||
}
|
||||
return unit;
|
||||
}
|
||||
|
||||
export function convertValue(
|
||||
value: number,
|
||||
currentUnit?: string,
|
||||
@@ -281,8 +432,12 @@ export function convertValue(
|
||||
) {
|
||||
return value;
|
||||
}
|
||||
const currentUnitObj = findUnitObject(currentUnit);
|
||||
const targetUnitObj = findUnitObject(targetUnit);
|
||||
|
||||
const formattedCurrentUnit = getFormattedUnit(currentUnit);
|
||||
const formattedTargetUnit = getFormattedUnit(targetUnit);
|
||||
|
||||
const currentUnitObj = findUnitObject(formattedCurrentUnit);
|
||||
const targetUnitObj = findUnitObject(formattedTargetUnit);
|
||||
|
||||
if (currentUnitObj && targetUnitObj) {
|
||||
const baseValue = value * currentUnitObj.factor;
|
||||
|
||||
@@ -291,6 +291,10 @@ export function DashboardProvider({
|
||||
|
||||
variable.order = order;
|
||||
existingOrders.add(order);
|
||||
// ! BWC - Specific case for backward compatibility where textboxValue was used instead of defaultValue
|
||||
if (variable.type === 'TEXTBOX' && !variable.defaultValue) {
|
||||
variable.defaultValue = variable.textboxValue || '';
|
||||
}
|
||||
}
|
||||
|
||||
if (variable.id === undefined) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, waitFor } from '@testing-library/react';
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import getDashboard from 'api/v1/dashboards/id/get';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import ROUTES from 'constants/routes';
|
||||
@@ -379,12 +379,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
// Empty URL variables - tests initialization flow
|
||||
mockGetUrlVariables.mockReturnValue({});
|
||||
|
||||
const { getByTestId } = renderWithDashboardProvider(
|
||||
`/dashboard/${DASHBOARD_ID}`,
|
||||
{
|
||||
dashboardId: DASHBOARD_ID,
|
||||
},
|
||||
);
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -415,16 +412,14 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
});
|
||||
|
||||
// Verify dashboard state contains the variables with default values
|
||||
await waitFor(() => {
|
||||
const dashboardVariables = getByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables).toHaveProperty('environment');
|
||||
expect(parsedVariables).toHaveProperty('services');
|
||||
// Default allSelected values should be preserved
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
expect(parsedVariables).toHaveProperty('environment');
|
||||
expect(parsedVariables).toHaveProperty('services');
|
||||
// Default allSelected values should be preserved
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
|
||||
it('should merge URL variables with dashboard data and normalize values correctly', async () => {
|
||||
@@ -438,12 +433,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
.mockReturnValueOnce('development')
|
||||
.mockReturnValueOnce(['db', 'cache']);
|
||||
|
||||
const { getByTestId } = renderWithDashboardProvider(
|
||||
`/dashboard/${DASHBOARD_ID}`,
|
||||
{
|
||||
dashboardId: DASHBOARD_ID,
|
||||
},
|
||||
);
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -474,18 +466,16 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
});
|
||||
|
||||
// Verify the dashboard state reflects the normalized URL values
|
||||
await waitFor(() => {
|
||||
const dashboardVariables = getByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
// The selectedValue should be updated with normalized URL values
|
||||
expect(parsedVariables.environment.selectedValue).toBe('development');
|
||||
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
|
||||
// The selectedValue should be updated with normalized URL values
|
||||
expect(parsedVariables.environment.selectedValue).toBe('development');
|
||||
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
|
||||
|
||||
// allSelected should be set to false when URL values override
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
// allSelected should be set to false when URL values override
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle ALL_SELECTED_VALUE from URL and set allSelected correctly', async () => {
|
||||
@@ -495,12 +485,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
|
||||
mockGetUrlVariables.mockReturnValue(urlVariables);
|
||||
|
||||
const { getByTestId } = renderWithDashboardProvider(
|
||||
`/dashboard/${DASHBOARD_ID}`,
|
||||
{
|
||||
dashboardId: DASHBOARD_ID,
|
||||
},
|
||||
);
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -513,8 +500,8 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
);
|
||||
|
||||
// Verify that allSelected is set to true for the services variable
|
||||
await waitFor(() => {
|
||||
const dashboardVariables = getByTestId('dashboard-variables');
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.services.allSelected).toBe(true);
|
||||
@@ -563,3 +550,203 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
|
||||
const DASHBOARD_ID = 'test-dashboard-id';
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetUrlVariables.mockReturnValue({});
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
mockNormalizeUrlValueForVariable.mockImplementation((urlValue) => {
|
||||
if (urlValue === undefined || urlValue === null) {
|
||||
return urlValue;
|
||||
}
|
||||
return urlValue as IDashboardVariable['selectedValue'];
|
||||
});
|
||||
});
|
||||
|
||||
describe('Textbox Variable defaultValue Migration', () => {
|
||||
it('should set defaultValue from textboxValue for TEXTBOX variables without defaultValue (BWC)', async () => {
|
||||
// Mock dashboard with TEXTBOX variable that has textboxValue but no defaultValue
|
||||
// This simulates old data format before the migration
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myTextbox: {
|
||||
id: 'textbox-id',
|
||||
name: 'myTextbox',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'legacy-default-value',
|
||||
// defaultValue is intentionally missing to test BWC
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that defaultValue is set from textboxValue
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
|
||||
expect(parsedVariables.myTextbox.textboxValue).toBe('legacy-default-value');
|
||||
expect(parsedVariables.myTextbox.defaultValue).toBe('legacy-default-value');
|
||||
});
|
||||
});
|
||||
|
||||
it('should not override existing defaultValue for TEXTBOX variables', async () => {
|
||||
// Mock dashboard with TEXTBOX variable that already has defaultValue
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myTextbox: {
|
||||
id: 'textbox-id',
|
||||
name: 'myTextbox',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'old-textbox-value',
|
||||
defaultValue: 'existing-default-value',
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that existing defaultValue is preserved
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
|
||||
expect(parsedVariables.myTextbox.defaultValue).toBe(
|
||||
'existing-default-value',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should set empty defaultValue when textboxValue is also empty for TEXTBOX variables', async () => {
|
||||
// Mock dashboard with TEXTBOX variable with empty textboxValue and no defaultValue
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myTextbox: {
|
||||
id: 'textbox-id',
|
||||
name: 'myTextbox',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: '',
|
||||
// defaultValue is intentionally missing
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that defaultValue is set to empty string
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
|
||||
expect(parsedVariables.myTextbox.defaultValue).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
it('should not apply BWC logic to non-TEXTBOX variables', async () => {
|
||||
// Mock dashboard with QUERY variable that has no defaultValue
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myQuery: {
|
||||
id: 'query-id',
|
||||
name: 'myQuery',
|
||||
type: 'QUERY',
|
||||
queryValue: 'SELECT * FROM test',
|
||||
textboxValue: 'should-not-be-used',
|
||||
// defaultValue is intentionally missing
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that defaultValue is NOT set from textboxValue for QUERY type
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myQuery.type).toBe('QUERY');
|
||||
// defaultValue should not be set to textboxValue for non-TEXTBOX variables
|
||||
expect(parsedVariables.myQuery.defaultValue).not.toBe('should-not-be-used');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -37,6 +37,7 @@ export interface IDashboardVariable {
|
||||
// Custom
|
||||
customValue?: string;
|
||||
// Textbox
|
||||
// special case of variable where defaultValue is same as this. Otherwise, defaultValue is a single field
|
||||
textboxValue?: string;
|
||||
|
||||
sort: TSortVariableValuesType;
|
||||
|
||||
@@ -7634,11 +7634,6 @@ compute-scroll-into-view@^3.0.2:
|
||||
resolved "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.0.3.tgz"
|
||||
integrity sha512-nadqwNxghAGTamwIqQSG433W6OADZx2vCo3UXHNrzTRHK/htu+7+L0zhjEoaeaQVNAi3YgqWDv8+tzf0hRfR+A==
|
||||
|
||||
confusing-browser-globals@^1.0.10:
|
||||
version "1.0.11"
|
||||
resolved "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz"
|
||||
integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==
|
||||
|
||||
connect-history-api-fallback@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz"
|
||||
@@ -9018,42 +9013,11 @@ escodegen@^2.0.0:
|
||||
optionalDependencies:
|
||||
source-map "~0.6.1"
|
||||
|
||||
eslint-config-airbnb-base@^15.0.0:
|
||||
version "15.0.0"
|
||||
resolved "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz"
|
||||
integrity sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==
|
||||
dependencies:
|
||||
confusing-browser-globals "^1.0.10"
|
||||
object.assign "^4.1.2"
|
||||
object.entries "^1.1.5"
|
||||
semver "^6.3.0"
|
||||
|
||||
eslint-config-airbnb-typescript@^16.1.4:
|
||||
version "16.2.0"
|
||||
resolved "https://registry.npmjs.org/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-16.2.0.tgz"
|
||||
integrity sha512-OUaMPZpTOZGKd5tXOjJ9PRU4iYNW/Z5DoHIynjsVK/FpkWdiY5+nxQW6TiJAlLwVI1l53xUOrnlZWtVBVQzuWA==
|
||||
dependencies:
|
||||
eslint-config-airbnb-base "^15.0.0"
|
||||
|
||||
eslint-config-airbnb@^19.0.4:
|
||||
version "19.0.4"
|
||||
resolved "https://registry.npmjs.org/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz"
|
||||
integrity sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew==
|
||||
dependencies:
|
||||
eslint-config-airbnb-base "^15.0.0"
|
||||
object.assign "^4.1.2"
|
||||
object.entries "^1.1.5"
|
||||
|
||||
eslint-config-prettier@^8.3.0:
|
||||
version "8.8.0"
|
||||
resolved "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz"
|
||||
integrity sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==
|
||||
|
||||
eslint-config-standard@^16.0.3:
|
||||
version "16.0.3"
|
||||
resolved "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz"
|
||||
integrity sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==
|
||||
|
||||
eslint-import-resolver-node@^0.3.7:
|
||||
version "0.3.7"
|
||||
resolved "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz"
|
||||
@@ -9070,14 +9034,6 @@ eslint-module-utils@^2.8.0:
|
||||
dependencies:
|
||||
debug "^3.2.7"
|
||||
|
||||
eslint-plugin-es@^3.0.0:
|
||||
version "3.0.1"
|
||||
resolved "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz"
|
||||
integrity sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==
|
||||
dependencies:
|
||||
eslint-utils "^2.0.0"
|
||||
regexpp "^3.0.0"
|
||||
|
||||
eslint-plugin-import@^2.28.1:
|
||||
version "2.28.1"
|
||||
resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.28.1.tgz#63b8b5b3c409bfc75ebaf8fb206b07ab435482c4"
|
||||
@@ -9130,18 +9086,6 @@ eslint-plugin-jsx-a11y@^6.5.1:
|
||||
object.fromentries "^2.0.6"
|
||||
semver "^6.3.0"
|
||||
|
||||
eslint-plugin-node@^11.1.0:
|
||||
version "11.1.0"
|
||||
resolved "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz"
|
||||
integrity sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==
|
||||
dependencies:
|
||||
eslint-plugin-es "^3.0.0"
|
||||
eslint-utils "^2.0.0"
|
||||
ignore "^5.1.1"
|
||||
minimatch "^3.0.4"
|
||||
resolve "^1.10.1"
|
||||
semver "^6.1.0"
|
||||
|
||||
eslint-plugin-prettier@^4.0.0:
|
||||
version "4.2.1"
|
||||
resolved "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz"
|
||||
@@ -9149,11 +9093,6 @@ eslint-plugin-prettier@^4.0.0:
|
||||
dependencies:
|
||||
prettier-linter-helpers "^1.0.0"
|
||||
|
||||
eslint-plugin-promise@^5.1.0:
|
||||
version "5.2.0"
|
||||
resolved "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.2.0.tgz"
|
||||
integrity sha512-SftLb1pUG01QYq2A/hGAWfDRXqYD82zE7j7TopDOyNdU+7SvvoXREls/+PRTY17vUXzXnZA/zfnyKgRH6x4JJw==
|
||||
|
||||
eslint-plugin-react-hooks@^4.3.0:
|
||||
version "4.6.0"
|
||||
resolved "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz"
|
||||
@@ -9198,7 +9137,7 @@ eslint-scope@5.1.1, eslint-scope@^5.1.1:
|
||||
esrecurse "^4.3.0"
|
||||
estraverse "^4.1.1"
|
||||
|
||||
eslint-utils@^2.0.0, eslint-utils@^2.1.0:
|
||||
eslint-utils@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz"
|
||||
integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==
|
||||
@@ -10733,7 +10672,7 @@ ignore@^4.0.6:
|
||||
resolved "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz"
|
||||
integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
|
||||
|
||||
ignore@^5.1.1, ignore@^5.1.8, ignore@^5.2.0:
|
||||
ignore@^5.1.8, ignore@^5.2.0:
|
||||
version "5.2.4"
|
||||
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324"
|
||||
integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==
|
||||
@@ -13693,7 +13632,7 @@ object-keys@^1.1.1:
|
||||
resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz"
|
||||
integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
|
||||
|
||||
object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4:
|
||||
object.assign@^4.1.3, object.assign@^4.1.4:
|
||||
version "4.1.4"
|
||||
resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz"
|
||||
integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==
|
||||
@@ -13703,7 +13642,7 @@ object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4:
|
||||
has-symbols "^1.0.3"
|
||||
object-keys "^1.1.1"
|
||||
|
||||
object.entries@^1.1.5, object.entries@^1.1.6:
|
||||
object.entries@^1.1.6:
|
||||
version "1.1.6"
|
||||
resolved "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz"
|
||||
integrity sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==
|
||||
@@ -15814,7 +15753,7 @@ regexp.prototype.flags@^1.4.3, regexp.prototype.flags@^1.5.0:
|
||||
define-properties "^1.2.0"
|
||||
functions-have-names "^1.2.3"
|
||||
|
||||
regexpp@^3.0.0, regexpp@^3.1.0:
|
||||
regexpp@^3.1.0:
|
||||
version "3.2.0"
|
||||
resolved "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz"
|
||||
integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==
|
||||
@@ -16095,7 +16034,7 @@ resolve.exports@^1.1.0:
|
||||
resolved "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.1.tgz"
|
||||
integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==
|
||||
|
||||
resolve@^1.10.0, resolve@^1.10.1, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1:
|
||||
resolve@^1.10.0, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1:
|
||||
version "1.22.2"
|
||||
resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz"
|
||||
integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==
|
||||
@@ -16343,7 +16282,7 @@ selfsigned@^2.4.1:
|
||||
"@types/node-forge" "^1.3.0"
|
||||
node-forge "^1"
|
||||
|
||||
"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.6.3:
|
||||
"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.6.3:
|
||||
version "7.5.4"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
|
||||
integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==
|
||||
|
||||
14
go.mod
14
go.mod
@@ -16,6 +16,7 @@ require (
|
||||
github.com/coreos/go-oidc/v3 v3.14.1
|
||||
github.com/dgraph-io/ristretto/v2 v2.3.0
|
||||
github.com/dustin/go-humanize v1.0.1
|
||||
github.com/gin-gonic/gin v1.11.0
|
||||
github.com/go-co-op/gocron v1.30.1
|
||||
github.com/go-openapi/runtime v0.28.0
|
||||
github.com/go-openapi/strfmt v0.23.0
|
||||
@@ -92,6 +93,12 @@ require (
|
||||
github.com/bytedance/gopkg v0.1.3 // indirect
|
||||
github.com/bytedance/sonic/loader v0.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.6 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.27.0 // indirect
|
||||
github.com/goccy/go-yaml v1.18.0 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
|
||||
@@ -99,10 +106,11 @@ require (
|
||||
github.com/swaggest/refl v1.4.0 // indirect
|
||||
github.com/swaggest/usecase v1.3.1 // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.3.0 // indirect
|
||||
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
|
||||
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
|
||||
golang.org/x/arch v0.20.0 // indirect
|
||||
golang.org/x/tools/godoc v0.1.0-deprecated // indirect
|
||||
modernc.org/libc v1.66.10 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
@@ -199,7 +207,7 @@ require (
|
||||
github.com/jpillora/backoff v1.0.0 // indirect
|
||||
github.com/jtolds/gls v4.20.0+incompatible // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
|
||||
@@ -343,7 +351,7 @@ require (
|
||||
golang.org/x/time v0.11.0 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
gonum.org/v1/gonum v0.16.0 // indirect
|
||||
google.golang.org/api v0.236.0 // indirect
|
||||
google.golang.org/api v0.236.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/grpc v1.75.1 // indirect
|
||||
|
||||
28
go.sum
28
go.sum
@@ -292,7 +292,11 @@ github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S
|
||||
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
||||
github.com/fxamacker/cbor/v2 v2.9.0 h1:NpKPmjDBgUfBms6tr6JZkTHtfFGcMKsw3eGcmD/sapM=
|
||||
github.com/fxamacker/cbor/v2 v2.9.0/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk=
|
||||
github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls=
|
||||
github.com/go-co-op/gocron v1.30.1 h1:tjWUvJl5KrcwpkEkSXFSQFr4F9h5SfV/m4+RX0cV2fs=
|
||||
github.com/go-co-op/gocron v1.30.1/go.mod h1:39f6KNSGVOU1LO/ZOoZfcSxwlsJDQOKSu8erN0SH48Y=
|
||||
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
|
||||
@@ -340,9 +344,17 @@ github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ
|
||||
github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
|
||||
github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
|
||||
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
|
||||
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
|
||||
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
|
||||
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
|
||||
github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
|
||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
|
||||
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
|
||||
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
|
||||
github.com/go-redis/redismock/v9 v9.2.0 h1:ZrMYQeKPECZPjOj5u9eyOjg8Nnb0BS9lkVIZ6IpsKLw=
|
||||
github.com/go-redis/redismock/v9 v9.2.0/go.mod h1:18KHfGDK4Y6c2R0H38EUGWAdc7ZQS9gfYxc94k7rWT0=
|
||||
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
|
||||
@@ -360,6 +372,8 @@ github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJA
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA=
|
||||
github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
||||
@@ -636,8 +650,8 @@ github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCy
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
|
||||
github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs=
|
||||
github.com/knadh/koanf/v2 v2.2.0 h1:FZFwd9bUjpb8DyCWARUBy5ovuhDs1lI87dOEn2K8UVU=
|
||||
@@ -667,6 +681,8 @@ github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6Fm
|
||||
github.com/leodido/go-syslog/v4 v4.2.0 h1:A7vpbYxsO4e2E8udaurkLlxP5LDpDbmPMsGnuhb7jVk=
|
||||
github.com/leodido/go-syslog/v4 v4.2.0/go.mod h1:eJ8rUfDN5OS6dOkCOBYlg2a+hbAg6pJa99QXXgMrd98=
|
||||
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b h1:11UHH39z1RhZ5dc4y4r/4koJo6IYFgTRMe/LlwRTEw0=
|
||||
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b/go.mod h1:WZxr2/6a/Ar9bMDc2rN/LJrE/hF6bXE4LPyDSIxwAfg=
|
||||
github.com/linode/linodego v1.49.0 h1:MNd3qwvQzbXB5mCpvdCqlUIu1RPA9oC+50LyB9kK+GQ=
|
||||
@@ -1023,6 +1039,8 @@ github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkF
|
||||
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
|
||||
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
|
||||
github.com/uptrace/bun v1.2.9 h1:OOt2DlIcRUMSZPr6iXDFg/LaQd59kOxbAjpIVHddKRs=
|
||||
github.com/uptrace/bun v1.2.9/go.mod h1:r2ZaaGs9Ru5bpGTr8GQfp8jp+TlCav9grYCPOu2CJSg=
|
||||
github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFFxL1kuuY3SpxnWk=
|
||||
@@ -1271,8 +1289,8 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c=
|
||||
golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
@@ -1717,6 +1735,8 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX
|
||||
google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
|
||||
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
|
||||
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE=
|
||||
|
||||
150
pkg/apiserver/signozapiserver/gateway.go
Normal file
150
pkg/apiserver/signozapiserver/gateway.go
Normal file
@@ -0,0 +1,150 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addGatewayRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.GetIngestionKeys), handler.OpenAPIDef{
|
||||
ID: "GetIngestionKeys",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Get ingestion keys for workspace",
|
||||
Description: "This endpoint returns the ingestion keys for a workspace",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(gatewaytypes.GettableIngestionKeys),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys/search", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.SearchIngestionKeys), handler.OpenAPIDef{
|
||||
ID: "SearchIngestionKeys",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Search ingestion keys for workspace",
|
||||
Description: "This endpoint returns the ingestion keys for a workspace",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(gatewaytypes.GettableIngestionKeys),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.CreateIngestionKey), handler.OpenAPIDef{
|
||||
ID: "CreateIngestionKey",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Create ingestion key for workspace",
|
||||
Description: "This endpoint creates an ingestion key for the workspace",
|
||||
Request: new(gatewaytypes.PostableIngestionKey),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(gatewaytypes.GettableCreatedIngestionKey),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys/{keyId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.UpdateIngestionKey), handler.OpenAPIDef{
|
||||
ID: "UpdateIngestionKey",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Update ingestion key for workspace",
|
||||
Description: "This endpoint updates an ingestion key for the workspace",
|
||||
Request: new(gatewaytypes.PostableIngestionKey),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPatch).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys/{keyId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.DeleteIngestionKey), handler.OpenAPIDef{
|
||||
ID: "DeleteIngestionKey",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Delete ingestion key for workspace",
|
||||
Description: "This endpoint deletes an ingestion key for the workspace",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys/{keyId}/limits", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.CreateIngestionKeyLimit), handler.OpenAPIDef{
|
||||
ID: "CreateIngestionKeyLimit",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Create limit for the ingestion key",
|
||||
Description: "This endpoint creates an ingestion key limit",
|
||||
Request: new(gatewaytypes.PostableIngestionKeyLimit),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(gatewaytypes.GettableCreatedIngestionKeyLimit),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusCreated,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys/limits/{limitId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.UpdateIngestionKeyLimit), handler.OpenAPIDef{
|
||||
ID: "UpdateIngestionKeyLimit",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Update limit for the ingestion key",
|
||||
Description: "This endpoint updates an ingestion key limit",
|
||||
Request: new(gatewaytypes.UpdatableIngestionKeyLimit),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPatch).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/gateway/ingestion_keys/limits/{limitId}", handler.New(provider.authZ.AdminAccess(provider.gatewayHandler.DeleteIngestionKeyLimit), handler.OpenAPIDef{
|
||||
ID: "DeleteIngestionKeyLimit",
|
||||
Tags: []string{"gateway"},
|
||||
Summary: "Delete limit for the ingestion key",
|
||||
Description: "This endpoint deletes an ingestion key limit",
|
||||
Request: nil,
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodDelete).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
@@ -39,6 +40,7 @@ type provider struct {
|
||||
dashboardModule dashboard.Module
|
||||
dashboardHandler dashboard.Handler
|
||||
metricsExplorerHandler metricsexplorer.Handler
|
||||
gatewayHandler gateway.Handler
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -55,9 +57,10 @@ func NewFactory(
|
||||
dashboardModule dashboard.Module,
|
||||
dashboardHandler dashboard.Handler,
|
||||
metricsExplorerHandler metricsexplorer.Handler,
|
||||
gatewayHandler gateway.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler)
|
||||
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler, gatewayHandler)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -78,6 +81,7 @@ func newProvider(
|
||||
dashboardModule dashboard.Module,
|
||||
dashboardHandler dashboard.Handler,
|
||||
metricsExplorerHandler metricsexplorer.Handler,
|
||||
gatewayHandler gateway.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
@@ -97,6 +101,7 @@ func newProvider(
|
||||
dashboardModule: dashboardModule,
|
||||
dashboardHandler: dashboardHandler,
|
||||
metricsExplorerHandler: metricsExplorerHandler,
|
||||
gatewayHandler: gatewayHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
@@ -153,6 +158,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addGatewayRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -6,10 +6,15 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/coreos/go-oidc/v3/oidc"
|
||||
"golang.org/x/oauth2"
|
||||
"golang.org/x/oauth2/google"
|
||||
admin "google.golang.org/api/admin/directory/v1"
|
||||
"google.golang.org/api/option"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -17,19 +22,28 @@ const (
|
||||
redirectPath string = "/api/v1/complete/google"
|
||||
)
|
||||
|
||||
var (
|
||||
scopes []string = []string{"email"}
|
||||
)
|
||||
var scopes []string = []string{"email", "profile"}
|
||||
|
||||
var _ authn.CallbackAuthN = (*AuthN)(nil)
|
||||
|
||||
type AuthN struct {
|
||||
store authtypes.AuthNStore
|
||||
store authtypes.AuthNStore
|
||||
settings factory.ScopedProviderSettings
|
||||
httpClient *client.Client
|
||||
}
|
||||
|
||||
func New(ctx context.Context, store authtypes.AuthNStore) (*AuthN, error) {
|
||||
func New(ctx context.Context, store authtypes.AuthNStore, providerSettings factory.ProviderSettings) (*AuthN, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/authn/callbackauthn/googlecallbackauthn")
|
||||
|
||||
httpClient, err := client.New(settings.Logger(), providerSettings.TracerProvider, providerSettings.MeterProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &AuthN{
|
||||
store: store,
|
||||
store: store,
|
||||
settings: settings,
|
||||
httpClient: httpClient,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -58,11 +72,13 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
}
|
||||
|
||||
if err := query.Get("error"); err != "" {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: error while authenticating", "error", err, "error_description", query.Get("error_description"))
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: error while authenticating").WithAdditional(query.Get("error_description"))
|
||||
}
|
||||
|
||||
state, err := authtypes.NewStateFromString(query.Get("state"))
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: invalid state", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "google: invalid state").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
@@ -76,10 +92,12 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
if err != nil {
|
||||
var retrieveError *oauth2.RetrieveError
|
||||
if errors.As(err, &retrieveError) {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body))
|
||||
a.settings.Logger().ErrorContext(ctx, "google: failed to get token", "error", err, "error_description", retrieveError.ErrorDescription, "body", string(retrieveError.Body))
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription)
|
||||
}
|
||||
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token").WithAdditional(err.Error())
|
||||
a.settings.Logger().ErrorContext(ctx, "google: failed to get token", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token")
|
||||
}
|
||||
|
||||
rawIDToken, ok := token.Extra("id_token").(string)
|
||||
@@ -90,7 +108,8 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
verifier := oidcProvider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().Google.ClientID})
|
||||
idToken, err := verifier.Verify(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token").WithAdditional(err.Error())
|
||||
a.settings.Logger().ErrorContext(ctx, "google: failed to verify token", "error", err)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token")
|
||||
}
|
||||
|
||||
var claims struct {
|
||||
@@ -101,11 +120,20 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
}
|
||||
|
||||
if err := idToken.Claims(&claims); err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: missing or invalid claims", "error", err)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: missing or invalid claims").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if claims.HostedDomain != authDomain.StorableAuthDomain().Name {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim %s", claims.HostedDomain)
|
||||
a.settings.Logger().ErrorContext(ctx, "google: unexpected hd claim", "expected", authDomain.StorableAuthDomain().Name, "actual", claims.HostedDomain)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim")
|
||||
}
|
||||
|
||||
if !authDomain.AuthDomainConfig().Google.InsecureSkipEmailVerified {
|
||||
if !claims.EmailVerified {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: email is not verified", "email", claims.Email)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: email is not verified")
|
||||
}
|
||||
}
|
||||
|
||||
email, err := valuer.NewEmail(claims.Email)
|
||||
@@ -113,8 +141,24 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "google: failed to parse email").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state), nil
|
||||
var groups []string
|
||||
if authDomain.AuthDomainConfig().Google.FetchGroups {
|
||||
groups, err = a.fetchGoogleWorkspaceGroups(ctx, claims.Email, authDomain.AuthDomainConfig().Google)
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: could not fetch groups", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: could not fetch groups").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
allowedGroups := authDomain.AuthDomainConfig().Google.AllowedGroups
|
||||
if len(allowedGroups) > 0 {
|
||||
groups = filterGroups(groups, allowedGroups)
|
||||
if len(groups) == 0 {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: user %q is not in any allowed groups", claims.Email).WithAdditional(allowedGroups...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state, groups, ""), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
@@ -136,3 +180,90 @@ func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain,
|
||||
}).String(),
|
||||
}
|
||||
}
|
||||
|
||||
// fetchGoogleWorkspaceGroups lists the Google Workspace groups that userEmail
// belongs to, using the Admin SDK Directory API with the configured service
// account. Domain-wide delegation is performed by impersonating the admin
// email configured for the user's domain.
func (a *AuthN) fetchGoogleWorkspaceGroups(ctx context.Context, userEmail string, config *authtypes.GoogleConfig) ([]string, error) {
	// The Directory API can only be queried on behalf of a domain admin.
	adminEmail := config.GetAdminEmailForDomain(userEmail)
	if adminEmail == "" {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no admin email configured for domain of %s", userEmail)
	}

	jwtConfig, err := google.JWTConfigFromJSON([]byte(config.ServiceAccountJSON), admin.AdminDirectoryGroupReadonlyScope)
	if err != nil {
		a.settings.Logger().ErrorContext(ctx, "google: invalid service account credentials", "error", err)
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid service account credentials")
	}

	// Subject makes the service account act as (impersonate) the domain admin.
	jwtConfig.Subject = adminEmail

	// Route the OAuth2 token exchange through the provider's own HTTP client
	// instead of http.DefaultClient.
	customCtx := context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client())

	adminService, err := admin.NewService(ctx, option.WithHTTPClient(jwtConfig.Client(customCtx)))
	if err != nil {
		a.settings.Logger().ErrorContext(ctx, "google: unable to create directory service", "error", err)
		return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to create directory service")
	}

	// Shared across the recursive traversal so each group is visited at most once.
	checkedGroups := make(map[string]struct{})

	return a.getGroups(ctx, adminService, userEmail, config.FetchTransitiveGroupMembership, checkedGroups)
}
|
||||
|
||||
// getGroups returns the email addresses of every group that userEmail (a user
// or group address) is a member of, paging through the Directory API's
// Groups.List results. When fetchTransitive is true it recurses into each
// group to also collect nested (transitive) memberships. checkedGroups is
// shared across recursive calls so each group is visited at most once, which
// also guards against membership cycles.
func (a *AuthN) getGroups(ctx context.Context, adminService *admin.Service, userEmail string, fetchTransitive bool, checkedGroups map[string]struct{}) ([]string, error) {
	var userGroups []string
	var pageToken string

	// Page through all Groups.List results for this userKey.
	for {
		call := adminService.Groups.List().UserKey(userEmail)
		if pageToken != "" {
			call = call.PageToken(pageToken)
		}

		groupList, err := call.Context(ctx).Do()
		if err != nil {
			a.settings.Logger().ErrorContext(ctx, "google: unable to list groups", "error", err)
			return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to list groups")
		}

		for _, group := range groupList.Groups {
			// Skip groups already collected in this traversal (dedupe + cycle guard).
			if _, exists := checkedGroups[group.Email]; exists {
				continue
			}

			checkedGroups[group.Email] = struct{}{}
			userGroups = append(userGroups, group.Email)

			if fetchTransitive {
				// Recurse with the group's own email to pick up nested memberships.
				transitiveGroups, err := a.getGroups(ctx, adminService, group.Email, fetchTransitive, checkedGroups)
				if err != nil {
					a.settings.Logger().ErrorContext(ctx, "google: unable to list transitive groups", "error", err)
					return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to list transitive groups")
				}
				userGroups = append(userGroups, transitiveGroups...)
			}
		}

		// An empty NextPageToken marks the final page.
		pageToken = groupList.NextPageToken
		if pageToken == "" {
			break
		}
	}

	return userGroups, nil
}
|
||||
|
||||
// filterGroups returns the entries of userGroups that also appear in
// allowedGroups, preserving the order (and any duplicates) of userGroups.
// It returns nil when nothing matches.
func filterGroups(userGroups, allowedGroups []string) []string {
	// Build a set of the allowed names for O(1) membership checks below.
	allowedSet := make(map[string]struct{}, len(allowedGroups))
	for _, group := range allowedGroups {
		allowedSet[group] = struct{}{}
	}

	var matched []string
	for _, group := range userGroups {
		if _, member := allowedSet[group]; member {
			matched = append(matched, group)
		}
	}

	return matched
}
|
||||
|
||||
@@ -112,7 +112,7 @@ func (b *base) WithUrl(u string) *base {
|
||||
}
|
||||
}
|
||||
|
||||
// WithUrl adds additional messages to the base error and returns a new base error.
|
||||
// WithAdditional adds additional messages to the base error and returns a new base error.
|
||||
func (b *base) WithAdditional(a ...string) *base {
|
||||
return &base{
|
||||
t: b.t,
|
||||
|
||||
@@ -4,14 +4,9 @@ import (
|
||||
"net/url"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeInvalidGatewayConfig = errors.MustNewCode("invalid_gateway_config")
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
URL *url.URL `mapstructure:"url"`
|
||||
}
|
||||
|
||||
60
pkg/gateway/gateway.go
Normal file
60
pkg/gateway/gateway.go
Normal file
@@ -0,0 +1,60 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
	// ErrCodeGatewayUnsupported signals that gateway features are not available
	// in the current deployment.
	ErrCodeGatewayUnsupported = errors.MustNewCode("gateway_unsupported")
	// ErrCodeInvalidGatewayConfig signals that the gateway configuration is invalid.
	ErrCodeInvalidGatewayConfig = errors.MustNewCode("invalid_gateway_config")
)

// Gateway is the client-side contract for managing ingestion keys and their
// limits on the upstream gateway service, scoped to an organization.
type Gateway interface {
	// Get Ingestions Keys (this is supposed to be for the current user but for now in gateway code this is ignoring the consumer user)
	GetIngestionKeys(ctx context.Context, orgID valuer.UUID, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error)

	// Search Ingestion Keys by Name (this is supposed to be for the current user but for now in gateway code this is ignoring the consumer user)
	SearchIngestionKeysByName(ctx context.Context, orgID valuer.UUID, name string, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error)

	// Create Ingestion Key
	CreateIngestionKey(ctx context.Context, orgID valuer.UUID, name string, tags []string, expiresAt time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error)

	// Update Ingestion Key
	UpdateIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string, name string, tags []string, expiresAt time.Time) error

	// Delete Ingestion Key
	DeleteIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string) error

	// Create Ingestion Key Limit
	CreateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, keyID string, signal string, limitConfig gatewaytypes.LimitConfig, tags []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error)

	// Update Ingestion Key Limit
	UpdateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string, limitConfig gatewaytypes.LimitConfig, tags []string) error

	// Delete Ingestion Key Limit
	DeleteIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string) error
}

// Handler exposes the Gateway operations as HTTP handlers; each method maps
// one-to-one onto a Gateway method.
type Handler interface {
	// GetIngestionKeys lists ingestion keys for the caller's organization.
	GetIngestionKeys(http.ResponseWriter, *http.Request)

	// SearchIngestionKeys lists ingestion keys matching a name filter.
	SearchIngestionKeys(http.ResponseWriter, *http.Request)

	// CreateIngestionKey creates a new ingestion key.
	CreateIngestionKey(http.ResponseWriter, *http.Request)

	// UpdateIngestionKey updates the ingestion key addressed by the request path.
	UpdateIngestionKey(http.ResponseWriter, *http.Request)

	// DeleteIngestionKey deletes the ingestion key addressed by the request path.
	DeleteIngestionKey(http.ResponseWriter, *http.Request)

	// CreateIngestionKeyLimit creates a limit for an ingestion key.
	CreateIngestionKeyLimit(http.ResponseWriter, *http.Request)

	// UpdateIngestionKeyLimit updates the limit addressed by the request path.
	UpdateIngestionKeyLimit(http.ResponseWriter, *http.Request)

	// DeleteIngestionKeyLimit deletes the limit addressed by the request path.
	DeleteIngestionKeyLimit(http.ResponseWriter, *http.Request)
}
|
||||
287
pkg/gateway/handler.go
Normal file
287
pkg/gateway/handler.go
Normal file
@@ -0,0 +1,287 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
const (
	// DefaultPage is used when the "page" query parameter is absent or empty.
	DefaultPage = 1
	// DefaultPageSize is used when the "per_page" query parameter is absent or empty.
	DefaultPageSize = 10
)

// handler implements the Handler interface by delegating to a Gateway.
type handler struct {
	gateway Gateway
}

// NewHandler returns a Handler backed by the given Gateway.
func NewHandler(gateway Gateway) Handler {
	return &handler{
		gateway: gateway,
	}
}
|
||||
|
||||
func (handler *handler) GetIngestionKeys(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
pageString := r.URL.Query().Get("page")
|
||||
perPageString := r.URL.Query().Get("per_page")
|
||||
|
||||
page, err := parseIntWithDefaultValue(pageString, DefaultPage)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "page must be a valid integer"))
|
||||
return
|
||||
}
|
||||
|
||||
perPage, err := parseIntWithDefaultValue(perPageString, DefaultPageSize)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "per_page must be a valid integer"))
|
||||
return
|
||||
}
|
||||
|
||||
response, err := handler.gateway.GetIngestionKeys(ctx, orgID, page, perPage)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) SearchIngestionKeys(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
pageString := r.URL.Query().Get("page")
|
||||
perPageString := r.URL.Query().Get("per_page")
|
||||
name := r.URL.Query().Get("name")
|
||||
|
||||
page, err := parseIntWithDefaultValue(pageString, DefaultPage)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "page must be a valid integer"))
|
||||
return
|
||||
}
|
||||
|
||||
perPage, err := parseIntWithDefaultValue(perPageString, DefaultPageSize)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "per_page must be a valid integer"))
|
||||
return
|
||||
}
|
||||
|
||||
response, err := handler.gateway.SearchIngestionKeysByName(ctx, orgID, name, page, perPage)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) CreateIngestionKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
var req gatewaytypes.PostableIngestionKey
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
|
||||
return
|
||||
}
|
||||
|
||||
response, err := handler.gateway.CreateIngestionKey(ctx, orgID, req.Name, req.Tags, req.ExpiresAt)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, response)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateIngestionKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
keyID := mux.Vars(r)["keyId"]
|
||||
if keyID == "" {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "keyId is required"))
|
||||
return
|
||||
}
|
||||
|
||||
var req gatewaytypes.PostableIngestionKey
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.gateway.UpdateIngestionKey(ctx, orgID, keyID, req.Name, req.Tags, req.ExpiresAt)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) DeleteIngestionKey(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
keyID := mux.Vars(r)["keyId"]
|
||||
if keyID == "" {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "keyId is required"))
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.gateway.DeleteIngestionKey(ctx, orgID, keyID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) CreateIngestionKeyLimit(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
keyID := mux.Vars(r)["keyId"]
|
||||
if keyID == "" {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "keyId is required"))
|
||||
return
|
||||
}
|
||||
|
||||
var req gatewaytypes.PostableIngestionKeyLimit
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
|
||||
return
|
||||
}
|
||||
|
||||
response, err := handler.gateway.CreateIngestionKeyLimit(ctx, orgID, keyID, req.Signal, req.Config, req.Tags)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusCreated, response)
|
||||
}
|
||||
|
||||
func (handler *handler) UpdateIngestionKeyLimit(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
limitID := mux.Vars(r)["limitId"]
|
||||
if limitID == "" {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "limitId is required"))
|
||||
return
|
||||
}
|
||||
|
||||
var req gatewaytypes.UpdatableIngestionKeyLimit
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.gateway.UpdateIngestionKeyLimit(ctx, orgID, limitID, req.Config, req.Tags)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (handler *handler) DeleteIngestionKeyLimit(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
limitID := mux.Vars(r)["limitId"]
|
||||
if limitID == "" {
|
||||
render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "limitId is required"))
|
||||
return
|
||||
}
|
||||
|
||||
err = handler.gateway.DeleteIngestionKeyLimit(ctx, orgID, limitID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func parseIntWithDefaultValue(value string, defaultValue int) (int, error) {
|
||||
if value == "" {
|
||||
return defaultValue, nil
|
||||
}
|
||||
|
||||
result, err := strconv.Atoi(value)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
56
pkg/gateway/noopgateway/provider.go
Normal file
56
pkg/gateway/noopgateway/provider.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package noopgateway
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type provider struct{}
|
||||
|
||||
func NewProviderFactory() factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("noop"), func(ctx context.Context, ps factory.ProviderSettings, c gateway.Config) (gateway.Gateway, error) {
|
||||
return New(ctx, ps, c)
|
||||
})
|
||||
}
|
||||
|
||||
func New(_ context.Context, _ factory.ProviderSettings, _ gateway.Config) (gateway.Gateway, error) {
|
||||
return &provider{}, nil
|
||||
}
|
||||
|
||||
func (p *provider) GetIngestionKeys(_ context.Context, _ valuer.UUID, _, _ int) (*gatewaytypes.GettableIngestionKeys, error) {
|
||||
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) SearchIngestionKeysByName(_ context.Context, _ valuer.UUID, _ string, _, _ int) (*gatewaytypes.GettableIngestionKeys, error) {
|
||||
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) CreateIngestionKey(_ context.Context, _ valuer.UUID, _ string, _ []string, _ time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error) {
|
||||
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) UpdateIngestionKey(_ context.Context, _ valuer.UUID, _ string, _ string, _ []string, _ time.Time) error {
|
||||
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) DeleteIngestionKey(_ context.Context, _ valuer.UUID, _ string) error {
|
||||
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) CreateIngestionKeyLimit(_ context.Context, _ valuer.UUID, _ string, _ string, _ gatewaytypes.LimitConfig, _ []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error) {
|
||||
return nil, errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) UpdateIngestionKeyLimit(_ context.Context, _ valuer.UUID, _ string, _ gatewaytypes.LimitConfig, _ []string) error {
|
||||
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
|
||||
func (p *provider) DeleteIngestionKeyLimit(_ context.Context, _ valuer.UUID, _ string) error {
|
||||
return errors.New(errors.TypeUnsupported, gateway.ErrCodeGatewayUnsupported, "unsupported call")
|
||||
}
|
||||
@@ -16,8 +16,8 @@ func NewHandler(global global.Global) global.Handler {
|
||||
return &handler{global: global}
|
||||
}
|
||||
|
||||
func (handker *handler) GetConfig(rw http.ResponseWriter, r *http.Request) {
|
||||
cfg := handker.global.GetConfig()
|
||||
func (handler *handler) GetConfig(rw http.ResponseWriter, r *http.Request) {
|
||||
cfg := handler.global.GetConfig()
|
||||
|
||||
render.Success(rw, http.StatusOK, types.NewGettableGlobalConfig(cfg.ExternalURL, cfg.IngestionURL))
|
||||
}
|
||||
|
||||
@@ -9,10 +9,12 @@ import (
|
||||
var (
|
||||
ErrCodeInvalidRequestBody = errors.MustNewCode("invalid_request_body")
|
||||
ErrCodeInvalidRequestField = errors.MustNewCode("invalid_request_field")
|
||||
ErrCodeInvalidRequestQuery = errors.MustNewCode("invalid_request_query")
|
||||
)
|
||||
|
||||
var (
|
||||
JSON Binding = &jsonBinding{}
|
||||
JSON BindingBody = &jsonBinding{}
|
||||
Query BindingQuery = &queryBinding{}
|
||||
)
|
||||
|
||||
type bindBodyOptions struct {
|
||||
@@ -34,6 +36,10 @@ func WithUseNumber(useNumber bool) BindBodyOption {
|
||||
}
|
||||
}
|
||||
|
||||
type Binding interface {
|
||||
type BindingBody interface {
|
||||
BindBody(body io.Reader, obj any, opts ...BindBodyOption) error
|
||||
}
|
||||
|
||||
type BindingQuery interface {
|
||||
BindQuery(query map[string][]string, obj any) error
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ const (
|
||||
ErrMessageInvalidField string = "request body contains invalid field value"
|
||||
)
|
||||
|
||||
var _ Binding = (*jsonBinding)(nil)
|
||||
var _ BindingBody = (*jsonBinding)(nil)
|
||||
|
||||
type jsonBinding struct{}
|
||||
|
||||
|
||||
23
pkg/http/binding/query.go
Normal file
23
pkg/http/binding/query.go
Normal file
@@ -0,0 +1,23 @@
|
||||
package binding
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
ginbinding "github.com/gin-gonic/gin/binding"
|
||||
)
|
||||
|
||||
const (
|
||||
ErrMessageInvalidQuery string = "request query contains invalid fields, please verify the format and try again."
|
||||
)
|
||||
|
||||
var _ BindingQuery = (*queryBinding)(nil)
|
||||
|
||||
type queryBinding struct{}
|
||||
|
||||
func (b *queryBinding) BindQuery(query map[string][]string, obj any) error {
|
||||
err := ginbinding.MapFormWithTag(obj, query, "query")
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidRequestQuery, ErrMessageInvalidQuery).WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
33
pkg/http/binding/query_test.go
Normal file
33
pkg/http/binding/query_test.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package binding
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestQueryBinding_BindQuery(t *testing.T) {
|
||||
one := 1
|
||||
zero := 0
|
||||
testCases := []struct {
|
||||
name string
|
||||
query map[string][]string
|
||||
obj any
|
||||
expected any
|
||||
}{
|
||||
{name: "SingleIntField_NonEmptyValue", query: map[string][]string{"a": {"1"}}, obj: &struct{A int `query:"a"`}{}, expected: &struct{ A int }{A: 1}},
|
||||
{name: "SingleIntField_EmptyValue", query: map[string][]string{"a": {""}}, obj: &struct{A int `query:"a"`}{}, expected: &struct{ A int }{A: 0}},
|
||||
{name: "SingleIntField_MissingField", query: map[string][]string{}, obj: &struct{A int `query:"a"`}{}, expected: &struct{ A int }{A: 0}},
|
||||
{name: "SinglePointerIntField_NonEmptyValue", query: map[string][]string{"a": {"1"}}, obj: &struct{A *int `query:"a"`}{}, expected: &struct{ A *int }{A: &one}},
|
||||
{name: "SinglePointerIntField_EmptyValue", query: map[string][]string{"a": {""}}, obj: &struct{A *int `query:"a"`}{}, expected: &struct{ A *int }{A: &zero}},
|
||||
{name: "SinglePointerIntField_MissingField", query: map[string][]string{}, obj: &struct{A *int `query:"a"`}{}, expected: &struct{ A *int }{A: nil}},
|
||||
}
|
||||
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
err := Query.BindQuery(testCase.query, testCase.obj)
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, testCase.expected, testCase.obj)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -31,7 +31,13 @@ func (plugin *reqResLog) OnRequestStart(request *http.Request) {
|
||||
string(semconv.ServerAddressKey), host,
|
||||
string(semconv.ServerPortKey), port,
|
||||
string(semconv.HTTPRequestSizeKey), request.ContentLength,
|
||||
"http.request.headers", request.Header,
|
||||
}
|
||||
|
||||
// only include all the headers if we are at debug level
|
||||
if plugin.logger.Handler().Enabled(request.Context(), slog.LevelDebug) {
|
||||
fields = append(fields, "http.request.headers", request.Header)
|
||||
} else {
|
||||
fields = append(fields, "http.request.headers", redactSensitiveHeaders(request.Header))
|
||||
}
|
||||
|
||||
plugin.logger.InfoContext(request.Context(), "::SENT-REQUEST::", fields...)
|
||||
@@ -75,3 +81,24 @@ func (plugin *reqResLog) OnError(request *http.Request, err error) {
|
||||
|
||||
plugin.logger.ErrorContext(request.Context(), "::UNABLE-TO-SEND-REQUEST::", fields...)
|
||||
}
|
||||
|
||||
func redactSensitiveHeaders(headers http.Header) http.Header {
|
||||
// maintained list of headers to redact
|
||||
sensitiveHeaders := map[string]bool{
|
||||
"Authorization": true,
|
||||
"Cookie": true,
|
||||
"X-Signoz-Cloud-Api-Key": true,
|
||||
}
|
||||
|
||||
safeHeaders := make(http.Header)
|
||||
|
||||
for header, value := range headers {
|
||||
if sensitiveHeaders[header] {
|
||||
safeHeaders[header] = []string{"REDACTED"}
|
||||
} else {
|
||||
safeHeaders[header] = value
|
||||
}
|
||||
}
|
||||
|
||||
return safeHeaders
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/statsreporter"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
@@ -31,7 +32,9 @@ type Module interface {
|
||||
Delete(context.Context, valuer.UUID, valuer.UUID) error
|
||||
|
||||
// Get the IDP info of the domain provided.
|
||||
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) (*authtypes.AuthNProviderInfo)
|
||||
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) *authtypes.AuthNProviderInfo
|
||||
|
||||
statsreporter.StatsCollector
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
|
||||
@@ -52,3 +52,25 @@ func (module *module) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*au
|
||||
func (module *module) Update(ctx context.Context, domain *authtypes.AuthDomain) error {
|
||||
return module.store.Update(ctx, domain)
|
||||
}
|
||||
|
||||
func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
|
||||
domains, err := module.store.ListByOrgID(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
stats := make(map[string]any)
|
||||
|
||||
for _, domain := range domains {
|
||||
key := "authdomain." + domain.AuthDomainConfig().AuthNProvider.StringValue() + ".count"
|
||||
if value, ok := stats[key]; ok {
|
||||
stats[key] = value.(int64) + 1
|
||||
} else {
|
||||
stats[key] = int64(1)
|
||||
}
|
||||
}
|
||||
|
||||
stats["authdomain.count"] = len(domains)
|
||||
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
@@ -123,7 +123,7 @@ func (module *module) DeprecatedCreateSessionByEmailPassword(ctx context.Context
|
||||
}
|
||||
|
||||
if !factorPassword.Equals(password) {
|
||||
return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email orpassword")
|
||||
return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password")
|
||||
}
|
||||
|
||||
identity := authtypes.NewIdentity(users[0].ID, users[0].OrgID, users[0].Email, users[0].Role)
|
||||
@@ -157,7 +157,15 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
|
||||
return "", err
|
||||
}
|
||||
|
||||
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, types.RoleViewer, callbackIdentity.OrgID)
|
||||
authDomain, err := module.authDomain.GetByOrgIDAndID(ctx, callbackIdentity.OrgID, callbackIdentity.State.DomainID)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
roleMapping := authDomain.AuthDomainConfig().RoleMapping
|
||||
role := roleMapping.NewRoleFromCallbackIdentity(callbackIdentity)
|
||||
|
||||
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -318,13 +318,14 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes
|
||||
}
|
||||
|
||||
// Check if we're dealing with time series or scalar data
|
||||
if req.RequestType == qbtypes.RequestTypeTimeSeries {
|
||||
switch req.RequestType {
|
||||
case qbtypes.RequestTypeTimeSeries:
|
||||
result := q.processTimeSeriesFormula(ctx, results, formula, req)
|
||||
if result != nil {
|
||||
result = q.applySeriesLimit(result, formula.Limit, formula.Order)
|
||||
results[name] = result
|
||||
}
|
||||
} else if req.RequestType == qbtypes.RequestTypeScalar {
|
||||
case qbtypes.RequestTypeScalar:
|
||||
result := q.processScalarFormula(ctx, results, formula, req)
|
||||
if result != nil {
|
||||
result = q.applySeriesLimit(result, formula.Limit, formula.Order)
|
||||
@@ -581,11 +582,14 @@ func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req
|
||||
}
|
||||
|
||||
// formatScalarResultsAsTable formats scalar results as a unified table for UI display
|
||||
func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result, _ *qbtypes.QueryRangeRequest) map[string]any {
|
||||
func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]any {
|
||||
if len(results) == 0 {
|
||||
return map[string]any{"table": &qbtypes.ScalarData{}}
|
||||
}
|
||||
|
||||
// apply default sorting if no order specified
|
||||
applyDefaultSort := !req.HasOrderSpecified()
|
||||
|
||||
// Convert all results to ScalarData first
|
||||
scalarResults := make(map[string]*qbtypes.ScalarData)
|
||||
for name, result := range results {
|
||||
@@ -600,13 +604,13 @@ func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result,
|
||||
if len(scalarResults) == 1 {
|
||||
for _, sd := range scalarResults {
|
||||
if hasMultipleQueries(sd) {
|
||||
return map[string]any{"table": deduplicateRows(sd)}
|
||||
return map[string]any{"table": deduplicateRows(sd, applyDefaultSort)}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise merge all results
|
||||
merged := mergeScalarData(scalarResults)
|
||||
merged := mergeScalarData(scalarResults, applyDefaultSort)
|
||||
return map[string]any{"table": merged}
|
||||
}
|
||||
|
||||
@@ -687,7 +691,7 @@ func hasMultipleQueries(sd *qbtypes.ScalarData) bool {
|
||||
}
|
||||
|
||||
// deduplicateRows removes duplicate rows based on group columns
|
||||
func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
|
||||
func deduplicateRows(sd *qbtypes.ScalarData, applyDefaultSort bool) *qbtypes.ScalarData {
|
||||
// Find group column indices
|
||||
groupIndices := []int{}
|
||||
for i, col := range sd.Columns {
|
||||
@@ -696,8 +700,9 @@ func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
|
||||
}
|
||||
}
|
||||
|
||||
// Build unique rows map
|
||||
// Build unique rows map, preserve order
|
||||
uniqueRows := make(map[string][]any)
|
||||
var keyOrder []string
|
||||
for _, row := range sd.Data {
|
||||
key := buildRowKey(row, groupIndices)
|
||||
if existing, found := uniqueRows[key]; found {
|
||||
@@ -711,17 +716,20 @@ func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
|
||||
rowCopy := make([]any, len(row))
|
||||
copy(rowCopy, row)
|
||||
uniqueRows[key] = rowCopy
|
||||
keyOrder = append(keyOrder, key)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert back to slice
|
||||
// Convert back to slice, preserve the original order
|
||||
data := make([][]any, 0, len(uniqueRows))
|
||||
for _, row := range uniqueRows {
|
||||
data = append(data, row)
|
||||
for _, key := range keyOrder {
|
||||
data = append(data, uniqueRows[key])
|
||||
}
|
||||
|
||||
// Sort by first aggregation column
|
||||
sortByFirstAggregation(data, sd.Columns)
|
||||
// sort by first aggregation (descending) if no order was specified
|
||||
if applyDefaultSort {
|
||||
sortByFirstAggregation(data, sd.Columns)
|
||||
}
|
||||
|
||||
return &qbtypes.ScalarData{
|
||||
Columns: sd.Columns,
|
||||
@@ -730,7 +738,7 @@ func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
|
||||
}
|
||||
|
||||
// mergeScalarData merges multiple scalar data results
|
||||
func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData {
|
||||
func mergeScalarData(results map[string]*qbtypes.ScalarData, applyDefaultSort bool) *qbtypes.ScalarData {
|
||||
// Collect unique group columns
|
||||
groupCols := []string{}
|
||||
groupColMap := make(map[string]*qbtypes.ColumnDescriptor)
|
||||
@@ -770,10 +778,12 @@ func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData
|
||||
}
|
||||
}
|
||||
|
||||
// Merge rows
|
||||
// Merge rows, preserve order
|
||||
rowMap := make(map[string][]any)
|
||||
var keyOrder []string
|
||||
|
||||
for queryName, sd := range results {
|
||||
for _, queryName := range queryNames {
|
||||
sd := results[queryName]
|
||||
// Create index mappings
|
||||
groupMap := make(map[string]int)
|
||||
for i, col := range sd.Columns {
|
||||
@@ -802,6 +812,7 @@ func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData
|
||||
newRow[i] = "n/a"
|
||||
}
|
||||
rowMap[key] = newRow
|
||||
keyOrder = append(keyOrder, key)
|
||||
}
|
||||
|
||||
// Set aggregation values for this query
|
||||
@@ -825,14 +836,16 @@ func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData
|
||||
}
|
||||
}
|
||||
|
||||
// Convert to slice
|
||||
// Convert to slice, preserving insertion order
|
||||
data := make([][]any, 0, len(rowMap))
|
||||
for _, row := range rowMap {
|
||||
data = append(data, row)
|
||||
for _, key := range keyOrder {
|
||||
data = append(data, rowMap[key])
|
||||
}
|
||||
|
||||
// Sort by first aggregation column
|
||||
sortByFirstAggregation(data, columns)
|
||||
// sort by first aggregation (descending) if no order was specified
|
||||
if applyDefaultSort {
|
||||
sortByFirstAggregation(data, columns)
|
||||
}
|
||||
|
||||
return &qbtypes.ScalarData{
|
||||
Columns: columns,
|
||||
@@ -888,7 +901,7 @@ func sortByFirstAggregation(data [][]any, columns []*qbtypes.ColumnDescriptor) {
|
||||
|
||||
// compareValues compares two values for sorting (handles n/a and numeric types)
|
||||
func compareValues(a, b any) int {
|
||||
// Handle n/a values
|
||||
// n/a values gets pushed to the end
|
||||
if a == "n/a" && b == "n/a" {
|
||||
return 0
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"text/template"
|
||||
@@ -19,6 +20,50 @@ import (
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
|
||||
// unquotedDottedNamePattern matches unquoted identifiers containing dots
|
||||
// that appear in metric or label name positions. This helps detect queries
|
||||
// using the old syntax that needs migration to UTF-8 quoted syntax.
|
||||
// Examples it matches: k8s.pod.name, deployment.environment, http.status_code
|
||||
var unquotedDottedNamePattern = regexp.MustCompile(`(?:^|[{,(\s])([a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z0-9_]+)+)(?:[}\s,=!~)\[]|$)`)
|
||||
|
||||
// quotedMetricOutsideBracesPattern matches the incorrect syntax where a quoted
|
||||
// metric name appears outside of braces followed by a selector block.
|
||||
// Example: "kube_pod_status_ready_time"{"condition"="true"}
|
||||
// This is a common mistake when migrating to UTF-8 syntax.
|
||||
var quotedMetricOutsideBracesPattern = regexp.MustCompile(`"([^"]+)"\s*\{`)
|
||||
|
||||
// enhancePromQLError adds helpful context to PromQL parse errors,
|
||||
// particularly for UTF-8 syntax migration issues where metric and label
|
||||
// names containing dots need to be quoted.
|
||||
func enhancePromQLError(query string, parseErr error) error {
|
||||
errMsg := parseErr.Error()
|
||||
|
||||
if matches := quotedMetricOutsideBracesPattern.FindStringSubmatch(query); len(matches) > 1 {
|
||||
metricName := matches[1]
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid promql query: %s. Hint: The metric name should be inside the braces. Use {\"__name__\"=\"%s\", ...} or {\"%s\", ...} instead of \"%s\"{...}",
|
||||
errMsg,
|
||||
metricName,
|
||||
metricName,
|
||||
metricName,
|
||||
)
|
||||
}
|
||||
|
||||
if matches := unquotedDottedNamePattern.FindStringSubmatch(query); len(matches) > 1 {
|
||||
dottedName := matches[1]
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid promql query: %s. Hint: Metric and label names containing dots require quoted notation in the new UTF-8 syntax, e.g., use \"%s\" instead of %s",
|
||||
errMsg,
|
||||
dottedName,
|
||||
dottedName,
|
||||
)
|
||||
}
|
||||
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query: %s", errMsg)
|
||||
}
|
||||
|
||||
type promqlQuery struct {
|
||||
logger *slog.Logger
|
||||
promEngine prometheus.Prometheus
|
||||
@@ -81,7 +126,7 @@ func (q *promqlQuery) removeAllVarMatchers(query string, vars map[string]qbv5.Va
|
||||
|
||||
expr, err := parser.ParseExpr(query)
|
||||
if err != nil {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query %q", query)
|
||||
return "", enhancePromQLError(query, err)
|
||||
}
|
||||
|
||||
// Create visitor and walk the AST
|
||||
@@ -161,7 +206,7 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query %q", query)
|
||||
return nil, enhancePromQLError(query, err)
|
||||
}
|
||||
|
||||
res := qry.Exec(ctx)
|
||||
|
||||
@@ -2,8 +2,10 @@ package querier
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
@@ -172,3 +174,268 @@ func TestRemoveAllVarMatchers(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnhancePromQLError(t *testing.T) {
|
||||
parseErr := errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unexpected character: '.' at position 12")
|
||||
|
||||
t.Run("dotted name patterns", func(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
wantDottedNameHint bool
|
||||
wantDottedNameExample string
|
||||
}{
|
||||
{
|
||||
name: "query with unquoted dotted metric name",
|
||||
query: `sum(rate(k8s.container.restarts[5m]))`,
|
||||
wantDottedNameHint: true,
|
||||
wantDottedNameExample: "k8s.container.restarts",
|
||||
},
|
||||
{
|
||||
name: "query with unquoted dotted label in group by",
|
||||
query: `sum by (k8s.pod.name) (rate(requests_total[5m]))`,
|
||||
wantDottedNameHint: true,
|
||||
wantDottedNameExample: "k8s.pod.name",
|
||||
},
|
||||
{
|
||||
name: "query with unquoted dotted label in filter",
|
||||
query: `requests_total{k8s.namespace.name="default"}`,
|
||||
wantDottedNameHint: true,
|
||||
wantDottedNameExample: "k8s.namespace.name",
|
||||
},
|
||||
{
|
||||
name: "query with multiple unquoted dotted names",
|
||||
query: `sum by (k8s.pod.name, deployment.environment) (increase(k8s.container.restarts[15m]))`,
|
||||
wantDottedNameHint: true,
|
||||
wantDottedNameExample: "k8s.pod.name", // should match first one
|
||||
},
|
||||
{
|
||||
name: "query without dotted names - no hint",
|
||||
query: `sum(rate(http_requests_total[5m]))`,
|
||||
wantDottedNameHint: false,
|
||||
},
|
||||
{
|
||||
name: "query with properly quoted dotted names - no hint",
|
||||
query: `sum(rate({"k8s.container.restarts"}[5m]))`,
|
||||
wantDottedNameHint: false,
|
||||
},
|
||||
{
|
||||
name: "query with dotted name inside regex string - no hint",
|
||||
query: `requests_total{pod=~"k8s.pod.name.*"}`,
|
||||
wantDottedNameHint: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := enhancePromQLError(tt.query, parseErr)
|
||||
errMsg := err.Error()
|
||||
|
||||
assert.True(t, strings.Contains(errMsg, parseErr.Error()),
|
||||
"error should contain original parse error message")
|
||||
|
||||
if tt.wantDottedNameHint {
|
||||
assert.True(t, strings.Contains(errMsg, "Hint:"),
|
||||
"error should contain hint for dotted name query")
|
||||
assert.True(t, strings.Contains(errMsg, "UTF-8 syntax"),
|
||||
"error should mention UTF-8 syntax")
|
||||
assert.True(t, strings.Contains(errMsg, tt.wantDottedNameExample),
|
||||
"error should contain the dotted name example: %s", tt.wantDottedNameExample)
|
||||
} else {
|
||||
assert.False(t, strings.Contains(errMsg, "Hint:"),
|
||||
"error should not contain hint for non-dotted-name query")
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("quoted metric outside braces patterns", func(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
query string
|
||||
wantHint bool
|
||||
wantMetricInHint string
|
||||
}{
|
||||
{
|
||||
name: "quoted metric name followed by selector",
|
||||
query: `"kube_pod_status_ready_time"{"condition"="true"}`,
|
||||
wantHint: true,
|
||||
wantMetricInHint: "kube_pod_status_ready_time",
|
||||
},
|
||||
{
|
||||
name: "quoted metric with space before brace",
|
||||
query: `"kube_pod_labels" {"label"!=""}`,
|
||||
wantHint: true,
|
||||
wantMetricInHint: "kube_pod_labels",
|
||||
},
|
||||
{
|
||||
name: "complex query with quoted metric outside braces",
|
||||
query: `min by (namespace) ("kube_pod_status_ready_time"{"condition"="true"})`,
|
||||
wantHint: true,
|
||||
wantMetricInHint: "kube_pod_status_ready_time",
|
||||
},
|
||||
{
|
||||
name: "label_replace with quoted metric outside braces",
|
||||
query: `label_replace("kube_pod_labels"{"label_cnpg_io_cluster"!=""}, "cluster","$1","label","(.+)")`,
|
||||
wantHint: true,
|
||||
wantMetricInHint: "kube_pod_labels",
|
||||
},
|
||||
{
|
||||
name: "correctly formatted query - no hint",
|
||||
query: `{"kube_pod_status_ready_time", condition="true"}`,
|
||||
wantHint: false,
|
||||
},
|
||||
{
|
||||
name: "old syntax without quotes - no hint for this pattern",
|
||||
query: `kube_pod_status_ready_time{condition="true"}`,
|
||||
wantHint: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := enhancePromQLError(tt.query, parseErr)
|
||||
errMsg := err.Error()
|
||||
|
||||
assert.True(t, strings.Contains(errMsg, parseErr.Error()),
|
||||
"error should contain original parse error message")
|
||||
|
||||
if tt.wantHint {
|
||||
assert.True(t, strings.Contains(errMsg, "Hint:"),
|
||||
"error should contain hint")
|
||||
assert.True(t, strings.Contains(errMsg, "inside the braces"),
|
||||
"error should mention putting metric inside braces")
|
||||
assert.True(t, strings.Contains(errMsg, tt.wantMetricInHint),
|
||||
"error should contain the metric name: %s", tt.wantMetricInHint)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestUnquotedDottedNamePattern(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected string // empty string means no match expected
|
||||
}{
|
||||
{
|
||||
name: "metric name at start",
|
||||
input: "k8s.pod.name",
|
||||
expected: "k8s.pod.name",
|
||||
},
|
||||
{
|
||||
name: "label in group by clause",
|
||||
input: "sum by (k8s.pod.name) (rate(x[5m]))",
|
||||
expected: "k8s.pod.name",
|
||||
},
|
||||
{
|
||||
name: "label in filter",
|
||||
input: "metric{k8s.namespace.name=\"default\"}",
|
||||
expected: "k8s.namespace.name",
|
||||
},
|
||||
{
|
||||
name: "metric with underscore and dots",
|
||||
input: "http_server.request.duration",
|
||||
expected: "http_server.request.duration",
|
||||
},
|
||||
{
|
||||
name: "quoted metric name - no match",
|
||||
input: `{"k8s.pod.name"}`,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "inside regex string - no match",
|
||||
input: `{pod=~"k8s.pod.name.*"}`,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "simple metric without dots - no match",
|
||||
input: "http_requests_total",
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "single dot only - no match",
|
||||
input: "a.b",
|
||||
expected: "a.b",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
matches := unquotedDottedNamePattern.FindStringSubmatch(tt.input)
|
||||
if tt.expected == "" {
|
||||
assert.True(t, len(matches) < 2 || matches[1] == "",
|
||||
"expected no match for input %q but got %v", tt.input, matches)
|
||||
} else {
|
||||
assert.True(t, len(matches) >= 2,
|
||||
"expected match for input %q but got none", tt.input)
|
||||
if len(matches) >= 2 {
|
||||
assert.Equal(t, tt.expected, matches[1],
|
||||
"unexpected match for input %q", tt.input)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuotedMetricOutsideBracesPattern(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected string // empty string means no match expected
|
||||
}{
|
||||
{
|
||||
name: "quoted metric followed by braces",
|
||||
input: `"kube_pod_status_ready_time"{"condition"="true"}`,
|
||||
expected: "kube_pod_status_ready_time",
|
||||
},
|
||||
{
|
||||
name: "quoted metric with space before brace",
|
||||
input: `"kube_pod_labels" {"label"!=""}`,
|
||||
expected: "kube_pod_labels",
|
||||
},
|
||||
{
|
||||
name: "quoted metric in label_replace",
|
||||
input: `label_replace("kube_pod_labels"{"x"="y"}, "a","b","c","d")`,
|
||||
expected: "kube_pod_labels",
|
||||
},
|
||||
{
|
||||
name: "quoted metric with dots",
|
||||
input: `"k8s.container.restarts"{"pod"="test"}`,
|
||||
expected: "k8s.container.restarts",
|
||||
},
|
||||
{
|
||||
name: "correct UTF-8 syntax - no match",
|
||||
input: `{"kube_pod_status_ready_time", condition="true"}`,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "old syntax without quotes - no match",
|
||||
input: `kube_pod_status_ready_time{condition="true"}`,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "quoted string in label value - no match",
|
||||
input: `metric{label="value"}{other="x"}`,
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
matches := quotedMetricOutsideBracesPattern.FindStringSubmatch(tt.input)
|
||||
if tt.expected == "" {
|
||||
assert.True(t, len(matches) < 2 || matches[1] == "",
|
||||
"expected no match for input %q but got %v", tt.input, matches)
|
||||
} else {
|
||||
assert.True(t, len(matches) >= 2,
|
||||
"expected match for input %q but got none", tt.input)
|
||||
if len(matches) >= 2 {
|
||||
assert.Equal(t, tt.expected, matches[1],
|
||||
"unexpected match for input %q", tt.input)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
func NewAuthNs(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
|
||||
emailPasswordAuthN := emailpasswordauthn.New(store)
|
||||
|
||||
googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store)
|
||||
googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store, providerSettings)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ package signoz
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/global/signozglobal"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
@@ -39,9 +40,10 @@ type Handlers struct {
|
||||
MetricsExplorer metricsexplorer.Handler
|
||||
Global global.Handler
|
||||
FlaggerHandler flagger.Handler
|
||||
GatewayHandler gateway.Handler
|
||||
}
|
||||
|
||||
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger) Handlers {
|
||||
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger, gatewayService gateway.Gateway) Handlers {
|
||||
return Handlers{
|
||||
SavedView: implsavedview.NewHandler(modules.SavedView),
|
||||
Apdex: implapdex.NewHandler(modules.Apdex),
|
||||
@@ -54,5 +56,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
|
||||
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
|
||||
Global: signozglobal.NewHandler(global),
|
||||
FlaggerHandler: flagger.NewHandler(flaggerService),
|
||||
GatewayHandler: gateway.NewHandler(gatewayService),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ func TestNewHandlers(t *testing.T) {
|
||||
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
|
||||
|
||||
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil)
|
||||
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil)
|
||||
|
||||
reflectVal := reflect.ValueOf(handlers)
|
||||
for i := 0; i < reflectVal.NumField(); i++ {
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/apiserver/signozapiserver"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
@@ -47,6 +48,7 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
|
||||
struct{ dashboard.Module }{},
|
||||
struct{ dashboard.Handler }{},
|
||||
struct{ metricsexplorer.Handler }{},
|
||||
struct{ gateway.Handler }{},
|
||||
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -167,15 +167,9 @@ func NewSQLMigrationProviderFactories(
|
||||
func NewTelemetryStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]] {
|
||||
return factory.MustNewNamedMap(
|
||||
clickhousetelemetrystore.NewFactory(
|
||||
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
return telemetrystorehook.NewSettingsFactory(s)
|
||||
}),
|
||||
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
return telemetrystorehook.NewLoggingFactory()
|
||||
}),
|
||||
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
return telemetrystorehook.NewInstrumentationFactory(s)
|
||||
}),
|
||||
telemetrystorehook.NewSettingsFactory(),
|
||||
telemetrystorehook.NewLoggingFactory(),
|
||||
telemetrystorehook.NewInstrumentationFactory(),
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -247,6 +241,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
|
||||
modules.Dashboard,
|
||||
handlers.Dashboard,
|
||||
handlers.MetricsExplorer,
|
||||
handlers.GatewayHandler,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/emailing"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
@@ -71,6 +72,7 @@ type SigNoz struct {
|
||||
Handlers Handlers
|
||||
QueryParser queryparser.QueryParser
|
||||
Flagger flagger.Flagger
|
||||
Gateway gateway.Gateway
|
||||
}
|
||||
|
||||
func New(
|
||||
@@ -89,6 +91,7 @@ func New(
|
||||
authNsCallback func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error),
|
||||
authzCallback func(context.Context, sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config],
|
||||
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, role.Module, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
|
||||
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
|
||||
) (*SigNoz, error) {
|
||||
// Initialize instrumentation
|
||||
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
|
||||
@@ -336,6 +339,12 @@ func New(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gatewayFactory := gatewayProviderFactory(licensing)
|
||||
gateway, err := gatewayFactory.New(ctx, providerSettings, config.Gateway)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Initialize authns
|
||||
store := sqlauthnstore.NewStore(sqlstore)
|
||||
authNs, err := authNsCallback(ctx, providerSettings, store, licensing)
|
||||
@@ -382,7 +391,7 @@ func New(
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboardModule)
|
||||
|
||||
// Initialize all handlers for the modules
|
||||
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger)
|
||||
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway)
|
||||
|
||||
// Initialize the API server
|
||||
apiserver, err := factory.NewProviderFromNamedMap(
|
||||
@@ -406,6 +415,7 @@ func New(
|
||||
licensing,
|
||||
tokenizer,
|
||||
config,
|
||||
modules.AuthDomain,
|
||||
}
|
||||
|
||||
// Initialize stats reporter from the available stats reporter provider factories
|
||||
@@ -457,5 +467,6 @@ func New(
|
||||
Handlers: handlers,
|
||||
QueryParser: queryParser,
|
||||
Flagger: flagger,
|
||||
Gateway: gateway,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -16,13 +16,13 @@ type provider struct {
|
||||
hooks []telemetrystore.TelemetryStoreHook
|
||||
}
|
||||
|
||||
func NewFactory(hookFactories ...telemetrystore.TelemetryStoreHookFactoryFunc) factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config] {
|
||||
func NewFactory(hookFactories ...factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config]) factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("clickhouse"), func(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStore, error) {
|
||||
return New(ctx, providerSettings, config, hookFactories...)
|
||||
})
|
||||
}
|
||||
|
||||
func New(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, hookFactories ...telemetrystore.TelemetryStoreHookFactoryFunc) (telemetrystore.TelemetryStore, error) {
|
||||
func New(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, hookFactories ...factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config]) (telemetrystore.TelemetryStore, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/telemetrystore/clickhousetelemetrystore")
|
||||
|
||||
options, err := clickhouse.ParseDSN(config.Clickhouse.DSN)
|
||||
@@ -40,14 +40,10 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var version string
|
||||
if err := chConn.QueryRow(ctx, "SELECT version()").Scan(&version); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
hooks := make([]telemetrystore.TelemetryStoreHook, len(hookFactories))
|
||||
for i, hookFactory := range hookFactories {
|
||||
hook, err := hookFactory(version).New(ctx, providerSettings, config)
|
||||
hook, err := hookFactory.New(ctx, providerSettings, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
)
|
||||
|
||||
type TelemetryStore interface {
|
||||
@@ -20,7 +19,6 @@ type TelemetryStoreHook interface {
|
||||
AfterQuery(ctx context.Context, event *QueryEvent)
|
||||
}
|
||||
|
||||
type TelemetryStoreHookFactoryFunc func(string) factory.ProviderFactory[TelemetryStoreHook, Config]
|
||||
|
||||
func WrapBeforeQuery(hooks []TelemetryStoreHook, ctx context.Context, event *QueryEvent) context.Context {
|
||||
for _, hook := range hooks {
|
||||
|
||||
@@ -13,23 +13,21 @@ import (
|
||||
)
|
||||
|
||||
type instrumentation struct {
|
||||
clickhouseVersion string
|
||||
clickhouseCluster string
|
||||
tracer trace.Tracer
|
||||
meter metric.Meter
|
||||
}
|
||||
|
||||
func NewInstrumentationFactory(version string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
func NewInstrumentationFactory() factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("instrumentation"), func(ctx context.Context, ps factory.ProviderSettings, c telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
|
||||
return NewInstrumentation(ctx, ps, c, version)
|
||||
return NewInstrumentation(ctx, ps, c)
|
||||
})
|
||||
}
|
||||
|
||||
func NewInstrumentation(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, version string) (telemetrystore.TelemetryStoreHook, error) {
|
||||
func NewInstrumentation(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
|
||||
meter := providerSettings.MeterProvider.Meter("github.com/SigNoz/signoz/pkg/telemetrystore")
|
||||
|
||||
return &instrumentation{
|
||||
clickhouseVersion: version,
|
||||
clickhouseCluster: config.Clickhouse.Cluster,
|
||||
tracer: providerSettings.TracerProvider.Tracer("github.com/SigNoz/signoz/pkg/telemetrystore"),
|
||||
meter: meter,
|
||||
@@ -54,7 +52,6 @@ func (hook *instrumentation) AfterQuery(ctx context.Context, event *telemetrysto
|
||||
attrs = append(
|
||||
attrs,
|
||||
semconv.DBStatementKey.String(event.Query),
|
||||
attribute.String("db.version", hook.clickhouseVersion),
|
||||
semconv.DBSystemKey.String("clickhouse"),
|
||||
semconv.DBOperationKey.String(event.Operation),
|
||||
attribute.String("clickhouse.cluster", hook.clickhouseCluster),
|
||||
|
||||
@@ -2,7 +2,6 @@ package telemetrystorehook
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
@@ -11,19 +10,17 @@ import (
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
clickHouseVersion string
|
||||
settings telemetrystore.QuerySettings
|
||||
}
|
||||
|
||||
func NewSettingsFactory(version string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
func NewSettingsFactory() factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("settings"), func(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
|
||||
return NewSettings(ctx, providerSettings, config, version)
|
||||
return NewSettings(ctx, providerSettings, config)
|
||||
})
|
||||
}
|
||||
|
||||
func NewSettings(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, version string) (telemetrystore.TelemetryStoreHook, error) {
|
||||
func NewSettings(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
|
||||
return &provider{
|
||||
clickHouseVersion: version,
|
||||
settings: config.Clickhouse.QuerySettings,
|
||||
}, nil
|
||||
}
|
||||
@@ -75,12 +72,9 @@ func (h *provider) BeforeQuery(ctx context.Context, _ *telemetrystore.QueryEvent
|
||||
settings["result_overflow_mode"] = ctx.Value("result_overflow_mode")
|
||||
}
|
||||
|
||||
// ClickHouse version check is added since this setting is not support on version below 25.5
|
||||
if strings.HasPrefix(h.clickHouseVersion, "25") && !h.settings.SecondaryIndicesEnableBulkFiltering {
|
||||
// TODO(srikanthccv): enable it when the "Cannot read all data" issue is fixed
|
||||
// https://github.com/ClickHouse/ClickHouse/issues/82283
|
||||
settings["secondary_indices_enable_bulk_filtering"] = false
|
||||
}
|
||||
// TODO(srikanthccv): enable it when the "Cannot read all data" issue is fixed
|
||||
// https://github.com/ClickHouse/ClickHouse/issues/82283
|
||||
settings["secondary_indices_enable_bulk_filtering"] = false
|
||||
|
||||
ctx = clickhouse.Context(ctx, clickhouse.WithSettings(settings))
|
||||
return ctx
|
||||
|
||||
@@ -44,7 +44,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/tokenizer/jwttokenizer")
|
||||
|
||||
if config.JWT.Secret == "" {
|
||||
settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
|
||||
settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_TOKENIZER_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
|
||||
}
|
||||
|
||||
lastObservedAtCache, err := ristretto.NewCache(&ristretto.Config[string, map[valuer.UUID]time.Time]{
|
||||
|
||||
@@ -32,10 +32,12 @@ type Identity struct {
|
||||
}
|
||||
|
||||
type CallbackIdentity struct {
|
||||
Name string `json:"name"`
|
||||
Email valuer.Email `json:"email"`
|
||||
OrgID valuer.UUID `json:"orgId"`
|
||||
State State `json:"state"`
|
||||
Name string `json:"name"`
|
||||
Email valuer.Email `json:"email"`
|
||||
OrgID valuer.UUID `json:"orgId"`
|
||||
State State `json:"state"`
|
||||
Groups []string `json:"groups,omitempty"`
|
||||
Role string `json:"role,omitempty"`
|
||||
}
|
||||
|
||||
type State struct {
|
||||
@@ -85,12 +87,14 @@ func NewIdentity(userID valuer.UUID, orgID valuer.UUID, email valuer.Email, role
|
||||
}
|
||||
}
|
||||
|
||||
func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State) *CallbackIdentity {
|
||||
func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State, groups []string, role string) *CallbackIdentity {
|
||||
return &CallbackIdentity{
|
||||
Name: name,
|
||||
Email: email,
|
||||
OrgID: orgID,
|
||||
State: state,
|
||||
Name: name,
|
||||
Email: email,
|
||||
OrgID: orgID,
|
||||
State: state,
|
||||
Groups: groups,
|
||||
Role: role,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -63,6 +63,7 @@ type AuthDomainConfig struct {
|
||||
SAML *SamlConfig `json:"samlConfig"`
|
||||
Google *GoogleConfig `json:"googleAuthConfig"`
|
||||
OIDC *OIDCConfig `json:"oidcConfig"`
|
||||
RoleMapping *RoleMapping `json:"roleMapping"`
|
||||
}
|
||||
|
||||
type AuthDomain struct {
|
||||
|
||||
@@ -2,10 +2,14 @@ package authtypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
const wildCardDomain = "*"
|
||||
|
||||
type GoogleConfig struct {
|
||||
// ClientID is the application's ID. For example, 292085223830.apps.googleusercontent.com.
|
||||
ClientID string `json:"clientId"`
|
||||
@@ -15,6 +19,30 @@ type GoogleConfig struct {
|
||||
|
||||
// What is the meaning of this? Should we remove this?
|
||||
RedirectURI string `json:"redirectURI"`
|
||||
|
||||
// Whether to fetch the Google workspace groups (required additional API scopes)
|
||||
FetchGroups bool `json:"fetchGroups"`
|
||||
|
||||
// Service Account creds JSON stored for Google Admin SDK access
|
||||
// This is content of the JSON file stored directly into db as string
|
||||
// Required if FetchGroups is true (unless running on GCE with default credentials)
|
||||
ServiceAccountJSON string `json:"serviceAccountJson,omitempty"`
|
||||
|
||||
// Map of workspace domain to admin email for service account impersonation
|
||||
// The service account will impersonate this admin to call the directory API
|
||||
// Use "*" as key for wildcard/default that matches any domain
|
||||
// Example: {"example.com": "admin@exmaple.com", "*": "fallbackadmin@company.com"}
|
||||
DomainToAdminEmail map[string]valuer.Email `json:"domainToAdminEmail,omitempty"`
|
||||
|
||||
// If true, fetch transitive group membership (recursive - groups that contains other groups)
|
||||
FetchTransitiveGroupMembership bool `json:"fetchTransitiveGroupMembership,omitempty"`
|
||||
|
||||
// Optional list of allowed groups
|
||||
// If this is present, only users belonging to one of these groups will be allowed to login
|
||||
AllowedGroups []string `json:"allowedGroups,omitempty"`
|
||||
|
||||
// Whether to skip email verification. Defaults to "false"
|
||||
InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"`
|
||||
}
|
||||
|
||||
func (config *GoogleConfig) UnmarshalJSON(data []byte) error {
|
||||
@@ -33,6 +61,37 @@ func (config *GoogleConfig) UnmarshalJSON(data []byte) error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required")
|
||||
}
|
||||
|
||||
if temp.FetchGroups {
|
||||
if len(temp.DomainToAdminEmail) == 0 {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "domainToAdminEmail is required if fetchGroups is true")
|
||||
}
|
||||
|
||||
if temp.ServiceAccountJSON == "" {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "serviceAccountJSON is required if fetchGroups is true")
|
||||
}
|
||||
}
|
||||
|
||||
if len(temp.AllowedGroups) > 0 && !temp.FetchGroups {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "fetchGroups must be true when allowedGroups is configured")
|
||||
}
|
||||
|
||||
*config = GoogleConfig(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (config *GoogleConfig) GetAdminEmailForDomain(userEmail string) string {
|
||||
domain := extractDomainFromEmail(userEmail)
|
||||
|
||||
if adminEmail, ok := config.DomainToAdminEmail[domain]; ok {
|
||||
return adminEmail.StringValue()
|
||||
}
|
||||
|
||||
return config.DomainToAdminEmail[wildCardDomain].StringValue()
|
||||
}
|
||||
|
||||
func extractDomainFromEmail(email string) string {
|
||||
if at := strings.LastIndex(email, "@"); at >= 0 {
|
||||
return email[at+1:]
|
||||
}
|
||||
return wildCardDomain
|
||||
}
|
||||
|
||||
133
pkg/types/authtypes/mapping.go
Normal file
133
pkg/types/authtypes/mapping.go
Normal file
@@ -0,0 +1,133 @@
|
||||
package authtypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
)
|
||||
|
||||
type AttributeMapping struct {
|
||||
// Key which contains the email in the claim/token/attributes map. Defaults to "email"
|
||||
Email string `json:"email"`
|
||||
|
||||
// Key which contains the name in the claim/token/attributes map. Defaults to "name"
|
||||
Name string `json:"name"`
|
||||
|
||||
// Key which contains the groups in the claim/token/attributes map. Defaults to "groups"
|
||||
Groups string `json:"groups"`
|
||||
|
||||
// Key which contains the role in the claim/token/attributes map. Defaults to "role"
|
||||
Role string `json:"role"`
|
||||
}
|
||||
|
||||
func (attr *AttributeMapping) UnmarshalJSON(data []byte) error {
|
||||
type Alias AttributeMapping
|
||||
|
||||
var temp Alias
|
||||
if err := json.Unmarshal(data, &temp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if temp.Email == "" {
|
||||
temp.Email = "email"
|
||||
}
|
||||
|
||||
if temp.Name == "" {
|
||||
temp.Name = "name"
|
||||
}
|
||||
|
||||
if temp.Groups == "" {
|
||||
temp.Groups = "groups"
|
||||
}
|
||||
|
||||
if temp.Role == "" {
|
||||
temp.Role = "role"
|
||||
}
|
||||
|
||||
*attr = AttributeMapping(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
type RoleMapping struct {
|
||||
// Default role any new SSO users. Defaults to "VIEWER"
|
||||
DefaultRole string `json:"defaultRole"`
|
||||
// Map of IDP group names to SigNoz roles. Key is group name, value is SigNoz role
|
||||
GroupMappings map[string]string `json:"groupMappings"`
|
||||
// If true, use the role claim directly from IDP instead of group mappings
|
||||
UseRoleAttribute bool `json:"useRoleAttribute"`
|
||||
}
|
||||
|
||||
func (typ *RoleMapping) UnmarshalJSON(data []byte) error {
|
||||
type Alias RoleMapping
|
||||
|
||||
var temp Alias
|
||||
if err := json.Unmarshal(data, &temp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if temp.DefaultRole != "" {
|
||||
if _, err := types.NewRole(strings.ToUpper(temp.DefaultRole)); err != nil {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid default role %s", temp.DefaultRole)
|
||||
}
|
||||
}
|
||||
|
||||
for group, role := range temp.GroupMappings {
|
||||
if _, err := types.NewRole(strings.ToUpper(role)); err != nil {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid role %s for group %s", role, group)
|
||||
}
|
||||
}
|
||||
|
||||
*typ = RoleMapping(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (roleMapping *RoleMapping) NewRoleFromCallbackIdentity(callbackIdentity *CallbackIdentity) types.Role {
|
||||
if roleMapping == nil {
|
||||
return types.RoleViewer
|
||||
}
|
||||
|
||||
if roleMapping.UseRoleAttribute && callbackIdentity.Role != "" {
|
||||
if role, err := types.NewRole(strings.ToUpper(callbackIdentity.Role)); err == nil {
|
||||
return role
|
||||
}
|
||||
}
|
||||
|
||||
if len(roleMapping.GroupMappings) > 0 && len(callbackIdentity.Groups) > 0 {
|
||||
highestRole := types.RoleViewer
|
||||
found := false
|
||||
|
||||
for _, group := range callbackIdentity.Groups {
|
||||
if mappedRole, exists := roleMapping.GroupMappings[group]; exists {
|
||||
found = true
|
||||
if role, err := types.NewRole(strings.ToUpper(mappedRole)); err == nil {
|
||||
if compareRoles(role, highestRole) > 0 {
|
||||
highestRole = role
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
return highestRole
|
||||
}
|
||||
}
|
||||
|
||||
if roleMapping.DefaultRole != "" {
|
||||
if role, err := types.NewRole(strings.ToUpper(roleMapping.DefaultRole)); err == nil {
|
||||
return role
|
||||
}
|
||||
}
|
||||
|
||||
return types.RoleViewer
|
||||
}
|
||||
|
||||
func compareRoles(a, b types.Role) int {
|
||||
order := map[types.Role]int{
|
||||
types.RoleViewer: 0,
|
||||
types.RoleEditor: 1,
|
||||
types.RoleAdmin: 2,
|
||||
}
|
||||
return order[a] - order[b]
|
||||
}
|
||||
@@ -22,7 +22,7 @@ type OIDCConfig struct {
|
||||
ClientSecret string `json:"clientSecret"`
|
||||
|
||||
// Mapping of claims to the corresponding fields in the token.
|
||||
ClaimMapping ClaimMapping `json:"claimMapping"`
|
||||
ClaimMapping AttributeMapping `json:"claimMapping"`
|
||||
|
||||
// Whether to skip email verification. Defaults to "false"
|
||||
InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"`
|
||||
@@ -31,11 +31,6 @@ type OIDCConfig struct {
|
||||
GetUserInfo bool `json:"getUserInfo"`
|
||||
}
|
||||
|
||||
type ClaimMapping struct {
|
||||
// Configurable key which contains the email claims. Defaults to "email"
|
||||
Email string `json:"email"`
|
||||
}
|
||||
|
||||
func (config *OIDCConfig) UnmarshalJSON(data []byte) error {
|
||||
type Alias OIDCConfig
|
||||
|
||||
@@ -56,8 +51,10 @@ func (config *OIDCConfig) UnmarshalJSON(data []byte) error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required")
|
||||
}
|
||||
|
||||
if temp.ClaimMapping.Email == "" {
|
||||
temp.ClaimMapping.Email = "email"
|
||||
if temp.ClaimMapping == (AttributeMapping{}) {
|
||||
if err := json.Unmarshal([]byte("{}"), &temp.ClaimMapping); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*config = OIDCConfig(temp)
|
||||
|
||||
@@ -20,6 +20,9 @@ type SamlConfig struct {
|
||||
// For providers like jumpcloud, this should be set to true.
|
||||
// Note: This is the reverse of WantAuthnRequestsSigned. If WantAuthnRequestsSigned is false, then InsecureSkipAuthNRequestsSigned should be true.
|
||||
InsecureSkipAuthNRequestsSigned bool `json:"insecureSkipAuthNRequestsSigned"`
|
||||
|
||||
// Mapping of SAML assertion attributes
|
||||
AttributeMapping AttributeMapping `json:"attributeMapping"`
|
||||
}
|
||||
|
||||
func (config *SamlConfig) UnmarshalJSON(data []byte) error {
|
||||
@@ -42,6 +45,12 @@ func (config *SamlConfig) UnmarshalJSON(data []byte) error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "samlCert is required")
|
||||
}
|
||||
|
||||
if temp.AttributeMapping == (AttributeMapping{}) {
|
||||
if err := json.Unmarshal([]byte("{}"), &temp.AttributeMapping); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*config = SamlConfig(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
86
pkg/types/gatewaytypes/ingestionkey.go
Normal file
86
pkg/types/gatewaytypes/ingestionkey.go
Normal file
@@ -0,0 +1,86 @@
|
||||
package gatewaytypes
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
type IngestionKey struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Value string `json:"value"`
|
||||
ExpiresAt time.Time `json:"expires_at"`
|
||||
Tags []string `json:"tags"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
WorkspaceID string `json:"workspace_id"`
|
||||
Limits []Limit `json:"limits"`
|
||||
}
|
||||
|
||||
type Limit struct {
|
||||
ID string `json:"id"`
|
||||
Signal string `json:"signal"` // "logs", "traces", "metrics"
|
||||
Config LimitConfig `json:"config"`
|
||||
Tags []string `json:"tags"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
KeyID string `json:"key_id"`
|
||||
Metric LimitMetric `json:"metric"`
|
||||
}
|
||||
|
||||
type LimitConfig struct {
|
||||
Day *LimitValue `json:"day,omitempty"`
|
||||
Second *LimitValue `json:"second,omitempty"`
|
||||
}
|
||||
|
||||
type LimitValue struct {
|
||||
Size int64 `json:"size,omitempty"`
|
||||
Count int64 `json:"count,omitempty"`
|
||||
}
|
||||
|
||||
type LimitMetric struct {
|
||||
Day *LimitMetricValue `json:"day,omitempty"`
|
||||
Second *LimitMetricValue `json:"second,omitempty"`
|
||||
}
|
||||
|
||||
type LimitMetricValue struct {
|
||||
Count int64 `json:"count"`
|
||||
Size int64 `json:"size"`
|
||||
}
|
||||
|
||||
type Pagination struct {
|
||||
Page int `json:"page"`
|
||||
PerPage int `json:"per_page"`
|
||||
Pages int `json:"pages"`
|
||||
Total int `json:"total"`
|
||||
}
|
||||
|
||||
type GettableIngestionKeys struct {
|
||||
Keys []IngestionKey `json:"keys"`
|
||||
Pagination Pagination `json:"_pagination"`
|
||||
}
|
||||
|
||||
type PostableIngestionKey struct {
|
||||
Name string `json:"name"`
|
||||
Tags []string `json:"tags"`
|
||||
ExpiresAt time.Time `json:"expires_at"`
|
||||
}
|
||||
|
||||
type GettableCreatedIngestionKey struct {
|
||||
ID string `json:"id"`
|
||||
Value string `json:"value"`
|
||||
}
|
||||
|
||||
type PostableIngestionKeyLimit struct {
|
||||
Signal string `json:"signal"`
|
||||
Config LimitConfig `json:"config"`
|
||||
Tags []string `json:"tags"`
|
||||
}
|
||||
|
||||
type GettableCreatedIngestionKeyLimit struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
|
||||
type UpdatableIngestionKeyLimit struct {
|
||||
Config LimitConfig `json:"config"`
|
||||
Tags []string `json:"tags"`
|
||||
}
|
||||
@@ -276,6 +276,31 @@ func (r *QueryRangeRequest) NumAggregationForQuery(name string) int64 {
|
||||
return int64(numAgg)
|
||||
}
|
||||
|
||||
// HasOrderSpecified returns true if any query has an explicit order provided.
|
||||
func (r *QueryRangeRequest) HasOrderSpecified() bool {
|
||||
for _, query := range r.CompositeQuery.Queries {
|
||||
switch spec := query.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
if len(spec.Order) > 0 {
|
||||
return true
|
||||
}
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
if len(spec.Order) > 0 {
|
||||
return true
|
||||
}
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
if len(spec.Order) > 0 {
|
||||
return true
|
||||
}
|
||||
case QueryBuilderFormula:
|
||||
if len(spec.Order) > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) FuncsForQuery(name string) []Function {
|
||||
funcs := []Function{}
|
||||
for _, query := range r.CompositeQuery.Queries {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from typing import Any, Callable, Dict
|
||||
from urllib.parse import urljoin
|
||||
from typing import Any, Callable, Dict, List
|
||||
from urllib.parse import urljoin, urlparse
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import pytest
|
||||
@@ -114,6 +114,43 @@ def create_saml_client(
|
||||
"attribute.name": "Role",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "groups",
|
||||
"protocol": "saml",
|
||||
"protocolMapper": "saml-group-membership-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"full.path": "false",
|
||||
"attribute.nameformat": "Basic",
|
||||
"single": "true", # ! this was changed to true as we need the groups in the single attribute section
|
||||
"friendly.name": "groups",
|
||||
"attribute.name": "groups",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "role attribute",
|
||||
"protocol": "saml",
|
||||
"protocolMapper": "saml-user-attribute-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"attribute.nameformat": "Basic",
|
||||
"user.attribute": "signoz_role",
|
||||
"friendly.name": "signoz_role",
|
||||
"attribute.name": "signoz_role",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "displayName",
|
||||
"protocol": "saml",
|
||||
"protocolMapper": "saml-user-property-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"attribute.nameformat": "Basic",
|
||||
"user.attribute": "firstName",
|
||||
"friendly.name": "displayName",
|
||||
"attribute.name": "displayName",
|
||||
},
|
||||
},
|
||||
],
|
||||
"defaultClientScopes": ["saml_organization", "role_list"],
|
||||
"optionalClientScopes": [],
|
||||
@@ -163,6 +200,8 @@ def create_oidc_client(
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
_ensure_groups_client_scope(client)
|
||||
|
||||
client.create_client(
|
||||
skip_exists=True,
|
||||
payload={
|
||||
@@ -208,6 +247,7 @@ def create_oidc_client(
|
||||
"profile",
|
||||
"basic",
|
||||
"email",
|
||||
"groups",
|
||||
],
|
||||
"optionalClientScopes": [
|
||||
"address",
|
||||
@@ -282,7 +322,9 @@ def get_oidc_settings(idp: types.TestContainerIDP) -> dict:
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_idp", scope="function")
|
||||
def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], None]:
|
||||
def create_user_idp(
|
||||
idp: types.TestContainerIDP,
|
||||
) -> Callable[[str, str, bool, str, str], None]:
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
@@ -292,17 +334,26 @@ def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], N
|
||||
|
||||
created_users = []
|
||||
|
||||
def _create_user_idp(email: str, password: str, verified: bool = True) -> None:
|
||||
user_id = client.create_user(
|
||||
exist_ok=False,
|
||||
payload={
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
},
|
||||
)
|
||||
def _create_user_idp(
|
||||
email: str,
|
||||
password: str,
|
||||
verified: bool = True,
|
||||
first_name: str = "",
|
||||
last_name: str = "",
|
||||
) -> None:
|
||||
payload = {
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
}
|
||||
|
||||
if first_name:
|
||||
payload["firstName"] = first_name
|
||||
if last_name:
|
||||
payload["lastName"] = last_name
|
||||
|
||||
user_id = client.create_user(exist_ok=False, payload=payload)
|
||||
client.set_user_password(user_id, password, temporary=False)
|
||||
created_users.append(user_id)
|
||||
|
||||
@@ -333,3 +384,344 @@ def idp_login(driver: webdriver.Chrome) -> Callable[[str, str], None]:
|
||||
wait.until(EC.invisibility_of_element((By.ID, "kc-login")))
|
||||
|
||||
return _idp_login
|
||||
|
||||
|
||||
@pytest.fixture(name="create_group_idp", scope="function")
|
||||
def create_group_idp(idp: types.TestContainerIDP) -> Callable[[str], str]:
|
||||
"""Creates a group in Keycloak IDP."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
created_groups = []
|
||||
|
||||
def _create_group_idp(group_name: str) -> str:
|
||||
group_id = client.create_group({"name": group_name}, skip_exists=True)
|
||||
created_groups.append(group_id)
|
||||
return group_id
|
||||
|
||||
yield _create_group_idp
|
||||
|
||||
for group_id in created_groups:
|
||||
try:
|
||||
client.delete_group(group_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_idp_with_groups", scope="function")
|
||||
def create_user_idp_with_groups(
|
||||
idp: types.TestContainerIDP,
|
||||
create_group_idp: Callable[[str], str], # pylint: disable=redefined-outer-name
|
||||
) -> Callable[[str, str, bool, List[str]], None]:
|
||||
"""Creates a user in Keycloak IDP with specified groups."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
created_users = []
|
||||
|
||||
def _create_user_idp_with_groups(
|
||||
email: str, password: str, verified: bool, groups: List[str]
|
||||
) -> None:
|
||||
# Create groups first
|
||||
group_ids = []
|
||||
for group_name in groups:
|
||||
group_id = create_group_idp(group_name)
|
||||
group_ids.append(group_id)
|
||||
|
||||
# Create user
|
||||
user_id = client.create_user(
|
||||
exist_ok=False,
|
||||
payload={
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
},
|
||||
)
|
||||
client.set_user_password(user_id, password, temporary=False)
|
||||
created_users.append(user_id)
|
||||
|
||||
# Add user to groups
|
||||
for group_id in group_ids:
|
||||
client.group_user_add(user_id, group_id)
|
||||
|
||||
yield _create_user_idp_with_groups
|
||||
|
||||
for user_id in created_users:
|
||||
try:
|
||||
client.delete_user(user_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(name="add_user_to_group", scope="function")
|
||||
def add_user_to_group(
|
||||
idp: types.TestContainerIDP,
|
||||
create_group_idp: Callable[[str], str], # pylint: disable=redefined-outer-name
|
||||
) -> Callable[[str, str], None]:
|
||||
"""Adds an existing user to a group."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
def _add_user_to_group(email: str, group_name: str) -> None:
|
||||
user_id = client.get_user_id(email)
|
||||
group_id = create_group_idp(group_name)
|
||||
client.group_user_add(user_id, group_id)
|
||||
|
||||
return _add_user_to_group
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_idp_with_role", scope="function")
|
||||
def create_user_idp_with_role(
|
||||
idp: types.TestContainerIDP,
|
||||
create_group_idp: Callable[[str], str], # pylint: disable=redefined-outer-name
|
||||
) -> Callable[[str, str, bool, str, List[str]], None]:
|
||||
"""Creates a user in Keycloak IDP with a custom role attribute and optional groups."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
created_users = []
|
||||
|
||||
def _create_user_idp_with_role(
|
||||
email: str, password: str, verified: bool, role: str, groups: List[str]
|
||||
) -> None:
|
||||
# Create groups first
|
||||
group_ids = []
|
||||
for group_name in groups:
|
||||
group_id = create_group_idp(group_name)
|
||||
group_ids.append(group_id)
|
||||
|
||||
# Create user with role attribute
|
||||
user_id = client.create_user(
|
||||
exist_ok=False,
|
||||
payload={
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
"attributes": {
|
||||
"signoz_role": role,
|
||||
},
|
||||
},
|
||||
)
|
||||
client.set_user_password(user_id, password, temporary=False)
|
||||
created_users.append(user_id)
|
||||
|
||||
# Add user to groups
|
||||
for group_id in group_ids:
|
||||
client.group_user_add(user_id, group_id)
|
||||
|
||||
yield _create_user_idp_with_role
|
||||
|
||||
for user_id in created_users:
|
||||
try:
|
||||
client.delete_user(user_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(name="setup_user_profile", scope="package")
|
||||
def setup_user_profile(idp: types.TestContainerIDP) -> Callable[[], None]:
|
||||
"""Setup Keycloak User Profile with signoz_role attribute."""
|
||||
|
||||
def _setup_user_profile() -> None:
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
# Get current user profile config
|
||||
profile = client.get_realm_users_profile()
|
||||
|
||||
# Check if signoz_role attribute already exists
|
||||
attributes = profile.get("attributes", [])
|
||||
signoz_role_exists = any(
|
||||
attr.get("name") == "signoz_role" for attr in attributes
|
||||
)
|
||||
|
||||
if not signoz_role_exists:
|
||||
# Add signoz_role attribute to user profile
|
||||
attributes.append(
|
||||
{
|
||||
"name": "signoz_role",
|
||||
"displayName": "SigNoz Role",
|
||||
"validations": {},
|
||||
"annotations": {},
|
||||
# "required": {
|
||||
# "roles": [] # Not required
|
||||
# },
|
||||
"permissions": {"view": ["admin", "user"], "edit": ["admin"]},
|
||||
"multivalued": False,
|
||||
}
|
||||
)
|
||||
profile["attributes"] = attributes
|
||||
|
||||
# Update the realm user profile
|
||||
client.update_realm_users_profile(payload=profile)
|
||||
|
||||
return _setup_user_profile
|
||||
|
||||
|
||||
def _ensure_groups_client_scope(client: KeycloakAdmin) -> None:
|
||||
"""Create 'groups' client scope if it doesn't exist."""
|
||||
# Check if groups scope exists
|
||||
scopes = client.get_client_scopes()
|
||||
groups_scope_exists = any(s.get("name") == "groups" for s in scopes)
|
||||
|
||||
if not groups_scope_exists:
|
||||
# Create the groups client scope
|
||||
client.create_client_scope(
|
||||
payload={
|
||||
"name": "groups",
|
||||
"description": "Group membership",
|
||||
"protocol": "openid-connect",
|
||||
"attributes": {
|
||||
"include.in.token.scope": "true",
|
||||
"display.on.consent.screen": "true",
|
||||
},
|
||||
"protocolMappers": [
|
||||
{
|
||||
"name": "groups",
|
||||
"protocol": "openid-connect",
|
||||
"protocolMapper": "oidc-group-membership-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"full.path": "false",
|
||||
"id.token.claim": "true",
|
||||
"access.token.claim": "true",
|
||||
"claim.name": "groups",
|
||||
"userinfo.token.claim": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "signoz_role",
|
||||
"protocol": "openid-connect",
|
||||
"protocolMapper": "oidc-usermodel-attribute-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"user.attribute": "signoz_role",
|
||||
"id.token.claim": "true",
|
||||
"access.token.claim": "true",
|
||||
"claim.name": "signoz_role",
|
||||
"userinfo.token.claim": "true",
|
||||
"jsonType.label": "String",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
skip_exists=True,
|
||||
)
|
||||
|
||||
|
||||
def get_oidc_domain(signoz: types.SigNoz, admin_token: str) -> dict:
|
||||
"""Helper to get the OIDC domain."""
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
return next(
|
||||
(
|
||||
domain
|
||||
for domain in response.json()["data"]
|
||||
if domain["name"] == "oidc.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def get_user_by_email(signoz: types.SigNoz, admin_token: str, email: str) -> dict:
|
||||
"""Helper to get a user by email."""
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
return next(
|
||||
(user for user in response.json()["data"] if user["email"] == email),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def perform_oidc_login(
|
||||
signoz: types.SigNoz, # pylint: disable=unused-argument
|
||||
idp: types.TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
get_session_context: Callable[[str], str],
|
||||
idp_login: Callable[[str, str], None], # pylint: disable=redefined-outer-name
|
||||
email: str,
|
||||
password: str,
|
||||
) -> None:
|
||||
"""Helper to perform OIDC login flow."""
|
||||
session_context = get_session_context(email)
|
||||
url = session_context["orgs"][0]["authNSupport"]["callback"][0]["url"]
|
||||
parsed_url = urlparse(url)
|
||||
actual_url = (
|
||||
f"{idp.container.host_configs['6060'].get(parsed_url.path)}?{parsed_url.query}"
|
||||
)
|
||||
driver.get(actual_url)
|
||||
idp_login(email, password)
|
||||
|
||||
|
||||
def get_saml_domain(signoz: types.SigNoz, admin_token: str) -> dict:
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
return next(
|
||||
(
|
||||
domain
|
||||
for domain in response.json()["data"]
|
||||
if domain["name"] == "saml.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def perform_saml_login(
|
||||
signoz: types.SigNoz, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
get_session_context: Callable[[str], str],
|
||||
idp_login: Callable[[str, str], None], # pylint: disable=redefined-outer-name
|
||||
email: str,
|
||||
password: str,
|
||||
) -> None:
|
||||
session_context = get_session_context(email)
|
||||
url = session_context["orgs"][0]["authNSupport"]["callback"][0]["url"]
|
||||
driver.get(url)
|
||||
idp_login(email, password)
|
||||
|
||||
|
||||
def delete_keycloak_client(idp: types.TestContainerIDP, client_id: str) -> None:
|
||||
keycloak_client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
try:
|
||||
# Get the internal Keycloak client ID from the clientId
|
||||
internal_client_id = keycloak_client.get_client_id(client_id=client_id)
|
||||
if internal_client_id:
|
||||
keycloak_client.delete_client(internal_client_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass # Client doesn't exist or already deleted, that's fine
|
||||
|
||||
@@ -329,3 +329,130 @@ def find_named_result(
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def build_scalar_query(
|
||||
name: str,
|
||||
signal: str,
|
||||
aggregations: List[Dict],
|
||||
*,
|
||||
group_by: Optional[List[Dict]] = None,
|
||||
order: Optional[List[Dict]] = None,
|
||||
limit: Optional[int] = None,
|
||||
filter_expression: Optional[str] = None,
|
||||
having_expression: Optional[str] = None,
|
||||
step_interval: int = DEFAULT_STEP_INTERVAL,
|
||||
disabled: bool = False,
|
||||
) -> Dict:
|
||||
spec: Dict[str, Any] = {
|
||||
"name": name,
|
||||
"signal": signal,
|
||||
"stepInterval": step_interval,
|
||||
"disabled": disabled,
|
||||
"aggregations": aggregations,
|
||||
}
|
||||
|
||||
if group_by:
|
||||
spec["groupBy"] = group_by
|
||||
|
||||
if order:
|
||||
spec["order"] = order
|
||||
|
||||
if limit is not None:
|
||||
spec["limit"] = limit
|
||||
|
||||
if filter_expression:
|
||||
spec["filter"] = {"expression": filter_expression}
|
||||
|
||||
if having_expression:
|
||||
spec["having"] = {"expression": having_expression}
|
||||
|
||||
return {"type": "builder_query", "spec": spec}
|
||||
|
||||
|
||||
def build_group_by_field(
|
||||
name: str,
|
||||
field_data_type: str = "string",
|
||||
field_context: str = "resource",
|
||||
) -> Dict:
|
||||
return {
|
||||
"name": name,
|
||||
"fieldDataType": field_data_type,
|
||||
"fieldContext": field_context,
|
||||
}
|
||||
|
||||
|
||||
def build_order_by(name: str, direction: str = "desc") -> Dict:
|
||||
return {"key": {"name": name}, "direction": direction}
|
||||
|
||||
|
||||
def build_logs_aggregation(expression: str, alias: Optional[str] = None) -> Dict:
|
||||
agg: Dict[str, Any] = {"expression": expression}
|
||||
if alias:
|
||||
agg["alias"] = alias
|
||||
return agg
|
||||
|
||||
|
||||
def build_metrics_aggregation(
|
||||
metric_name: str,
|
||||
time_aggregation: str,
|
||||
space_aggregation: str,
|
||||
temporality: str = "cumulative",
|
||||
) -> Dict:
|
||||
return {
|
||||
"metricName": metric_name,
|
||||
"temporality": temporality,
|
||||
"timeAggregation": time_aggregation,
|
||||
"spaceAggregation": space_aggregation,
|
||||
}
|
||||
|
||||
|
||||
def get_scalar_table_data(response_json: Dict) -> List[List[Any]]:
|
||||
results = response_json.get("data", {}).get("data", {}).get("results", [])
|
||||
if not results:
|
||||
return []
|
||||
return results[0].get("data", [])
|
||||
|
||||
|
||||
def get_scalar_columns(response_json: Dict) -> List[Dict]:
|
||||
results = response_json.get("data", {}).get("data", {}).get("results", [])
|
||||
if not results:
|
||||
return []
|
||||
return results[0].get("columns", [])
|
||||
|
||||
|
||||
def assert_scalar_result_order(
|
||||
data: List[List[Any]],
|
||||
expected_order: List[tuple],
|
||||
context: str = "",
|
||||
) -> None:
|
||||
assert len(data) == len(expected_order), (
|
||||
f"{context}: Expected {len(expected_order)} rows, got {len(data)}. "
|
||||
f"Data: {data}"
|
||||
)
|
||||
|
||||
for i, (row, expected) in enumerate(zip(data, expected_order)):
|
||||
for j, expected_val in enumerate(expected):
|
||||
actual_val = row[j]
|
||||
assert actual_val == expected_val, (
|
||||
f"{context}: Row {i}, column {j} mismatch. "
|
||||
f"Expected {expected_val}, got {actual_val}. "
|
||||
f"Full row: {row}, expected: {expected}"
|
||||
)
|
||||
|
||||
|
||||
def assert_scalar_column_order(
|
||||
data: List[List[Any]],
|
||||
column_index: int,
|
||||
expected_values: List[Any],
|
||||
context: str = "",
|
||||
) -> None:
|
||||
assert len(data) == len(
|
||||
expected_values
|
||||
), f"{context}: Expected {len(expected_values)} rows, got {len(data)}"
|
||||
|
||||
actual_values = [row[column_index] for row in data]
|
||||
assert actual_values == expected_values, (
|
||||
f"{context}: Column {column_index} order mismatch. "
|
||||
f"Expected {expected_values}, got {actual_values}"
|
||||
)
|
||||
|
||||
@@ -65,6 +65,8 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
|
||||
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
|
||||
"SIGNOZ_GATEWAY_URL": gateway.container_configs["8080"].base(),
|
||||
"SIGNOZ_TOKENIZER_JWT_SECRET": "secret",
|
||||
"SIGNOZ_GLOBAL_INGESTION__URL": "https://ingest.test.signoz.cloud"
|
||||
}
|
||||
| sqlstore.env
|
||||
| clickhouse.env
|
||||
|
||||
@@ -78,11 +78,15 @@ def test_create_and_get_domain(
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.json()["status"] == "success"
|
||||
data = response.json()["data"]
|
||||
|
||||
assert len(data) == 2
|
||||
assert data[0]["name"] == "domain-google.integration.test"
|
||||
assert data[0]["ssoType"] == "google_auth"
|
||||
assert data[1]["name"] == "domain-saml.integration.test"
|
||||
assert data[1]["ssoType"] == "saml"
|
||||
|
||||
for domain in data:
|
||||
assert domain["name"] in [
|
||||
"domain-google.integration.test",
|
||||
"domain-saml.integration.test",
|
||||
]
|
||||
assert domain["ssoType"] in ["google_auth", "saml"]
|
||||
|
||||
|
||||
def test_create_invalid(
|
||||
@@ -165,3 +169,91 @@ def test_create_invalid(
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
|
||||
def test_create_invalid_role_mapping(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Test that invalid role mappings are rejected."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Create domain with invalid defaultRole
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
json={
|
||||
"name": "invalid-role-test.integration.test",
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": "saml-entity",
|
||||
"samlIdp": "saml-idp",
|
||||
"samlCert": "saml-cert",
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "SUPERADMIN", # Invalid role
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
# Create domain with invalid role in groupMappings
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
json={
|
||||
"name": "invalid-group-role.integration.test",
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": "saml-entity",
|
||||
"samlIdp": "saml-idp",
|
||||
"samlCert": "saml-cert",
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"admins": "SUPERUSER", # Invalid role
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
# Valid role mapping should succeed
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
json={
|
||||
"name": "valid-role-mapping.integration.test",
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": "saml-entity",
|
||||
"samlIdp": "saml-idp",
|
||||
"samlCert": "saml-cert",
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import uuid
|
||||
from http import HTTPStatus
|
||||
from typing import Any, Callable, Dict, List
|
||||
|
||||
@@ -10,6 +11,11 @@ from fixtures.auth import (
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.idputils import (
|
||||
get_saml_domain,
|
||||
get_user_by_email,
|
||||
perform_saml_login,
|
||||
)
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
|
||||
|
||||
@@ -102,7 +108,7 @@ def test_saml_authn(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str], None],
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
@@ -150,7 +156,7 @@ def test_idp_initiated_saml_authn(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str], None],
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
@@ -195,3 +201,372 @@ def test_idp_initiated_saml_authn(
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_update_domain_with_group_mappings(
|
||||
signoz: SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_saml_settings: Callable[[], dict],
|
||||
) -> None:
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_saml_domain(signoz, admin_token)
|
||||
settings = get_saml_settings()
|
||||
|
||||
# update the existing saml domain to have role mappings also
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": settings["entityID"],
|
||||
"samlIdp": settings["singleSignOnServiceLocation"],
|
||||
"samlCert": settings["certificate"],
|
||||
"attributeMapping": {
|
||||
"name": "givenName",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
"signoz-viewers": "VIEWER",
|
||||
},
|
||||
"useRoleAttribute": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_saml_role_mapping_single_group_admin(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in 'signoz-admins' group gets ADMIN role.
|
||||
"""
|
||||
email = "admin-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-admins"])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_saml_role_mapping_single_group_editor(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in 'signoz-editors' group gets EDITOR role.
|
||||
"""
|
||||
email = "editor-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-editors"])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_saml_role_mapping_multiple_groups_highest_wins(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in multiple groups gets highest role.
|
||||
User is in both 'signoz-viewers' and 'signoz-editors'.
|
||||
Expected: User gets EDITOR (highest of VIEWER and EDITOR).
|
||||
"""
|
||||
email = f"multi-group-user-{uuid.uuid4().hex[:8]}@saml.integration.test"
|
||||
create_user_idp_with_groups(
|
||||
email, "password", True, ["signoz-viewers", "signoz-editors"]
|
||||
)
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_saml_role_mapping_explicit_viewer_group(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User explicitly mapped to VIEWER via groups should get VIEWER.
|
||||
This tests the bug where VIEWER group mappings were incorrectly ignored.
|
||||
"""
|
||||
email = "viewer-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-viewers"])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_role_mapping_unmapped_group_uses_default(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in unmapped group falls back to default role (VIEWER).
|
||||
"""
|
||||
email = "unmapped-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["some-other-group"])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_update_domain_with_use_role_claim(
|
||||
signoz: SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_saml_settings: Callable[[], dict],
|
||||
) -> None:
|
||||
"""
|
||||
Updates SAML domain to enable useRoleAttribute (direct role attribute).
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_saml_domain(signoz, admin_token)
|
||||
settings = get_saml_settings()
|
||||
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": settings["entityID"],
|
||||
"samlIdp": settings["singleSignOnServiceLocation"],
|
||||
"samlCert": settings["certificate"],
|
||||
"attributeMapping": {
|
||||
"name": "displayName",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
},
|
||||
"useRoleAttribute": True,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_saml_role_mapping_role_claim_takes_precedence(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: useRoleAttribute takes precedence over group mappings.
|
||||
User is in 'signoz-editors' group but has role attribute 'ADMIN'.
|
||||
Expected: User gets ADMIN (from role attribute).
|
||||
"""
|
||||
|
||||
setup_user_profile()
|
||||
|
||||
email = "role-claim-precedence@saml.integration.test"
|
||||
create_user_idp_with_role(email, "password", True, "ADMIN", ["signoz-editors"])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_saml_role_mapping_invalid_role_claim_fallback(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: Invalid role claim falls back to group mappings.
|
||||
User has invalid role 'SUPERADMIN' and is in 'signoz-editors'.
|
||||
Expected: User gets EDITOR (from group mapping).
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "invalid-role-user@saml.integration.test"
|
||||
create_user_idp_with_role(email, "password", True, "SUPERADMIN", ["signoz-editors"])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_saml_role_mapping_case_insensitive(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: Role attribute matching is case-insensitive.
|
||||
User has role 'admin' (lowercase).
|
||||
Expected: User gets ADMIN role.
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "lowercase-role-user@saml.integration.test"
|
||||
create_user_idp_with_role(email, "password", True, "admin", [])
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_saml_name_mapping(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""Test that user's display name is mapped from SAML displayName attribute."""
|
||||
email = "named-user@saml.integration.test"
|
||||
|
||||
create_user_idp(email, "password", True, "Jane", "Smith")
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert (
|
||||
found_user["displayName"] == "Jane"
|
||||
) # We are only mapping the first name here
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_empty_name_fallback(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""Test that user without displayName in IDP still gets created."""
|
||||
email = "no-name@saml.integration.test"
|
||||
|
||||
create_user_idp(email, "password", True)
|
||||
|
||||
perform_saml_login(
|
||||
signoz, driver, get_session_context, idp_login, email, "password"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
@@ -11,6 +11,11 @@ from fixtures.auth import (
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.idputils import (
|
||||
get_oidc_domain,
|
||||
get_user_by_email,
|
||||
perform_oidc_login,
|
||||
)
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
|
||||
|
||||
@@ -75,7 +80,7 @@ def test_oidc_authn(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool], None],
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
@@ -127,3 +132,403 @@ def test_oidc_authn(
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_oidc_update_domain_with_group_mappings(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_oidc_settings: Callable[[str], dict],
|
||||
) -> None:
|
||||
"""
|
||||
Updates OIDC domain to add role mapping with group mappings and claim mapping.
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_oidc_domain(signoz, admin_token)
|
||||
client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}"
|
||||
settings = get_oidc_settings(client_id)
|
||||
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "oidc",
|
||||
"oidcConfig": {
|
||||
"clientId": settings["client_id"],
|
||||
"clientSecret": settings["client_secret"],
|
||||
"issuer": f"{idp.container.container_configs['6060'].get(urlparse(settings['issuer']).path)}",
|
||||
"issuerAlias": settings["issuer"],
|
||||
"getUserInfo": True,
|
||||
"claimMapping": {
|
||||
"email": "email",
|
||||
"name": "name",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
"signoz-viewers": "VIEWER",
|
||||
},
|
||||
"useRoleAttribute": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_oidc_role_mapping_single_group_admin(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in 'signoz-admins' group gets ADMIN role.
|
||||
"""
|
||||
email = "admin-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-admins"])
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_single_group_editor(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in 'signoz-editors' group gets EDITOR role.
|
||||
"""
|
||||
email = "editor-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-editors"])
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_multiple_groups_highest_wins(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in multiple groups gets highest role.
|
||||
User is in 'signoz-viewers' and 'signoz-admins'.
|
||||
Expected: User gets ADMIN (highest of the two).
|
||||
"""
|
||||
email = "multi-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(
|
||||
email, "password123", True, ["signoz-viewers", "signoz-admins"]
|
||||
)
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_explicit_viewer_group(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user explicitly mapped to VIEWER via groups gets VIEWER.
|
||||
Tests the bug where VIEWER mappings were ignored.
|
||||
"""
|
||||
email = "viewer-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-viewers"])
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_unmapped_group_uses_default(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in unmapped group falls back to default role.
|
||||
"""
|
||||
email = "unmapped-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["some-other-group"])
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_oidc_update_domain_with_use_role_claim(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_oidc_settings: Callable[[str], dict],
|
||||
) -> None:
|
||||
"""
|
||||
Updates OIDC domain to enable useRoleClaim.
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_oidc_domain(signoz, admin_token)
|
||||
client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}"
|
||||
settings = get_oidc_settings(client_id)
|
||||
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "oidc",
|
||||
"oidcConfig": {
|
||||
"clientId": settings["client_id"],
|
||||
"clientSecret": settings["client_secret"],
|
||||
"issuer": f"{idp.container.container_configs['6060'].get(urlparse(settings['issuer']).path)}",
|
||||
"issuerAlias": settings["issuer"],
|
||||
"getUserInfo": True,
|
||||
"claimMapping": {
|
||||
"email": "email",
|
||||
"name": "name",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
},
|
||||
"useRoleAttribute": True,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_oidc_role_mapping_role_claim_takes_precedence(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: useRoleAttribute takes precedence over group mappings.
|
||||
User is in 'signoz-editors' group but has role claim 'ADMIN'.
|
||||
Expected: User gets ADMIN (from role claim).
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "role-claim-precedence@oidc.integration.test"
|
||||
create_user_idp_with_role(email, "password123", True, "ADMIN", ["signoz-editors"])
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_invalid_role_claim_fallback(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: Invalid role claim falls back to group mappings.
|
||||
User has invalid role 'SUPERADMIN' and is in 'signoz-editors'.
|
||||
Expected: User gets EDITOR (from group mapping).
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "invalid-role-user@oidc.integration.test"
|
||||
create_user_idp_with_role(
|
||||
email, "password123", True, "SUPERADMIN", ["signoz-editors"]
|
||||
)
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_case_insensitive(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: Role claim matching is case-insensitive.
|
||||
User has role 'editor' (lowercase).
|
||||
Expected: User gets EDITOR role.
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "lowercase-role-user@oidc.integration.test"
|
||||
create_user_idp_with_role(email, "password123", True, "editor", [])
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_oidc_name_mapping(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], dict],
|
||||
) -> None:
|
||||
"""Test that user's display name is mapped from IDP name claim."""
|
||||
email = "named-user@oidc.integration.test"
|
||||
|
||||
# Create user with explicit first/last name
|
||||
create_user_idp(email, "password123", True, first_name="John", last_name="Doe")
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
users = response.json()["data"]
|
||||
found_user = next((u for u in users if u["email"] == email), None)
|
||||
|
||||
assert found_user is not None
|
||||
# Keycloak concatenates firstName + lastName into "name" claim
|
||||
assert found_user["displayName"] == "John Doe"
|
||||
assert found_user["role"] == "VIEWER" # Default role
|
||||
|
||||
|
||||
def test_oidc_empty_name_uses_fallback(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], dict],
|
||||
) -> None:
|
||||
"""Test that user without name in IDP still gets created (may have empty displayName)."""
|
||||
email = "no-name@oidc.integration.test"
|
||||
|
||||
# Create user without first/last name
|
||||
create_user_idp(email, "password123", True)
|
||||
|
||||
perform_oidc_login(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
users = response.json()["data"]
|
||||
found_user = next((u for u in users if u["email"] == email), None)
|
||||
|
||||
# User should still be created even with empty name
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
# Note: displayName may be empty - this is a known limitation
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user