mirror of
https://github.com/SigNoz/signoz.git
synced 2026-02-07 18:32:12 +00:00
Compare commits
4 Commits
refactor/c
...
chore/push
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8322e09485 | ||
|
|
6d137bcdff | ||
|
|
444161671d | ||
|
|
31e9e896ec |
62
.vscode/launch.json
vendored
Normal file
62
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
{
|
||||
"configurations": [
|
||||
{
|
||||
"name": "enterprise",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"buildFlags": [
|
||||
"-race",
|
||||
"-ldflags=-X github.com/SigNoz/signoz/pkg/version.version=dev -X github.com/SigNoz/signoz/pkg/version.variant=enterprise -X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud"
|
||||
],
|
||||
"program": "${workspaceFolder}/cmd/enterprise/",
|
||||
"args": ["server"],
|
||||
"env": {
|
||||
"SIGNOZ_VERSION_BANNER_ENABLED": "true",
|
||||
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
|
||||
"SIGNOZ_SQLSTORE_PROVIDER": "sqlite",
|
||||
"SIGNOZ_SQLSTORE_SQLITE_PATH": "${workspaceFolder}/.dev/data/sqlite/enterprise.db",
|
||||
"SIGNOZ_WEB_ENABLED": "false",
|
||||
"SIGNOZ_SQLMIGRATOR_LOCK_INTERVAL": "1m",
|
||||
"SIGNOZ_ALERTMANAGER_PROVIDER": "signoz",
|
||||
"SIGNOZ_TELEMETRYSTORE_PROVIDER": "clickhouse",
|
||||
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER": "cluster",
|
||||
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN": "tcp://0.0.0.0:9001",
|
||||
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": "false",
|
||||
"SIGNOZ_EMAILING_ENABLED": "false",
|
||||
"DOT_METRICS_ENABLED": "true",
|
||||
"SIGNOZ_GLOBAL_INGESTION__URL": "http://localhost:3001",
|
||||
"SIGNOZ_TOKENIZER_PROVIDER": "opaque"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "community",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"buildFlags": [
|
||||
"-race",
|
||||
"-ldflags=-X github.com/SigNoz/signoz/pkg/version.version=dev -X github.com/SigNoz/signoz/pkg/version.variant=community -X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud"
|
||||
],
|
||||
"program": "${workspaceFolder}/cmd/community/",
|
||||
"args": ["server"],
|
||||
"env": {
|
||||
"SIGNOZ_VERSION_BANNER_ENABLED": "true",
|
||||
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
|
||||
"SIGNOZ_SQLSTORE_PROVIDER": "sqlite",
|
||||
"SIGNOZ_SQLSTORE_SQLITE_PATH": "${workspaceFolder}/.dev/data/sqlite/community.db",
|
||||
"SIGNOZ_WEB_ENABLED": "false",
|
||||
"SIGNOZ_SQLMIGRATOR_LOCK_INTERVAL": "1m",
|
||||
"SIGNOZ_ALERTMANAGER_PROVIDER": "signoz",
|
||||
"SIGNOZ_TELEMETRYSTORE_PROVIDER": "clickhouse",
|
||||
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER": "cluster",
|
||||
"SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN": "tcp://0.0.0.0:9001",
|
||||
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": "false",
|
||||
"SIGNOZ_EMAILING_ENABLED": "false",
|
||||
"DOT_METRICS_ENABLED": "true",
|
||||
"SIGNOZ_GLOBAL_INGESTION__URL": "http://localhost:3001",
|
||||
"SIGNOZ_TOKENIZER_PROVIDER": "opaque"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
4
Makefile
4
Makefile
@@ -86,7 +86,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
|
||||
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
|
||||
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
|
||||
SIGNOZ_WEB_ENABLED=false \
|
||||
SIGNOZ_JWT_SECRET=secret \
|
||||
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
|
||||
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
|
||||
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
|
||||
@@ -103,7 +103,7 @@ go-run-community: ## Runs the community go backend server
|
||||
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
|
||||
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
|
||||
SIGNOZ_WEB_ENABLED=false \
|
||||
SIGNOZ_JWT_SECRET=secret \
|
||||
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
|
||||
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
|
||||
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
|
||||
|
||||
@@ -195,7 +195,7 @@ services:
|
||||
- GODEBUG=netdns=go
|
||||
- TELEMETRY_ENABLED=true
|
||||
- DEPLOYMENT_TYPE=docker-swarm
|
||||
- SIGNOZ_JWT_SECRET=secret
|
||||
- SIGNOZ_TOKENIZER_JWT_SECRET=secret
|
||||
- DOT_METRICS_ENABLED=true
|
||||
healthcheck:
|
||||
test:
|
||||
|
||||
@@ -2736,12 +2736,25 @@ paths:
|
||||
- sessions
|
||||
components:
|
||||
schemas:
|
||||
AuthtypesAttributeMapping:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
groups:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
role:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesAuthDomainConfig:
|
||||
properties:
|
||||
googleAuthConfig:
|
||||
$ref: '#/components/schemas/AuthtypesGoogleConfig'
|
||||
oidcConfig:
|
||||
$ref: '#/components/schemas/AuthtypesOIDCConfig'
|
||||
roleMapping:
|
||||
$ref: '#/components/schemas/AuthtypesRoleMapping'
|
||||
samlConfig:
|
||||
$ref: '#/components/schemas/AuthtypesSamlConfig'
|
||||
ssoEnabled:
|
||||
@@ -2775,11 +2788,6 @@ components:
|
||||
url:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesClaimMapping:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesDeprecatedGettableLogin:
|
||||
properties:
|
||||
accessJwt:
|
||||
@@ -2811,6 +2819,8 @@ components:
|
||||
$ref: '#/components/schemas/AuthtypesOIDCConfig'
|
||||
orgId:
|
||||
type: string
|
||||
roleMapping:
|
||||
$ref: '#/components/schemas/AuthtypesRoleMapping'
|
||||
samlConfig:
|
||||
$ref: '#/components/schemas/AuthtypesSamlConfig'
|
||||
ssoEnabled:
|
||||
@@ -2834,17 +2844,33 @@ components:
|
||||
type: object
|
||||
AuthtypesGoogleConfig:
|
||||
properties:
|
||||
allowedGroups:
|
||||
items:
|
||||
type: string
|
||||
type: array
|
||||
clientId:
|
||||
type: string
|
||||
clientSecret:
|
||||
type: string
|
||||
domainToAdminEmail:
|
||||
additionalProperties:
|
||||
type: string
|
||||
type: object
|
||||
fetchGroups:
|
||||
type: boolean
|
||||
fetchTransitiveGroupMembership:
|
||||
type: boolean
|
||||
insecureSkipEmailVerified:
|
||||
type: boolean
|
||||
redirectURI:
|
||||
type: string
|
||||
serviceAccountJson:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesOIDCConfig:
|
||||
properties:
|
||||
claimMapping:
|
||||
$ref: '#/components/schemas/AuthtypesClaimMapping'
|
||||
$ref: '#/components/schemas/AuthtypesAttributeMapping'
|
||||
clientId:
|
||||
type: string
|
||||
clientSecret:
|
||||
@@ -2895,8 +2921,22 @@ components:
|
||||
refreshToken:
|
||||
type: string
|
||||
type: object
|
||||
AuthtypesRoleMapping:
|
||||
properties:
|
||||
defaultRole:
|
||||
type: string
|
||||
groupMappings:
|
||||
additionalProperties:
|
||||
type: string
|
||||
nullable: true
|
||||
type: object
|
||||
useRoleAttribute:
|
||||
type: boolean
|
||||
type: object
|
||||
AuthtypesSamlConfig:
|
||||
properties:
|
||||
attributeMapping:
|
||||
$ref: '#/components/schemas/AuthtypesAttributeMapping'
|
||||
insecureSkipAuthNRequestsSigned:
|
||||
type: boolean
|
||||
samlCert:
|
||||
|
||||
@@ -2,6 +2,7 @@ package oidccallbackauthn
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
@@ -19,25 +20,27 @@ const (
|
||||
redirectPath string = "/api/v1/complete/oidc"
|
||||
)
|
||||
|
||||
var (
|
||||
scopes []string = []string{"email", oidc.ScopeOpenID}
|
||||
)
|
||||
var defaultScopes []string = []string{"email", "profile", oidc.ScopeOpenID}
|
||||
|
||||
var _ authn.CallbackAuthN = (*AuthN)(nil)
|
||||
|
||||
type AuthN struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
store authtypes.AuthNStore
|
||||
licensing licensing.Licensing
|
||||
httpClient *client.Client
|
||||
}
|
||||
|
||||
func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn")
|
||||
|
||||
httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &AuthN{
|
||||
settings: settings,
|
||||
store: store,
|
||||
licensing: licensing,
|
||||
httpClient: httpClient,
|
||||
@@ -126,7 +129,40 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
|
||||
name := ""
|
||||
if nameClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Name; nameClaim != "" {
|
||||
if n, ok := claims[nameClaim].(string); ok {
|
||||
name = n
|
||||
}
|
||||
}
|
||||
|
||||
var groups []string
|
||||
if groupsClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Groups; groupsClaim != "" {
|
||||
if claimValue, exists := claims[groupsClaim]; exists {
|
||||
switch g := claimValue.(type) {
|
||||
case []any:
|
||||
for _, group := range g {
|
||||
if gs, ok := group.(string); ok {
|
||||
groups = append(groups, gs)
|
||||
}
|
||||
}
|
||||
case string:
|
||||
// Some IDPs return a single group as a string instead of an array
|
||||
groups = append(groups, g)
|
||||
default:
|
||||
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
role := ""
|
||||
if roleClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Role; roleClaim != "" {
|
||||
if r, ok := claims[roleClaim].(string); ok {
|
||||
role = r
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
@@ -145,6 +181,13 @@ func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.UR
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
scopes := make([]string, len(defaultScopes))
|
||||
copy(scopes, defaultScopes)
|
||||
|
||||
if authDomain.AuthDomainConfig().RoleMapping != nil && len(authDomain.AuthDomainConfig().RoleMapping.GroupMappings) > 0 {
|
||||
scopes = append(scopes, "groups")
|
||||
}
|
||||
|
||||
return oidcProvider, &oauth2.Config{
|
||||
ClientID: authDomain.AuthDomainConfig().OIDC.ClientID,
|
||||
ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret,
|
||||
|
||||
@@ -96,7 +96,26 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.")
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
|
||||
name := ""
|
||||
if nameAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Name; nameAttribute != "" {
|
||||
if val := assertionInfo.Values.Get(nameAttribute); val != "" {
|
||||
name = val
|
||||
}
|
||||
}
|
||||
|
||||
var groups []string
|
||||
if groupAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Groups; groupAttribute != "" {
|
||||
groups = assertionInfo.Values.GetAll(groupAttribute)
|
||||
}
|
||||
|
||||
role := ""
|
||||
if roleAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Role; roleAttribute != "" {
|
||||
if val := assertionInfo.Values.Get(roleAttribute); val != "" {
|
||||
role = val
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
|
||||
@@ -211,7 +211,10 @@ describe('VariableItem Integration Tests', () => {
|
||||
await user.clear(textInput);
|
||||
await user.type(textInput, 'new-text-value');
|
||||
|
||||
// Should call onValueUpdate after debounce
|
||||
// Blur the input to trigger the value update
|
||||
await user.tab();
|
||||
|
||||
// Should call onValueUpdate after blur
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(mockOnValueUpdate).toHaveBeenCalledWith(
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
|
||||
import {
|
||||
fireEvent,
|
||||
render,
|
||||
screen,
|
||||
userEvent,
|
||||
waitFor,
|
||||
} from 'tests/test-utils';
|
||||
import {
|
||||
IDashboardVariable,
|
||||
TSortVariableValuesType,
|
||||
@@ -639,4 +645,186 @@ describe('VariableItem Component', () => {
|
||||
await expectCircularDependencyError();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Textbox Variable Default Value Handling', () => {
|
||||
test('saves textbox variable with defaultValue and selectedValue set to textboxValue', async () => {
|
||||
const user = userEvent.setup();
|
||||
const textboxVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Textbox Variable',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'my-default-value',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(textboxVariable);
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with defaultValue and selectedValue equal to textboxValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'my-default-value',
|
||||
defaultValue: 'my-default-value',
|
||||
selectedValue: 'my-default-value',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
test('saves textbox variable with empty values when textboxValue is empty', async () => {
|
||||
const user = userEvent.setup();
|
||||
const textboxVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Textbox Variable',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: '',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(textboxVariable);
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with empty defaultValue and selectedValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: '',
|
||||
defaultValue: '',
|
||||
selectedValue: '',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
test('updates textbox defaultValue and selectedValue when user changes textboxValue input', async () => {
|
||||
const user = userEvent.setup();
|
||||
const textboxVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Textbox Variable',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'initial-value',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(textboxVariable);
|
||||
|
||||
// Change the textbox value
|
||||
const textboxInput = screen.getByPlaceholderText(
|
||||
'Enter a default value (if any)...',
|
||||
);
|
||||
await user.clear(textboxInput);
|
||||
await user.type(textboxInput, 'updated-value');
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with the updated defaultValue and selectedValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'updated-value',
|
||||
defaultValue: 'updated-value',
|
||||
selectedValue: 'updated-value',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
test('non-textbox variables use variableDefaultValue instead of textboxValue', async () => {
|
||||
const user = userEvent.setup();
|
||||
const queryVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Query Variable',
|
||||
type: 'QUERY',
|
||||
queryValue: 'SELECT * FROM test',
|
||||
textboxValue: 'should-not-be-used',
|
||||
defaultValue: 'query-default-value',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(queryVariable);
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with defaultValue not being textboxValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'QUERY',
|
||||
defaultValue: 'query-default-value',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
|
||||
// Verify that defaultValue is NOT the textboxValue
|
||||
const savedVariable = onSave.mock.calls[0][1];
|
||||
expect(savedVariable.defaultValue).not.toBe('should-not-be-used');
|
||||
});
|
||||
|
||||
test('switching to textbox type sets defaultValue and selectedValue correctly on save', async () => {
|
||||
const user = userEvent.setup();
|
||||
// Start with a QUERY variable
|
||||
const queryVariable: IDashboardVariable = {
|
||||
id: TEST_VAR_IDS.VAR1,
|
||||
name: TEST_VAR_NAMES.VAR1,
|
||||
description: 'Test Variable',
|
||||
type: 'QUERY',
|
||||
queryValue: 'SELECT * FROM test',
|
||||
...VARIABLE_DEFAULTS,
|
||||
order: 0,
|
||||
};
|
||||
|
||||
renderVariableItem(queryVariable);
|
||||
|
||||
// Switch to TEXTBOX type
|
||||
const textboxButton = findButtonByText(TEXT.TEXTBOX);
|
||||
expect(textboxButton).toBeInTheDocument();
|
||||
if (textboxButton) {
|
||||
await user.click(textboxButton);
|
||||
}
|
||||
|
||||
// Enter a default value in the textbox input
|
||||
const textboxInput = screen.getByPlaceholderText(
|
||||
'Enter a default value (if any)...',
|
||||
);
|
||||
await user.type(textboxInput, 'new-textbox-default');
|
||||
|
||||
// Click save button
|
||||
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
|
||||
await user.click(saveButton);
|
||||
|
||||
// Verify that onSave was called with type TEXTBOX and correct defaultValue and selectedValue
|
||||
expect(onSave).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'new-textbox-default',
|
||||
defaultValue: 'new-textbox-default',
|
||||
selectedValue: 'new-textbox-default',
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -320,6 +320,10 @@ function VariableItem({
|
||||
]);
|
||||
|
||||
const variableValue = useMemo(() => {
|
||||
if (queryType === 'TEXTBOX') {
|
||||
return variableTextboxValue;
|
||||
}
|
||||
|
||||
if (variableMultiSelect) {
|
||||
let value = variableData.selectedValue;
|
||||
if (isEmpty(value)) {
|
||||
@@ -352,6 +356,8 @@ function VariableItem({
|
||||
variableData.selectedValue,
|
||||
variableData.showALLOption,
|
||||
variableDefaultValue,
|
||||
variableTextboxValue,
|
||||
queryType,
|
||||
previewValues,
|
||||
]);
|
||||
|
||||
@@ -367,13 +373,10 @@ function VariableItem({
|
||||
multiSelect: variableMultiSelect,
|
||||
showALLOption: queryType === 'DYNAMIC' ? true : variableShowALLOption,
|
||||
sort: variableSortType,
|
||||
...(queryType === 'TEXTBOX' && {
|
||||
selectedValue: (variableData.selectedValue ||
|
||||
variableTextboxValue) as never,
|
||||
}),
|
||||
...(queryType !== 'TEXTBOX' && {
|
||||
defaultValue: variableDefaultValue as never,
|
||||
}),
|
||||
// the reason we need to do this is because defaultValues are treated differently in case of textbox type
|
||||
// They are the exact same and not like the other types where defaultValue is a separate field
|
||||
defaultValue:
|
||||
queryType === 'TEXTBOX' ? variableTextboxValue : variableDefaultValue,
|
||||
modificationUUID: generateUUID(),
|
||||
id: variableData.id || generateUUID(),
|
||||
order: variableData.order,
|
||||
|
||||
@@ -25,6 +25,12 @@
|
||||
}
|
||||
}
|
||||
|
||||
&.focused {
|
||||
.variable-value {
|
||||
outline: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-value {
|
||||
display: flex;
|
||||
min-width: 120px;
|
||||
@@ -93,6 +99,12 @@
|
||||
|
||||
.lightMode {
|
||||
.variable-item {
|
||||
&.focused {
|
||||
.variable-value {
|
||||
border: 1px solid var(--bg-robin-400);
|
||||
}
|
||||
}
|
||||
|
||||
.variable-name {
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
background: var(--bg-vanilla-100);
|
||||
|
||||
@@ -94,7 +94,7 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
cycleNodes,
|
||||
});
|
||||
}
|
||||
}, [setVariablesToGetUpdated, variables, variablesTableData]);
|
||||
}, [variables, variablesTableData]);
|
||||
|
||||
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
|
||||
// also trigger when the global time changes
|
||||
|
||||
@@ -80,10 +80,12 @@ describe('VariableItem', () => {
|
||||
/>
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
expect(screen.getByPlaceholderText('Enter value')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByTestId('variable-textbox-test_variable'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('calls onChange event handler when Input value changes', async () => {
|
||||
test('calls onValueUpdate when Input value changes and blurs', async () => {
|
||||
render(
|
||||
<MockQueryClientProvider>
|
||||
<VariableItem
|
||||
@@ -102,13 +104,19 @@ describe('VariableItem', () => {
|
||||
</MockQueryClientProvider>,
|
||||
);
|
||||
|
||||
const inputElement = screen.getByTestId('variable-textbox-test_variable');
|
||||
|
||||
// Change the value
|
||||
act(() => {
|
||||
const inputElement = screen.getByPlaceholderText('Enter value');
|
||||
fireEvent.change(inputElement, { target: { value: 'newValue' } });
|
||||
});
|
||||
|
||||
// Blur the input to trigger the update
|
||||
act(() => {
|
||||
fireEvent.blur(inputElement);
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
// expect(mockOnValueUpdate).toHaveBeenCalledTimes(1);
|
||||
expect(mockOnValueUpdate).toHaveBeenCalledWith(
|
||||
'testVariable',
|
||||
'test_variable',
|
||||
|
||||
@@ -8,14 +8,14 @@ import './DashboardVariableSelection.styles.scss';
|
||||
|
||||
import { orange } from '@ant-design/colors';
|
||||
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
|
||||
import { Input, Popover, Tooltip, Typography } from 'antd';
|
||||
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
|
||||
import { memo, useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { AppState } from 'store/reducers';
|
||||
@@ -71,6 +71,15 @@ function VariableItem({
|
||||
string | string[] | undefined
|
||||
>(undefined);
|
||||
|
||||
// Local state for textbox input to ensure smooth editing experience
|
||||
const [textboxInputValue, setTextboxInputValue] = useState<string>(
|
||||
(variableData.selectedValue?.toString() ||
|
||||
variableData.defaultValue?.toString()) ??
|
||||
'',
|
||||
);
|
||||
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
|
||||
const textboxInputRef = useRef<InputRef>(null);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
@@ -371,7 +380,7 @@ function VariableItem({
|
||||
}, [variableData.type, variableData.customValue]);
|
||||
|
||||
return (
|
||||
<div className="variable-item">
|
||||
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
|
||||
<Typography.Text className="variable-name" ellipsis>
|
||||
${variableData.name}
|
||||
{variableData.description && (
|
||||
@@ -384,16 +393,40 @@ function VariableItem({
|
||||
<div className="variable-value">
|
||||
{variableData.type === 'TEXTBOX' ? (
|
||||
<Input
|
||||
ref={textboxInputRef}
|
||||
placeholder="Enter value"
|
||||
data-testid={`variable-textbox-${variableData.id}`}
|
||||
bordered={false}
|
||||
key={variableData.selectedValue?.toString()}
|
||||
defaultValue={variableData.selectedValue?.toString()}
|
||||
value={textboxInputValue}
|
||||
title={textboxInputValue}
|
||||
onChange={(e): void => {
|
||||
debouncedHandleChange(e.target.value || '');
|
||||
setTextboxInputValue(e.target.value);
|
||||
}}
|
||||
style={{
|
||||
width:
|
||||
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
|
||||
onFocus={(): void => {
|
||||
setIsTextboxFocused(true);
|
||||
}}
|
||||
onBlur={(e): void => {
|
||||
setIsTextboxFocused(false);
|
||||
const value = e.target.value.trim();
|
||||
// If empty, reset to default value
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
debouncedHandleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
debouncedHandleChange(value);
|
||||
}
|
||||
}}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter') {
|
||||
const value = textboxInputValue.trim();
|
||||
if (!value && variableData.defaultValue) {
|
||||
setTextboxInputValue(variableData.defaultValue.toString());
|
||||
debouncedHandleChange(variableData.defaultValue.toString());
|
||||
} else {
|
||||
debouncedHandleChange(value);
|
||||
}
|
||||
textboxInputRef.current?.blur();
|
||||
}
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
|
||||
@@ -257,6 +257,15 @@ export const onUpdateVariableNode = (
|
||||
): void => {
|
||||
const visited = new Set<string>();
|
||||
|
||||
// If nodeToUpdate is not in topologicalOrder (e.g., CUSTOM variable),
|
||||
// we still need to mark its children as needing updates
|
||||
if (!topologicalOrder.includes(nodeToUpdate)) {
|
||||
// Mark direct children of the node as visited so they get processed
|
||||
(graph[nodeToUpdate] || []).forEach((child) => {
|
||||
visited.add(child);
|
||||
});
|
||||
}
|
||||
|
||||
// Start processing from the node to update
|
||||
topologicalOrder.forEach((node) => {
|
||||
if (node === nodeToUpdate || visited.has(node)) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { areArraysEqual } from './util';
|
||||
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
|
||||
|
||||
describe('areArraysEqual', () => {
|
||||
it('should return true for equal arrays with same order', () => {
|
||||
@@ -31,3 +31,121 @@ describe('areArraysEqual', () => {
|
||||
expect(areArraysEqual(array1, array2)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('onUpdateVariableNode', () => {
|
||||
// Graph structure:
|
||||
// deployment -> namespace -> service -> pod
|
||||
// deployment has no parents, namespace depends on deployment, etc.
|
||||
const graph: VariableGraph = {
|
||||
deployment: ['namespace'],
|
||||
namespace: ['service'],
|
||||
service: ['pod'],
|
||||
pod: [],
|
||||
customVar: ['namespace'], // CUSTOM variable that affects namespace
|
||||
};
|
||||
|
||||
const topologicalOrder = ['deployment', 'namespace', 'service', 'pod'];
|
||||
|
||||
it('should call callback for the node and all its descendants', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('deployment', graph, topologicalOrder, callback);
|
||||
|
||||
expect(visited).toEqual(['deployment', 'namespace', 'service', 'pod']);
|
||||
});
|
||||
|
||||
it('should call callback starting from a middle node', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('namespace', graph, topologicalOrder, callback);
|
||||
|
||||
expect(visited).toEqual(['namespace', 'service', 'pod']);
|
||||
});
|
||||
|
||||
it('should only call callback for the leaf node when updating leaf', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('pod', graph, topologicalOrder, callback);
|
||||
|
||||
expect(visited).toEqual(['pod']);
|
||||
});
|
||||
|
||||
it('should handle CUSTOM variable not in topologicalOrder by updating its children', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
// customVar is not in topologicalOrder but has namespace as a child
|
||||
onUpdateVariableNode('customVar', graph, topologicalOrder, callback);
|
||||
|
||||
// Should process namespace and its descendants (service, pod)
|
||||
expect(visited).toEqual(['namespace', 'service', 'pod']);
|
||||
});
|
||||
|
||||
it('should handle node not in graph gracefully', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('unknownNode', graph, topologicalOrder, callback);
|
||||
|
||||
// Should not call callback for any node since unknownNode has no children
|
||||
expect(visited).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle empty graph', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('deployment', {}, topologicalOrder, callback);
|
||||
|
||||
// deployment is in topologicalOrder, so callback is called for it
|
||||
expect(visited).toEqual(['deployment']);
|
||||
});
|
||||
|
||||
it('should handle empty topologicalOrder', () => {
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode('deployment', graph, [], callback);
|
||||
|
||||
expect(visited).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle CUSTOM variable with multiple children', () => {
|
||||
const graphWithMultipleChildren: VariableGraph = {
|
||||
...graph,
|
||||
customMulti: ['namespace', 'service'], // CUSTOM variable affecting multiple nodes
|
||||
};
|
||||
|
||||
const visited: string[] = [];
|
||||
const callback = (node: string): void => {
|
||||
visited.push(node);
|
||||
};
|
||||
|
||||
onUpdateVariableNode(
|
||||
'customMulti',
|
||||
graphWithMultipleChildren,
|
||||
topologicalOrder,
|
||||
callback,
|
||||
);
|
||||
|
||||
// Should process namespace, service, and pod (descendants)
|
||||
expect(visited).toEqual(['namespace', 'service', 'pod']);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -318,7 +318,9 @@ function GridCardGraph({
|
||||
version={version}
|
||||
threshold={threshold}
|
||||
headerMenuList={menuList}
|
||||
isFetchingResponse={queryResponse.isFetching}
|
||||
isFetchingResponse={
|
||||
queryResponse.isFetching || variablesToGetUpdated.length > 0
|
||||
}
|
||||
setRequestData={setRequestData}
|
||||
onClickHandler={onClickHandler}
|
||||
onDragSelect={onDragSelect}
|
||||
|
||||
@@ -291,6 +291,10 @@ export function DashboardProvider({
|
||||
|
||||
variable.order = order;
|
||||
existingOrders.add(order);
|
||||
// ! BWC - Specific case for backward compatibility where textboxValue was used instead of defaultValue
|
||||
if (variable.type === 'TEXTBOX' && !variable.defaultValue) {
|
||||
variable.defaultValue = variable.textboxValue || '';
|
||||
}
|
||||
}
|
||||
|
||||
if (variable.id === undefined) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, waitFor } from '@testing-library/react';
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import getDashboard from 'api/v1/dashboards/id/get';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import ROUTES from 'constants/routes';
|
||||
@@ -379,12 +379,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
// Empty URL variables - tests initialization flow
|
||||
mockGetUrlVariables.mockReturnValue({});
|
||||
|
||||
const { getByTestId } = renderWithDashboardProvider(
|
||||
`/dashboard/${DASHBOARD_ID}`,
|
||||
{
|
||||
dashboardId: DASHBOARD_ID,
|
||||
},
|
||||
);
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -415,16 +412,14 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
});
|
||||
|
||||
// Verify dashboard state contains the variables with default values
|
||||
await waitFor(() => {
|
||||
const dashboardVariables = getByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables).toHaveProperty('environment');
|
||||
expect(parsedVariables).toHaveProperty('services');
|
||||
// Default allSelected values should be preserved
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
expect(parsedVariables).toHaveProperty('environment');
|
||||
expect(parsedVariables).toHaveProperty('services');
|
||||
// Default allSelected values should be preserved
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
|
||||
it('should merge URL variables with dashboard data and normalize values correctly', async () => {
|
||||
@@ -438,12 +433,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
.mockReturnValueOnce('development')
|
||||
.mockReturnValueOnce(['db', 'cache']);
|
||||
|
||||
const { getByTestId } = renderWithDashboardProvider(
|
||||
`/dashboard/${DASHBOARD_ID}`,
|
||||
{
|
||||
dashboardId: DASHBOARD_ID,
|
||||
},
|
||||
);
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -474,18 +466,16 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
});
|
||||
|
||||
// Verify the dashboard state reflects the normalized URL values
|
||||
await waitFor(() => {
|
||||
const dashboardVariables = getByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
// The selectedValue should be updated with normalized URL values
|
||||
expect(parsedVariables.environment.selectedValue).toBe('development');
|
||||
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
|
||||
// The selectedValue should be updated with normalized URL values
|
||||
expect(parsedVariables.environment.selectedValue).toBe('development');
|
||||
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
|
||||
|
||||
// allSelected should be set to false when URL values override
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
// allSelected should be set to false when URL values override
|
||||
expect(parsedVariables.environment.allSelected).toBe(false);
|
||||
expect(parsedVariables.services.allSelected).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle ALL_SELECTED_VALUE from URL and set allSelected correctly', async () => {
|
||||
@@ -495,12 +485,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
|
||||
mockGetUrlVariables.mockReturnValue(urlVariables);
|
||||
|
||||
const { getByTestId } = renderWithDashboardProvider(
|
||||
`/dashboard/${DASHBOARD_ID}`,
|
||||
{
|
||||
dashboardId: DASHBOARD_ID,
|
||||
},
|
||||
);
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
@@ -513,8 +500,8 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
);
|
||||
|
||||
// Verify that allSelected is set to true for the services variable
|
||||
await waitFor(() => {
|
||||
const dashboardVariables = getByTestId('dashboard-variables');
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.services.allSelected).toBe(true);
|
||||
@@ -563,3 +550,203 @@ describe('Dashboard Provider - URL Variables Integration', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
|
||||
const DASHBOARD_ID = 'test-dashboard-id';
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockGetUrlVariables.mockReturnValue({});
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
mockNormalizeUrlValueForVariable.mockImplementation((urlValue) => {
|
||||
if (urlValue === undefined || urlValue === null) {
|
||||
return urlValue;
|
||||
}
|
||||
return urlValue as IDashboardVariable['selectedValue'];
|
||||
});
|
||||
});
|
||||
|
||||
describe('Textbox Variable defaultValue Migration', () => {
|
||||
it('should set defaultValue from textboxValue for TEXTBOX variables without defaultValue (BWC)', async () => {
|
||||
// Mock dashboard with TEXTBOX variable that has textboxValue but no defaultValue
|
||||
// This simulates old data format before the migration
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myTextbox: {
|
||||
id: 'textbox-id',
|
||||
name: 'myTextbox',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'legacy-default-value',
|
||||
// defaultValue is intentionally missing to test BWC
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that defaultValue is set from textboxValue
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
|
||||
expect(parsedVariables.myTextbox.textboxValue).toBe('legacy-default-value');
|
||||
expect(parsedVariables.myTextbox.defaultValue).toBe('legacy-default-value');
|
||||
});
|
||||
});
|
||||
|
||||
it('should not override existing defaultValue for TEXTBOX variables', async () => {
|
||||
// Mock dashboard with TEXTBOX variable that already has defaultValue
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myTextbox: {
|
||||
id: 'textbox-id',
|
||||
name: 'myTextbox',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: 'old-textbox-value',
|
||||
defaultValue: 'existing-default-value',
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that existing defaultValue is preserved
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
|
||||
expect(parsedVariables.myTextbox.defaultValue).toBe(
|
||||
'existing-default-value',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should set empty defaultValue when textboxValue is also empty for TEXTBOX variables', async () => {
|
||||
// Mock dashboard with TEXTBOX variable with empty textboxValue and no defaultValue
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myTextbox: {
|
||||
id: 'textbox-id',
|
||||
name: 'myTextbox',
|
||||
type: 'TEXTBOX',
|
||||
textboxValue: '',
|
||||
// defaultValue is intentionally missing
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that defaultValue is set to empty string
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
|
||||
expect(parsedVariables.myTextbox.defaultValue).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
it('should not apply BWC logic to non-TEXTBOX variables', async () => {
|
||||
// Mock dashboard with QUERY variable that has no defaultValue
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
mockGetDashboard.mockResolvedValue({
|
||||
httpStatusCode: 200,
|
||||
data: {
|
||||
id: DASHBOARD_ID,
|
||||
title: 'Test Dashboard',
|
||||
data: {
|
||||
variables: {
|
||||
myQuery: {
|
||||
id: 'query-id',
|
||||
name: 'myQuery',
|
||||
type: 'QUERY',
|
||||
queryValue: 'SELECT * FROM test',
|
||||
textboxValue: 'should-not-be-used',
|
||||
// defaultValue is intentionally missing
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
sort: 'DISABLED',
|
||||
} as any,
|
||||
},
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
|
||||
dashboardId: DASHBOARD_ID,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
|
||||
});
|
||||
|
||||
// Verify that defaultValue is NOT set from textboxValue for QUERY type
|
||||
await waitFor(async () => {
|
||||
const dashboardVariables = await screen.findByTestId('dashboard-variables');
|
||||
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
|
||||
|
||||
expect(parsedVariables.myQuery.type).toBe('QUERY');
|
||||
// defaultValue should not be set to textboxValue for non-TEXTBOX variables
|
||||
expect(parsedVariables.myQuery.defaultValue).not.toBe('should-not-be-used');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -37,6 +37,7 @@ export interface IDashboardVariable {
|
||||
// Custom
|
||||
customValue?: string;
|
||||
// Textbox
|
||||
// special case of variable where defaultValue is same as this. Otherwise, defaultValue is a single field
|
||||
textboxValue?: string;
|
||||
|
||||
sort: TSortVariableValuesType;
|
||||
|
||||
2
go.mod
2
go.mod
@@ -343,7 +343,7 @@ require (
|
||||
golang.org/x/time v0.11.0 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
gonum.org/v1/gonum v0.16.0 // indirect
|
||||
google.golang.org/api v0.236.0 // indirect
|
||||
google.golang.org/api v0.236.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/grpc v1.75.1 // indirect
|
||||
|
||||
2
go.sum
2
go.sum
@@ -1717,6 +1717,8 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX
|
||||
google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
|
||||
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
|
||||
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE=
|
||||
|
||||
@@ -6,10 +6,15 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/coreos/go-oidc/v3/oidc"
|
||||
"golang.org/x/oauth2"
|
||||
"golang.org/x/oauth2/google"
|
||||
admin "google.golang.org/api/admin/directory/v1"
|
||||
"google.golang.org/api/option"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -17,19 +22,28 @@ const (
|
||||
redirectPath string = "/api/v1/complete/google"
|
||||
)
|
||||
|
||||
var (
|
||||
scopes []string = []string{"email"}
|
||||
)
|
||||
var scopes []string = []string{"email", "profile"}
|
||||
|
||||
var _ authn.CallbackAuthN = (*AuthN)(nil)
|
||||
|
||||
type AuthN struct {
|
||||
store authtypes.AuthNStore
|
||||
store authtypes.AuthNStore
|
||||
settings factory.ScopedProviderSettings
|
||||
httpClient *client.Client
|
||||
}
|
||||
|
||||
func New(ctx context.Context, store authtypes.AuthNStore) (*AuthN, error) {
|
||||
func New(ctx context.Context, store authtypes.AuthNStore, providerSettings factory.ProviderSettings) (*AuthN, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/authn/callbackauthn/googlecallbackauthn")
|
||||
|
||||
httpClient, err := client.New(settings.Logger(), providerSettings.TracerProvider, providerSettings.MeterProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &AuthN{
|
||||
store: store,
|
||||
store: store,
|
||||
settings: settings,
|
||||
httpClient: httpClient,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -58,11 +72,13 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
}
|
||||
|
||||
if err := query.Get("error"); err != "" {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: error while authenticating", "error", err, "error_description", query.Get("error_description"))
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: error while authenticating").WithAdditional(query.Get("error_description"))
|
||||
}
|
||||
|
||||
state, err := authtypes.NewStateFromString(query.Get("state"))
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: invalid state", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "google: invalid state").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
@@ -76,10 +92,12 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
if err != nil {
|
||||
var retrieveError *oauth2.RetrieveError
|
||||
if errors.As(err, &retrieveError) {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body))
|
||||
a.settings.Logger().ErrorContext(ctx, "google: failed to get token", "error", err, "error_description", retrieveError.ErrorDescription, "body", string(retrieveError.Body))
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription)
|
||||
}
|
||||
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token").WithAdditional(err.Error())
|
||||
a.settings.Logger().ErrorContext(ctx, "google: failed to get token", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token")
|
||||
}
|
||||
|
||||
rawIDToken, ok := token.Extra("id_token").(string)
|
||||
@@ -90,7 +108,8 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
verifier := oidcProvider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().Google.ClientID})
|
||||
idToken, err := verifier.Verify(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token").WithAdditional(err.Error())
|
||||
a.settings.Logger().ErrorContext(ctx, "google: failed to verify token", "error", err)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token")
|
||||
}
|
||||
|
||||
var claims struct {
|
||||
@@ -101,11 +120,20 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
}
|
||||
|
||||
if err := idToken.Claims(&claims); err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: missing or invalid claims", "error", err)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: missing or invalid claims").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if claims.HostedDomain != authDomain.StorableAuthDomain().Name {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim %s", claims.HostedDomain)
|
||||
a.settings.Logger().ErrorContext(ctx, "google: unexpected hd claim", "expected", authDomain.StorableAuthDomain().Name, "actual", claims.HostedDomain)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim")
|
||||
}
|
||||
|
||||
if !authDomain.AuthDomainConfig().Google.InsecureSkipEmailVerified {
|
||||
if !claims.EmailVerified {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: email is not verified", "email", claims.Email)
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: email is not verified")
|
||||
}
|
||||
}
|
||||
|
||||
email, err := valuer.NewEmail(claims.Email)
|
||||
@@ -113,8 +141,24 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "google: failed to parse email").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state), nil
|
||||
var groups []string
|
||||
if authDomain.AuthDomainConfig().Google.FetchGroups {
|
||||
groups, err = a.fetchGoogleWorkspaceGroups(ctx, claims.Email, authDomain.AuthDomainConfig().Google)
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: could not fetch groups", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: could not fetch groups").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
allowedGroups := authDomain.AuthDomainConfig().Google.AllowedGroups
|
||||
if len(allowedGroups) > 0 {
|
||||
groups = filterGroups(groups, allowedGroups)
|
||||
if len(groups) == 0 {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: user %q is not in any allowed groups", claims.Email).WithAdditional(allowedGroups...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state, groups, ""), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
@@ -136,3 +180,90 @@ func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain,
|
||||
}).String(),
|
||||
}
|
||||
}
|
||||
|
||||
func (a *AuthN) fetchGoogleWorkspaceGroups(ctx context.Context, userEmail string, config *authtypes.GoogleConfig) ([]string, error) {
|
||||
adminEmail := config.GetAdminEmailForDomain(userEmail)
|
||||
if adminEmail == "" {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no admin email configured for domain of %s", userEmail)
|
||||
}
|
||||
|
||||
jwtConfig, err := google.JWTConfigFromJSON([]byte(config.ServiceAccountJSON), admin.AdminDirectoryGroupReadonlyScope)
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: invalid service account credentials", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid service account credentials")
|
||||
}
|
||||
|
||||
jwtConfig.Subject = adminEmail
|
||||
|
||||
customCtx := context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client())
|
||||
|
||||
adminService, err := admin.NewService(ctx, option.WithHTTPClient(jwtConfig.Client(customCtx)))
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: unable to create directory service", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to create directory service")
|
||||
}
|
||||
|
||||
checkedGroups := make(map[string]struct{})
|
||||
|
||||
return a.getGroups(ctx, adminService, userEmail, config.FetchTransitiveGroupMembership, checkedGroups)
|
||||
}
|
||||
|
||||
// Recursive method
|
||||
func (a *AuthN) getGroups(ctx context.Context, adminService *admin.Service, userEmail string, fetchTransitive bool, checkedGroups map[string]struct{}) ([]string, error) {
|
||||
var userGroups []string
|
||||
var pageToken string
|
||||
|
||||
for {
|
||||
call := adminService.Groups.List().UserKey(userEmail)
|
||||
if pageToken != "" {
|
||||
call = call.PageToken(pageToken)
|
||||
}
|
||||
|
||||
groupList, err := call.Context(ctx).Do()
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: unable to list groups", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to list groups")
|
||||
}
|
||||
|
||||
for _, group := range groupList.Groups {
|
||||
if _, exists := checkedGroups[group.Email]; exists {
|
||||
continue
|
||||
}
|
||||
|
||||
checkedGroups[group.Email] = struct{}{}
|
||||
userGroups = append(userGroups, group.Email)
|
||||
|
||||
if fetchTransitive {
|
||||
transitiveGroups, err := a.getGroups(ctx, adminService, group.Email, fetchTransitive, checkedGroups)
|
||||
if err != nil {
|
||||
a.settings.Logger().ErrorContext(ctx, "google: unable to list transitive groups", "error", err)
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to list transitive groups")
|
||||
}
|
||||
userGroups = append(userGroups, transitiveGroups...)
|
||||
}
|
||||
}
|
||||
|
||||
pageToken = groupList.NextPageToken
|
||||
if pageToken == "" {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return userGroups, nil
|
||||
}
|
||||
|
||||
func filterGroups(userGroups, allowedGroups []string) []string {
|
||||
allowed := make(map[string]struct{}, len(allowedGroups))
|
||||
for _, g := range allowedGroups {
|
||||
allowed[g] = struct{}{} // just to make o(1) searches
|
||||
}
|
||||
|
||||
var filtered []string
|
||||
for _, g := range userGroups {
|
||||
if _, ok := allowed[g]; ok {
|
||||
filtered = append(filtered, g)
|
||||
}
|
||||
}
|
||||
|
||||
return filtered
|
||||
}
|
||||
|
||||
@@ -112,7 +112,7 @@ func (b *base) WithUrl(u string) *base {
|
||||
}
|
||||
}
|
||||
|
||||
// WithUrl adds additional messages to the base error and returns a new base error.
|
||||
// WithAdditional adds additional messages to the base error and returns a new base error.
|
||||
func (b *base) WithAdditional(a ...string) *base {
|
||||
return &base{
|
||||
t: b.t,
|
||||
|
||||
@@ -31,7 +31,13 @@ func (plugin *reqResLog) OnRequestStart(request *http.Request) {
|
||||
string(semconv.ServerAddressKey), host,
|
||||
string(semconv.ServerPortKey), port,
|
||||
string(semconv.HTTPRequestSizeKey), request.ContentLength,
|
||||
"http.request.headers", request.Header,
|
||||
}
|
||||
|
||||
// only include all the headers if we are at debug level
|
||||
if plugin.logger.Handler().Enabled(request.Context(), slog.LevelDebug) {
|
||||
fields = append(fields, "http.request.headers", request.Header)
|
||||
} else {
|
||||
fields = append(fields, "http.request.headers", redactSensitiveHeaders(request.Header))
|
||||
}
|
||||
|
||||
plugin.logger.InfoContext(request.Context(), "::SENT-REQUEST::", fields...)
|
||||
@@ -75,3 +81,24 @@ func (plugin *reqResLog) OnError(request *http.Request, err error) {
|
||||
|
||||
plugin.logger.ErrorContext(request.Context(), "::UNABLE-TO-SEND-REQUEST::", fields...)
|
||||
}
|
||||
|
||||
func redactSensitiveHeaders(headers http.Header) http.Header {
|
||||
// maintained list of headers to redact
|
||||
sensitiveHeaders := map[string]bool{
|
||||
"Authorization": true,
|
||||
"Cookie": true,
|
||||
"X-Signoz-Cloud-Api-Key": true,
|
||||
}
|
||||
|
||||
safeHeaders := make(http.Header)
|
||||
|
||||
for header, value := range headers {
|
||||
if sensitiveHeaders[header] {
|
||||
safeHeaders[header] = []string{"REDACTED"}
|
||||
} else {
|
||||
safeHeaders[header] = value
|
||||
}
|
||||
}
|
||||
|
||||
return safeHeaders
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ type Module interface {
|
||||
Delete(context.Context, valuer.UUID, valuer.UUID) error
|
||||
|
||||
// Get the IDP info of the domain provided.
|
||||
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) (*authtypes.AuthNProviderInfo)
|
||||
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) *authtypes.AuthNProviderInfo
|
||||
}
|
||||
|
||||
type Handler interface {
|
||||
|
||||
@@ -123,7 +123,7 @@ func (module *module) DeprecatedCreateSessionByEmailPassword(ctx context.Context
|
||||
}
|
||||
|
||||
if !factorPassword.Equals(password) {
|
||||
return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email orpassword")
|
||||
return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password")
|
||||
}
|
||||
|
||||
identity := authtypes.NewIdentity(users[0].ID, users[0].OrgID, users[0].Email, users[0].Role)
|
||||
@@ -157,7 +157,15 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
|
||||
return "", err
|
||||
}
|
||||
|
||||
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, types.RoleViewer, callbackIdentity.OrgID)
|
||||
authDomain, err := module.authDomain.GetByOrgIDAndID(ctx, callbackIdentity.OrgID, callbackIdentity.State.DomainID)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
roleMapping := authDomain.AuthDomainConfig().RoleMapping
|
||||
role := roleMapping.NewRoleFromCallbackIdentity(callbackIdentity)
|
||||
|
||||
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -887,7 +887,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
|
||||
|
||||
keys := make([]string, 0, len(queryRangeParams.Variables))
|
||||
|
||||
querytemplate.AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
|
||||
querytemplate.AssignReservedVarsV3(queryRangeParams)
|
||||
|
||||
for k := range queryRangeParams.Variables {
|
||||
keys = append(keys, k)
|
||||
@@ -927,7 +927,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
|
||||
continue
|
||||
}
|
||||
|
||||
querytemplate.AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
|
||||
querytemplate.AssignReservedVarsV3(queryRangeParams)
|
||||
|
||||
keys := make([]string, 0, len(queryRangeParams.Variables))
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,17 +1,8 @@
|
||||
package converter
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/errors"
|
||||
|
||||
// Unit represents a unit of measurement
|
||||
type Unit string
|
||||
|
||||
func (u Unit) Validate() error {
|
||||
if !IsValidUnit(u) {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit: %s", u)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Value represents a value with a unit of measurement
|
||||
type Value struct {
|
||||
F float64
|
||||
@@ -69,27 +60,6 @@ func FromUnit(u Unit) Converter {
|
||||
}
|
||||
}
|
||||
|
||||
// IsValidUnit returns true if the given unit is valid
|
||||
func IsValidUnit(u Unit) bool {
|
||||
switch u {
|
||||
// Duration unit
|
||||
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min",
|
||||
// Data unit
|
||||
"bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy",
|
||||
// Data rate unit
|
||||
"binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s",
|
||||
// Percent unit
|
||||
"percent", "percentunit", "%",
|
||||
// Bool unit
|
||||
"bool", "bool_yes_no", "bool_true_false", "bool_1_0",
|
||||
// Throughput unit
|
||||
"cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm", "{count}/s", "{ops}/s", "{req}/s", "{read}/s", "{write}/s", "{iops}/s", "{count}/min", "{ops}/min", "{read}/min", "{write}/min":
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func UnitToName(u string) string {
|
||||
switch u {
|
||||
case "ns":
|
||||
|
||||
@@ -9,9 +9,8 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/converter"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/pkg/errors"
|
||||
"go.uber.org/zap"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
@@ -602,170 +601,43 @@ func (c *CompositeQuery) Sanitize() {
|
||||
|
||||
func (c *CompositeQuery) Validate() error {
|
||||
if c == nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"composite query is required",
|
||||
)
|
||||
return fmt.Errorf("composite query is required")
|
||||
}
|
||||
|
||||
// Validate unit if supplied
|
||||
if c.Unit != "" {
|
||||
unit := converter.Unit(c.Unit)
|
||||
err := unit.Validate()
|
||||
if err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid unit: %s",
|
||||
err.Error(),
|
||||
)
|
||||
if c.BuilderQueries == nil && c.ClickHouseQueries == nil && c.PromQueries == nil && len(c.Queries) == 0 {
|
||||
return fmt.Errorf("composite query must contain at least one query type")
|
||||
}
|
||||
|
||||
if c.QueryType == QueryTypeBuilder {
|
||||
for name, query := range c.BuilderQueries {
|
||||
if err := query.Validate(c.PanelType); err != nil {
|
||||
return fmt.Errorf("builder query %s is invalid: %w", name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.QueryType == QueryTypeClickHouseSQL {
|
||||
for name, query := range c.ClickHouseQueries {
|
||||
if err := query.Validate(); err != nil {
|
||||
return fmt.Errorf("clickhouse query %s is invalid: %w", name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.QueryType == QueryTypePromQL {
|
||||
for name, query := range c.PromQueries {
|
||||
if err := query.Validate(); err != nil {
|
||||
return fmt.Errorf("prom query %s is invalid: %w", name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := c.PanelType.Validate(); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"panel type is invalid: %s",
|
||||
err.Error(),
|
||||
)
|
||||
return fmt.Errorf("panel type is invalid: %w", err)
|
||||
}
|
||||
|
||||
if err := c.QueryType.Validate(); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query type is invalid: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
|
||||
if len(c.Queries) == 0 {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"at least one query is required",
|
||||
)
|
||||
}
|
||||
|
||||
// Validate each query
|
||||
for i, envelope := range c.Queries {
|
||||
queryId := qbtypes.GetQueryIdentifier(envelope, i)
|
||||
|
||||
switch envelope.Type {
|
||||
case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
|
||||
switch spec := envelope.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
queryId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
queryId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
queryId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unknown query spec type for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
case qbtypes.QueryTypePromQL:
|
||||
spec, ok := envelope.Spec.(qbtypes.PromQuery)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if spec.Query == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query expression is required for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := validatePromQLQuery(spec.Query); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if spec.Query == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query expression is required for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := validateClickHouseQuery(spec.Query); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeFormula:
|
||||
spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
if err := spec.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
case qbtypes.QueryTypeTraceOperator:
|
||||
spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator)
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
queryId,
|
||||
)
|
||||
}
|
||||
err := spec.ValidateTraceOperator(c.Queries)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unknown query type '%s' for %s",
|
||||
envelope.Type,
|
||||
queryId,
|
||||
).WithAdditional(
|
||||
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql, trace_operator",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Check if all queries are disabled
|
||||
if allDisabled := checkQueriesDisabled(c); allDisabled {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"all queries are disabled - at least one query must be enabled",
|
||||
)
|
||||
return fmt.Errorf("query type is invalid: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -1331,7 +1203,7 @@ func (f *FilterSet) Scan(src interface{}) error {
|
||||
func (f *FilterSet) Value() (driver.Value, error) {
|
||||
filterSetJson, err := json.Marshal(f)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "could not serialize FilterSet to JSON")
|
||||
return nil, errors.Wrap(err, "could not serialize FilterSet to JSON")
|
||||
}
|
||||
return filterSetJson, nil
|
||||
}
|
||||
|
||||
@@ -1,137 +0,0 @@
|
||||
package v3
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
|
||||
type QueryParseError struct {
|
||||
StartPosition *int
|
||||
EndPosition *int
|
||||
ErrorMessage string
|
||||
Query string
|
||||
}
|
||||
|
||||
func (e *QueryParseError) Error() string {
|
||||
if e.StartPosition != nil && e.EndPosition != nil {
|
||||
return fmt.Sprintf("query parse error: %s at position %d:%d", e.ErrorMessage, *e.StartPosition, *e.EndPosition)
|
||||
}
|
||||
return fmt.Sprintf("query parse error: %s", e.ErrorMessage)
|
||||
}
|
||||
|
||||
// validatePromQLQuery validates a PromQL query syntax using the Prometheus parser
|
||||
func validatePromQLQuery(query string) error {
|
||||
_, err := parser.ParseExpr(query)
|
||||
if err != nil {
|
||||
if syntaxErrs, ok := err.(parser.ParseErrors); ok {
|
||||
syntaxErr := syntaxErrs[0]
|
||||
startPosition := int(syntaxErr.PositionRange.Start)
|
||||
endPosition := int(syntaxErr.PositionRange.End)
|
||||
return &QueryParseError{
|
||||
StartPosition: &startPosition,
|
||||
EndPosition: &endPosition,
|
||||
ErrorMessage: syntaxErr.Error(),
|
||||
Query: query,
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// validateClickHouseQuery validates a ClickHouse SQL query syntax using the ClickHouse parser
|
||||
func validateClickHouseQuery(query string) error {
|
||||
// Assign the default template variables with dummy values
|
||||
variables := make(map[string]interface{})
|
||||
start := time.Now().UnixMilli()
|
||||
end := start + 1000
|
||||
querytemplate.AssignReservedVars(variables, start, end)
|
||||
|
||||
// Apply the values for default template variables before parsing the query
|
||||
tmpl := template.New("clickhouse-query")
|
||||
tmpl, err := tmpl.Parse(query)
|
||||
if err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to parse clickhouse query: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
var queryBuffer bytes.Buffer
|
||||
err = tmpl.Execute(&queryBuffer, variables)
|
||||
if err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to execute clickhouse query template: %s",
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
|
||||
// Parse the ClickHouse query with the default template variables applied
|
||||
p := clickhouse.NewParser(queryBuffer.String())
|
||||
_, err = p.ParseStmts()
|
||||
if err != nil {
|
||||
// TODO: errors returned here is errors.errorString
|
||||
// we should think on using some other library that parses the CH query in more accurate manner,
|
||||
// current CH parser does very minimal checks and on just the known keywords, without validating the syntax of given query.
|
||||
// Sample Error: "line 0:36 expected table name or subquery, got ;\nSELECT department, avg(salary) FROM ;\n ^\n"
|
||||
return &QueryParseError{
|
||||
ErrorMessage: err.Error(),
|
||||
Query: query,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// checkQueriesDisabled checks if all queries are disabled. Returns true if all queries are disabled, false otherwise.
|
||||
func checkQueriesDisabled(compositeQuery *CompositeQuery) bool {
|
||||
for _, envelope := range compositeQuery.Queries {
|
||||
switch envelope.Type {
|
||||
case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
|
||||
switch spec := envelope.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
if !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
if !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
if !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
}
|
||||
case qbtypes.QueryTypeFormula:
|
||||
if spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula); ok && !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
case qbtypes.QueryTypeTraceOperator:
|
||||
if spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator); ok && !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
case qbtypes.QueryTypeJoin:
|
||||
if spec, ok := envelope.Spec.(qbtypes.QueryBuilderJoin); ok && !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
case qbtypes.QueryTypePromQL:
|
||||
if spec, ok := envelope.Spec.(qbtypes.PromQuery); ok && !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
if spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery); ok && !spec.Disabled {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we reach here, all queries are disabled
|
||||
return true
|
||||
}
|
||||
@@ -1,528 +0,0 @@
|
||||
package v3
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestValidateCompositeQuery(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
compositeQuery *CompositeQuery
|
||||
wantErr bool
|
||||
errContains string
|
||||
}{
|
||||
{
|
||||
name: "nil composite query should return error",
|
||||
compositeQuery: nil,
|
||||
wantErr: true,
|
||||
errContains: "composite query is required",
|
||||
},
|
||||
{
|
||||
name: "empty queries array should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "at least one query is required",
|
||||
},
|
||||
{
|
||||
name: "invalid input error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Unit: "some_invalid_unit",
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m])",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "invalid unit",
|
||||
},
|
||||
{
|
||||
name: "valid metric builder query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Unit: "bytes", // valid unit
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "metric_query",
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "valid log builder query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Unit: "µs", // valid unit
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Name: "log_query",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Aggregations: []qbtypes.LogAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "valid trace builder query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Unit: "MBs", // valid unit
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "trace_query",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "valid PromQL query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Unit: "{req}/s", // valid unit
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m])",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "valid ClickHouse query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeClickHouseSQL,
|
||||
Spec: qbtypes.ClickHouseQuery{
|
||||
Name: "ch_query",
|
||||
Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "valid formula query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeFormula,
|
||||
Spec: qbtypes.QueryBuilderFormula{
|
||||
Name: "formula_query",
|
||||
Expression: "A + B",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
// We've not added support for join query yet
|
||||
{
|
||||
name: "valid join query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeJoin,
|
||||
Spec: qbtypes.QueryBuilderJoin{
|
||||
Name: "join_query",
|
||||
Left: qbtypes.QueryRef{Name: "A"},
|
||||
Right: qbtypes.QueryRef{Name: "B"},
|
||||
Type: qbtypes.JoinTypeInner,
|
||||
On: "service_name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "unknown query type",
|
||||
},
|
||||
{
|
||||
name: "valid trace operator query should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeTraceOperator,
|
||||
Spec: qbtypes.QueryBuilderTraceOperator{
|
||||
Name: "trace_operator",
|
||||
Expression: "A && B",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "invalid metric builder query - missing aggregation should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "metric_query",
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []qbtypes.MetricAggregation{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "invalid",
|
||||
},
|
||||
{
|
||||
name: "invalid PromQL query - empty query should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "query expression is required",
|
||||
},
|
||||
{
|
||||
name: "invalid PromQL query - syntax error should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "unclosed left parenthesis",
|
||||
},
|
||||
{
|
||||
name: "invalid ClickHouse query - empty query should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeClickHouseSQL,
|
||||
Spec: qbtypes.ClickHouseQuery{
|
||||
Name: "ch_query",
|
||||
Query: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "query expression is required",
|
||||
},
|
||||
{
|
||||
name: "invalid ClickHouse query - syntax error should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeClickHouseSQL,
|
||||
Spec: qbtypes.ClickHouseQuery{
|
||||
Name: "ch_query",
|
||||
Query: "SELECT * FROM metrics WHERE",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "query parse error",
|
||||
},
|
||||
{
|
||||
name: "invalid formula query - empty expression should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeFormula,
|
||||
Spec: qbtypes.QueryBuilderFormula{
|
||||
Name: "formula_query",
|
||||
Expression: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "formula expression cannot be blank",
|
||||
},
|
||||
{
|
||||
name: "invalid trace operator query - empty expression should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeTraceOperator,
|
||||
Spec: qbtypes.QueryBuilderTraceOperator{
|
||||
Name: "trace_operator",
|
||||
Expression: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "expression cannot be empty",
|
||||
},
|
||||
{
|
||||
name: "all queries disabled should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "metric_query",
|
||||
Disabled: true,
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m])",
|
||||
Disabled: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "all queries are disabled",
|
||||
},
|
||||
{
|
||||
name: "mixed disabled and enabled queries should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "metric_query",
|
||||
Disabled: true,
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m])",
|
||||
Disabled: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "multiple valid queries should pass",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "metric_query",
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m])",
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeClickHouseSQL,
|
||||
Spec: qbtypes.ClickHouseQuery{
|
||||
Name: "ch_query",
|
||||
Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "invalid query in multiple queries should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
|
||||
Name: "metric_query",
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []qbtypes.MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypePromQL,
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "invalid promql syntax [",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "query parse error",
|
||||
},
|
||||
{
|
||||
name: "unknown query type should return error",
|
||||
compositeQuery: &CompositeQuery{
|
||||
QueryType: QueryTypeBuilder,
|
||||
PanelType: PanelTypeGraph,
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryType{String: valuer.NewString("invalid_query_type")},
|
||||
Spec: qbtypes.PromQuery{
|
||||
Name: "prom_query",
|
||||
Query: "rate(http_requests_total[5m])",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errContains: "unknown query type",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := tt.compositeQuery.Validate()
|
||||
if tt.wantErr {
|
||||
require.Error(t, err)
|
||||
if tt.errContains != "" {
|
||||
require.Contains(t, err.Error(), tt.errContains)
|
||||
}
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -133,7 +133,7 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v
|
||||
Variables: make(map[string]interface{}, 0),
|
||||
NoCache: true,
|
||||
}
|
||||
querytemplate.AssignReservedVars(params.Variables, start, end)
|
||||
querytemplate.AssignReservedVarsV3(params)
|
||||
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
|
||||
if chQuery.Disabled {
|
||||
continue
|
||||
|
||||
@@ -2,21 +2,26 @@ package querytemplate
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
)
|
||||
|
||||
func AssignReservedVars(variables map[string]interface{}, start int64, end int64) {
|
||||
variables["start_timestamp"] = start / 1000
|
||||
variables["end_timestamp"] = end / 1000
|
||||
// AssignReservedVars assigns values for go template vars. assumes that
|
||||
// model.QueryRangeParamsV3.Start and End are Unix Nano timestamps
|
||||
func AssignReservedVarsV3(queryRangeParams *v3.QueryRangeParamsV3) {
|
||||
queryRangeParams.Variables["start_timestamp"] = queryRangeParams.Start / 1000
|
||||
queryRangeParams.Variables["end_timestamp"] = queryRangeParams.End / 1000
|
||||
|
||||
variables["start_timestamp_ms"] = start
|
||||
variables["end_timestamp_ms"] = end
|
||||
queryRangeParams.Variables["start_timestamp_ms"] = queryRangeParams.Start
|
||||
queryRangeParams.Variables["end_timestamp_ms"] = queryRangeParams.End
|
||||
|
||||
variables["SIGNOZ_START_TIME"] = start
|
||||
variables["SIGNOZ_END_TIME"] = end
|
||||
queryRangeParams.Variables["SIGNOZ_START_TIME"] = queryRangeParams.Start
|
||||
queryRangeParams.Variables["SIGNOZ_END_TIME"] = queryRangeParams.End
|
||||
|
||||
variables["start_timestamp_nano"] = start * 1e6
|
||||
variables["end_timestamp_nano"] = end * 1e6
|
||||
queryRangeParams.Variables["start_timestamp_nano"] = queryRangeParams.Start * 1e6
|
||||
queryRangeParams.Variables["end_timestamp_nano"] = queryRangeParams.End * 1e6
|
||||
|
||||
queryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.Start/1000)
|
||||
queryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.End/1000)
|
||||
|
||||
variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", start/1000)
|
||||
variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", end/1000)
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
func NewAuthNs(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
|
||||
emailPasswordAuthN := emailpasswordauthn.New(store)
|
||||
|
||||
googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store)
|
||||
googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store, providerSettings)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/tokenizer/jwttokenizer")
|
||||
|
||||
if config.JWT.Secret == "" {
|
||||
settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
|
||||
settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_TOKENIZER_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
|
||||
}
|
||||
|
||||
lastObservedAtCache, err := ristretto.NewCache(&ristretto.Config[string, map[valuer.UUID]time.Time]{
|
||||
|
||||
@@ -32,10 +32,12 @@ type Identity struct {
|
||||
}
|
||||
|
||||
type CallbackIdentity struct {
|
||||
Name string `json:"name"`
|
||||
Email valuer.Email `json:"email"`
|
||||
OrgID valuer.UUID `json:"orgId"`
|
||||
State State `json:"state"`
|
||||
Name string `json:"name"`
|
||||
Email valuer.Email `json:"email"`
|
||||
OrgID valuer.UUID `json:"orgId"`
|
||||
State State `json:"state"`
|
||||
Groups []string `json:"groups,omitempty"`
|
||||
Role string `json:"role,omitempty"`
|
||||
}
|
||||
|
||||
type State struct {
|
||||
@@ -85,12 +87,14 @@ func NewIdentity(userID valuer.UUID, orgID valuer.UUID, email valuer.Email, role
|
||||
}
|
||||
}
|
||||
|
||||
func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State) *CallbackIdentity {
|
||||
func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State, groups []string, role string) *CallbackIdentity {
|
||||
return &CallbackIdentity{
|
||||
Name: name,
|
||||
Email: email,
|
||||
OrgID: orgID,
|
||||
State: state,
|
||||
Name: name,
|
||||
Email: email,
|
||||
OrgID: orgID,
|
||||
State: state,
|
||||
Groups: groups,
|
||||
Role: role,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -63,6 +63,7 @@ type AuthDomainConfig struct {
|
||||
SAML *SamlConfig `json:"samlConfig"`
|
||||
Google *GoogleConfig `json:"googleAuthConfig"`
|
||||
OIDC *OIDCConfig `json:"oidcConfig"`
|
||||
RoleMapping *RoleMapping `json:"roleMapping"`
|
||||
}
|
||||
|
||||
type AuthDomain struct {
|
||||
|
||||
@@ -2,10 +2,14 @@ package authtypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
const wildCardDomain = "*"
|
||||
|
||||
type GoogleConfig struct {
|
||||
// ClientID is the application's ID. For example, 292085223830.apps.googleusercontent.com.
|
||||
ClientID string `json:"clientId"`
|
||||
@@ -15,6 +19,30 @@ type GoogleConfig struct {
|
||||
|
||||
// What is the meaning of this? Should we remove this?
|
||||
RedirectURI string `json:"redirectURI"`
|
||||
|
||||
// Whether to fetch the Google workspace groups (required additional API scopes)
|
||||
FetchGroups bool `json:"fetchGroups"`
|
||||
|
||||
// Service Account creds JSON stored for Google Admin SDK access
|
||||
// This is content of the JSON file stored directly into db as string
|
||||
// Required if FetchGroups is true (unless running on GCE with default credentials)
|
||||
ServiceAccountJSON string `json:"serviceAccountJson,omitempty"`
|
||||
|
||||
// Map of workspace domain to admin email for service account impersonation
|
||||
// The service account will impersonate this admin to call the directory API
|
||||
// Use "*" as key for wildcard/default that matches any domain
|
||||
// Example: {"example.com": "admin@exmaple.com", "*": "fallbackadmin@company.com"}
|
||||
DomainToAdminEmail map[string]valuer.Email `json:"domainToAdminEmail,omitempty"`
|
||||
|
||||
// If true, fetch transitive group membership (recursive - groups that contains other groups)
|
||||
FetchTransitiveGroupMembership bool `json:"fetchTransitiveGroupMembership,omitempty"`
|
||||
|
||||
// Optional list of allowed groups
|
||||
// If this is present, only users belonging to one of these groups will be allowed to login
|
||||
AllowedGroups []string `json:"allowedGroups,omitempty"`
|
||||
|
||||
// Whether to skip email verification. Defaults to "false"
|
||||
InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"`
|
||||
}
|
||||
|
||||
func (config *GoogleConfig) UnmarshalJSON(data []byte) error {
|
||||
@@ -33,6 +61,37 @@ func (config *GoogleConfig) UnmarshalJSON(data []byte) error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required")
|
||||
}
|
||||
|
||||
if temp.FetchGroups {
|
||||
if len(temp.DomainToAdminEmail) == 0 {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "domainToAdminEmail is required if fetchGroups is true")
|
||||
}
|
||||
|
||||
if temp.ServiceAccountJSON == "" {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "serviceAccountJSON is required if fetchGroups is true")
|
||||
}
|
||||
}
|
||||
|
||||
if len(temp.AllowedGroups) > 0 && !temp.FetchGroups {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "fetchGroups must be true when allowedGroups is configured")
|
||||
}
|
||||
|
||||
*config = GoogleConfig(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (config *GoogleConfig) GetAdminEmailForDomain(userEmail string) string {
|
||||
domain := extractDomainFromEmail(userEmail)
|
||||
|
||||
if adminEmail, ok := config.DomainToAdminEmail[domain]; ok {
|
||||
return adminEmail.StringValue()
|
||||
}
|
||||
|
||||
return config.DomainToAdminEmail[wildCardDomain].StringValue()
|
||||
}
|
||||
|
||||
func extractDomainFromEmail(email string) string {
|
||||
if at := strings.LastIndex(email, "@"); at >= 0 {
|
||||
return email[at+1:]
|
||||
}
|
||||
return wildCardDomain
|
||||
}
|
||||
|
||||
133
pkg/types/authtypes/mapping.go
Normal file
133
pkg/types/authtypes/mapping.go
Normal file
@@ -0,0 +1,133 @@
|
||||
package authtypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
)
|
||||
|
||||
type AttributeMapping struct {
|
||||
// Key which contains the email in the claim/token/attributes map. Defaults to "email"
|
||||
Email string `json:"email"`
|
||||
|
||||
// Key which contains the name in the claim/token/attributes map. Defaults to "name"
|
||||
Name string `json:"name"`
|
||||
|
||||
// Key which contains the groups in the claim/token/attributes map. Defaults to "groups"
|
||||
Groups string `json:"groups"`
|
||||
|
||||
// Key which contains the role in the claim/token/attributes map. Defaults to "role"
|
||||
Role string `json:"role"`
|
||||
}
|
||||
|
||||
func (attr *AttributeMapping) UnmarshalJSON(data []byte) error {
|
||||
type Alias AttributeMapping
|
||||
|
||||
var temp Alias
|
||||
if err := json.Unmarshal(data, &temp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if temp.Email == "" {
|
||||
temp.Email = "email"
|
||||
}
|
||||
|
||||
if temp.Name == "" {
|
||||
temp.Name = "name"
|
||||
}
|
||||
|
||||
if temp.Groups == "" {
|
||||
temp.Groups = "groups"
|
||||
}
|
||||
|
||||
if temp.Role == "" {
|
||||
temp.Role = "role"
|
||||
}
|
||||
|
||||
*attr = AttributeMapping(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
type RoleMapping struct {
|
||||
// Default role any new SSO users. Defaults to "VIEWER"
|
||||
DefaultRole string `json:"defaultRole"`
|
||||
// Map of IDP group names to SigNoz roles. Key is group name, value is SigNoz role
|
||||
GroupMappings map[string]string `json:"groupMappings"`
|
||||
// If true, use the role claim directly from IDP instead of group mappings
|
||||
UseRoleAttribute bool `json:"useRoleAttribute"`
|
||||
}
|
||||
|
||||
func (typ *RoleMapping) UnmarshalJSON(data []byte) error {
|
||||
type Alias RoleMapping
|
||||
|
||||
var temp Alias
|
||||
if err := json.Unmarshal(data, &temp); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if temp.DefaultRole != "" {
|
||||
if _, err := types.NewRole(strings.ToUpper(temp.DefaultRole)); err != nil {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid default role %s", temp.DefaultRole)
|
||||
}
|
||||
}
|
||||
|
||||
for group, role := range temp.GroupMappings {
|
||||
if _, err := types.NewRole(strings.ToUpper(role)); err != nil {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid role %s for group %s", role, group)
|
||||
}
|
||||
}
|
||||
|
||||
*typ = RoleMapping(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (roleMapping *RoleMapping) NewRoleFromCallbackIdentity(callbackIdentity *CallbackIdentity) types.Role {
|
||||
if roleMapping == nil {
|
||||
return types.RoleViewer
|
||||
}
|
||||
|
||||
if roleMapping.UseRoleAttribute && callbackIdentity.Role != "" {
|
||||
if role, err := types.NewRole(strings.ToUpper(callbackIdentity.Role)); err == nil {
|
||||
return role
|
||||
}
|
||||
}
|
||||
|
||||
if len(roleMapping.GroupMappings) > 0 && len(callbackIdentity.Groups) > 0 {
|
||||
highestRole := types.RoleViewer
|
||||
found := false
|
||||
|
||||
for _, group := range callbackIdentity.Groups {
|
||||
if mappedRole, exists := roleMapping.GroupMappings[group]; exists {
|
||||
found = true
|
||||
if role, err := types.NewRole(strings.ToUpper(mappedRole)); err == nil {
|
||||
if compareRoles(role, highestRole) > 0 {
|
||||
highestRole = role
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if found {
|
||||
return highestRole
|
||||
}
|
||||
}
|
||||
|
||||
if roleMapping.DefaultRole != "" {
|
||||
if role, err := types.NewRole(strings.ToUpper(roleMapping.DefaultRole)); err == nil {
|
||||
return role
|
||||
}
|
||||
}
|
||||
|
||||
return types.RoleViewer
|
||||
}
|
||||
|
||||
func compareRoles(a, b types.Role) int {
|
||||
order := map[types.Role]int{
|
||||
types.RoleViewer: 0,
|
||||
types.RoleEditor: 1,
|
||||
types.RoleAdmin: 2,
|
||||
}
|
||||
return order[a] - order[b]
|
||||
}
|
||||
@@ -22,7 +22,7 @@ type OIDCConfig struct {
|
||||
ClientSecret string `json:"clientSecret"`
|
||||
|
||||
// Mapping of claims to the corresponding fields in the token.
|
||||
ClaimMapping ClaimMapping `json:"claimMapping"`
|
||||
ClaimMapping AttributeMapping `json:"claimMapping"`
|
||||
|
||||
// Whether to skip email verification. Defaults to "false"
|
||||
InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"`
|
||||
@@ -31,11 +31,6 @@ type OIDCConfig struct {
|
||||
GetUserInfo bool `json:"getUserInfo"`
|
||||
}
|
||||
|
||||
type ClaimMapping struct {
|
||||
// Configurable key which contains the email claims. Defaults to "email"
|
||||
Email string `json:"email"`
|
||||
}
|
||||
|
||||
func (config *OIDCConfig) UnmarshalJSON(data []byte) error {
|
||||
type Alias OIDCConfig
|
||||
|
||||
@@ -56,8 +51,10 @@ func (config *OIDCConfig) UnmarshalJSON(data []byte) error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required")
|
||||
}
|
||||
|
||||
if temp.ClaimMapping.Email == "" {
|
||||
temp.ClaimMapping.Email = "email"
|
||||
if temp.ClaimMapping == (AttributeMapping{}) {
|
||||
if err := json.Unmarshal([]byte("{}"), &temp.ClaimMapping); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*config = OIDCConfig(temp)
|
||||
|
||||
@@ -20,6 +20,9 @@ type SamlConfig struct {
|
||||
// For providers like jumpcloud, this should be set to true.
|
||||
// Note: This is the reverse of WantAuthnRequestsSigned. If WantAuthnRequestsSigned is false, then InsecureSkipAuthNRequestsSigned should be true.
|
||||
InsecureSkipAuthNRequestsSigned bool `json:"insecureSkipAuthNRequestsSigned"`
|
||||
|
||||
// Mapping of SAML assertion attributes
|
||||
AttributeMapping AttributeMapping `json:"attributeMapping"`
|
||||
}
|
||||
|
||||
func (config *SamlConfig) UnmarshalJSON(data []byte) error {
|
||||
@@ -42,6 +45,12 @@ func (config *SamlConfig) UnmarshalJSON(data []byte) error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "samlCert is required")
|
||||
}
|
||||
|
||||
if temp.AttributeMapping == (AttributeMapping{}) {
|
||||
if err := json.Unmarshal([]byte("{}"), &temp.AttributeMapping); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*config = SamlConfig(temp)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -553,11 +553,6 @@ func (f Function) Copy() Function {
|
||||
return c
|
||||
}
|
||||
|
||||
// Validate validates the Function by calling Validate on its Name
|
||||
func (f Function) Validate() error {
|
||||
return f.Name.Validate()
|
||||
}
|
||||
|
||||
type LimitBy struct {
|
||||
// keys to limit by
|
||||
Keys []string `json:"keys"`
|
||||
|
||||
@@ -73,43 +73,6 @@ func (f *QueryBuilderFormula) UnmarshalJSON(data []byte) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Validate checks if the QueryBuilderFormula fields are valid
|
||||
func (f QueryBuilderFormula) Validate() error {
|
||||
// Validate name is not blank
|
||||
if strings.TrimSpace(f.Name) == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"formula name cannot be blank",
|
||||
)
|
||||
}
|
||||
|
||||
// Validate expression is not blank
|
||||
if strings.TrimSpace(f.Expression) == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"formula expression cannot be blank",
|
||||
)
|
||||
}
|
||||
|
||||
// Validate functions if present
|
||||
for i, fn := range f.Functions {
|
||||
if err := fn.Validate(); err != nil {
|
||||
fnId := fmt.Sprintf("function #%d", i+1)
|
||||
if f.Name != "" {
|
||||
fnId = fmt.Sprintf("function #%d in formula '%s'", i+1, f.Name)
|
||||
}
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid %s: %s",
|
||||
fnId,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// small container to store the query name and index or alias reference
|
||||
// for a variable in the formula expression
|
||||
// read below for more details on aggregation references
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"slices"
|
||||
"strconv"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -34,37 +33,6 @@ var (
|
||||
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
|
||||
)
|
||||
|
||||
// Validate checks if the FunctionName is valid and one of the known types
|
||||
func (fn FunctionName) Validate() error {
|
||||
switch fn {
|
||||
case FunctionNameCutOffMin,
|
||||
FunctionNameCutOffMax,
|
||||
FunctionNameClampMin,
|
||||
FunctionNameClampMax,
|
||||
FunctionNameAbsolute,
|
||||
FunctionNameRunningDiff,
|
||||
FunctionNameLog2,
|
||||
FunctionNameLog10,
|
||||
FunctionNameCumulativeSum,
|
||||
FunctionNameEWMA3,
|
||||
FunctionNameEWMA5,
|
||||
FunctionNameEWMA7,
|
||||
FunctionNameMedian3,
|
||||
FunctionNameMedian5,
|
||||
FunctionNameMedian7,
|
||||
FunctionNameTimeShift,
|
||||
FunctionNameAnomaly,
|
||||
FunctionNameFillZero:
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid function name: %s",
|
||||
fn.StringValue(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// ApplyFunction applies the given function to the result data
|
||||
func ApplyFunction(fn Function, result *TimeSeries) *TimeSeries {
|
||||
// Extract the function name and arguments
|
||||
|
||||
@@ -10,8 +10,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
// GetQueryIdentifier returns a friendly identifier for a query based on its type and name/content
|
||||
func GetQueryIdentifier(envelope QueryEnvelope, index int) string {
|
||||
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
|
||||
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
|
||||
switch envelope.Type {
|
||||
case QueryTypeBuilder, QueryTypeSubQuery:
|
||||
switch spec := envelope.Spec.(type) {
|
||||
@@ -567,7 +567,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
switch spec := envelope.Spec.(type) {
|
||||
case QueryBuilderQuery[TraceAggregation]:
|
||||
if err := spec.Validate(r.RequestType); err != nil {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return wrapValidationError(err, queryId, "invalid %s: %s")
|
||||
}
|
||||
// Check name uniqueness for non-formula context
|
||||
@@ -583,7 +583,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
}
|
||||
case QueryBuilderQuery[LogAggregation]:
|
||||
if err := spec.Validate(r.RequestType); err != nil {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return wrapValidationError(err, queryId, "invalid %s: %s")
|
||||
}
|
||||
// Check name uniqueness for non-formula context
|
||||
@@ -599,7 +599,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
}
|
||||
case QueryBuilderQuery[MetricAggregation]:
|
||||
if err := spec.Validate(r.RequestType); err != nil {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return wrapValidationError(err, queryId, "invalid %s: %s")
|
||||
}
|
||||
// Check name uniqueness for non-formula context
|
||||
@@ -614,7 +614,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
queryNames[spec.Name] = true
|
||||
}
|
||||
default:
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unknown spec type for %s",
|
||||
@@ -625,7 +625,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
// Formula validation is handled separately
|
||||
spec, ok := envelope.Spec.(QueryBuilderFormula)
|
||||
if !ok {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
@@ -633,7 +633,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
)
|
||||
}
|
||||
if spec.Expression == "" {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"expression is required for %s",
|
||||
@@ -644,7 +644,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
// Join validation is handled separately
|
||||
_, ok := envelope.Spec.(QueryBuilderJoin)
|
||||
if !ok {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
@@ -654,7 +654,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
case QueryTypeTraceOperator:
|
||||
spec, ok := envelope.Spec.(QueryBuilderTraceOperator)
|
||||
if !ok {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
@@ -662,7 +662,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
)
|
||||
}
|
||||
if spec.Expression == "" {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"expression is required for %s",
|
||||
@@ -673,7 +673,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
// PromQL validation is handled separately
|
||||
spec, ok := envelope.Spec.(PromQuery)
|
||||
if !ok {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
@@ -681,7 +681,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
)
|
||||
}
|
||||
if spec.Query == "" {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query expression is required for %s",
|
||||
@@ -692,7 +692,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
// ClickHouse SQL validation is handled separately
|
||||
spec, ok := envelope.Spec.(ClickHouseQuery)
|
||||
if !ok {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"invalid spec for %s",
|
||||
@@ -700,7 +700,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
)
|
||||
}
|
||||
if spec.Query == "" {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"query expression is required for %s",
|
||||
@@ -708,7 +708,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
|
||||
)
|
||||
}
|
||||
default:
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"unknown query type '%s' for %s",
|
||||
@@ -735,7 +735,7 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
|
||||
// Validate each query
|
||||
for i, envelope := range c.Queries {
|
||||
if err := validateQueryEnvelope(envelope, requestType); err != nil {
|
||||
queryId := GetQueryIdentifier(envelope, i)
|
||||
queryId := getQueryIdentifier(envelope, i)
|
||||
return wrapValidationError(err, queryId, "invalid %s: %s")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -405,7 +405,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
|
||||
"spec": [{
|
||||
"name": "existing_threshold",
|
||||
"target": 50.0,
|
||||
"targetUnit": "MBs",
|
||||
"targetUnit": "MB",
|
||||
"ruleUnit": "bytes",
|
||||
"matchType": "1",
|
||||
"op": "1"
|
||||
|
||||
@@ -252,15 +252,6 @@ func (b BasicRuleThreshold) Validate() error {
|
||||
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid match type: %s", string(b.MatchType)))
|
||||
}
|
||||
|
||||
// Only validate unit if specified
|
||||
if b.TargetUnit != "" {
|
||||
unit := converter.Unit(b.TargetUnit)
|
||||
err := unit.Validate()
|
||||
if err != nil {
|
||||
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit"))
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Join(errs...)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from typing import Any, Callable, Dict
|
||||
from urllib.parse import urljoin
|
||||
from typing import Any, Callable, Dict, List
|
||||
from urllib.parse import urljoin, urlparse
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import pytest
|
||||
@@ -114,6 +114,43 @@ def create_saml_client(
|
||||
"attribute.name": "Role",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "groups",
|
||||
"protocol": "saml",
|
||||
"protocolMapper": "saml-group-membership-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"full.path": "false",
|
||||
"attribute.nameformat": "Basic",
|
||||
"single": "true", # ! this was changed to true as we need the groups in the single attribute section
|
||||
"friendly.name": "groups",
|
||||
"attribute.name": "groups",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "role attribute",
|
||||
"protocol": "saml",
|
||||
"protocolMapper": "saml-user-attribute-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"attribute.nameformat": "Basic",
|
||||
"user.attribute": "signoz_role",
|
||||
"friendly.name": "signoz_role",
|
||||
"attribute.name": "signoz_role",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "displayName",
|
||||
"protocol": "saml",
|
||||
"protocolMapper": "saml-user-property-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"attribute.nameformat": "Basic",
|
||||
"user.attribute": "firstName",
|
||||
"friendly.name": "displayName",
|
||||
"attribute.name": "displayName",
|
||||
},
|
||||
},
|
||||
],
|
||||
"defaultClientScopes": ["saml_organization", "role_list"],
|
||||
"optionalClientScopes": [],
|
||||
@@ -163,6 +200,8 @@ def create_oidc_client(
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
_ensure_groups_client_scope(client)
|
||||
|
||||
client.create_client(
|
||||
skip_exists=True,
|
||||
payload={
|
||||
@@ -208,6 +247,7 @@ def create_oidc_client(
|
||||
"profile",
|
||||
"basic",
|
||||
"email",
|
||||
"groups",
|
||||
],
|
||||
"optionalClientScopes": [
|
||||
"address",
|
||||
@@ -282,7 +322,7 @@ def get_oidc_settings(idp: types.TestContainerIDP) -> dict:
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_idp", scope="function")
|
||||
def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], None]:
|
||||
def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool, str, str], None]:
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
@@ -292,17 +332,20 @@ def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], N
|
||||
|
||||
created_users = []
|
||||
|
||||
def _create_user_idp(email: str, password: str, verified: bool = True) -> None:
|
||||
user_id = client.create_user(
|
||||
exist_ok=False,
|
||||
payload={
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
},
|
||||
)
|
||||
def _create_user_idp(email: str, password: str, verified: bool = True, first_name: str = "", last_name: str = "") -> None:
|
||||
payload = {
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
}
|
||||
|
||||
if first_name:
|
||||
payload["firstName"] = first_name
|
||||
if last_name:
|
||||
payload["lastName"] = last_name
|
||||
|
||||
user_id = client.create_user(exist_ok=False, payload=payload)
|
||||
client.set_user_password(user_id, password, temporary=False)
|
||||
created_users.append(user_id)
|
||||
|
||||
@@ -333,3 +376,342 @@ def idp_login(driver: webdriver.Chrome) -> Callable[[str, str], None]:
|
||||
wait.until(EC.invisibility_of_element((By.ID, "kc-login")))
|
||||
|
||||
return _idp_login
|
||||
|
||||
|
||||
@pytest.fixture(name="create_group_idp", scope="function")
|
||||
def create_group_idp(idp: types.TestContainerIDP) -> Callable[[str], str]:
|
||||
"""Creates a group in Keycloak IDP."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
created_groups = []
|
||||
|
||||
def _create_group_idp(group_name: str) -> str:
|
||||
group_id = client.create_group({"name": group_name}, skip_exists=True)
|
||||
created_groups.append(group_id)
|
||||
return group_id
|
||||
|
||||
yield _create_group_idp
|
||||
|
||||
for group_id in created_groups:
|
||||
try:
|
||||
client.delete_group(group_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_idp_with_groups", scope="function")
|
||||
def create_user_idp_with_groups(
|
||||
idp: types.TestContainerIDP,
|
||||
create_group_idp: Callable[[str], str], # pylint: disable=redefined-outer-name
|
||||
) -> Callable[[str, str, bool, List[str]], None]:
|
||||
"""Creates a user in Keycloak IDP with specified groups."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
created_users = []
|
||||
|
||||
def _create_user_idp_with_groups(
|
||||
email: str, password: str, verified: bool, groups: List[str]
|
||||
) -> None:
|
||||
# Create groups first
|
||||
group_ids = []
|
||||
for group_name in groups:
|
||||
group_id = create_group_idp(group_name)
|
||||
group_ids.append(group_id)
|
||||
|
||||
# Create user
|
||||
user_id = client.create_user(
|
||||
exist_ok=False,
|
||||
payload={
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
},
|
||||
)
|
||||
client.set_user_password(user_id, password, temporary=False)
|
||||
created_users.append(user_id)
|
||||
|
||||
# Add user to groups
|
||||
for group_id in group_ids:
|
||||
client.group_user_add(user_id, group_id)
|
||||
|
||||
yield _create_user_idp_with_groups
|
||||
|
||||
for user_id in created_users:
|
||||
try:
|
||||
client.delete_user(user_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(name="add_user_to_group", scope="function")
|
||||
def add_user_to_group(
|
||||
idp: types.TestContainerIDP,
|
||||
create_group_idp: Callable[[str], str], # pylint: disable=redefined-outer-name
|
||||
) -> Callable[[str, str], None]:
|
||||
"""Adds an existing user to a group."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
def _add_user_to_group(email: str, group_name: str) -> None:
|
||||
user_id = client.get_user_id(email)
|
||||
group_id = create_group_idp(group_name)
|
||||
client.group_user_add(user_id, group_id)
|
||||
|
||||
return _add_user_to_group
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_idp_with_role", scope="function")
|
||||
def create_user_idp_with_role(
|
||||
idp: types.TestContainerIDP,
|
||||
create_group_idp: Callable[[str], str], # pylint: disable=redefined-outer-name
|
||||
) -> Callable[[str, str, bool, str, List[str]], None]:
|
||||
"""Creates a user in Keycloak IDP with a custom role attribute and optional groups."""
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
created_users = []
|
||||
|
||||
def _create_user_idp_with_role(
|
||||
email: str, password: str, verified: bool, role: str, groups: List[str]
|
||||
) -> None:
|
||||
# Create groups first
|
||||
group_ids = []
|
||||
for group_name in groups:
|
||||
group_id = create_group_idp(group_name)
|
||||
group_ids.append(group_id)
|
||||
|
||||
# Create user with role attribute
|
||||
user_id = client.create_user(
|
||||
exist_ok=False,
|
||||
payload={
|
||||
"username": email,
|
||||
"email": email,
|
||||
"enabled": True,
|
||||
"emailVerified": verified,
|
||||
"attributes": {
|
||||
"signoz_role": role,
|
||||
},
|
||||
},
|
||||
)
|
||||
client.set_user_password(user_id, password, temporary=False)
|
||||
created_users.append(user_id)
|
||||
|
||||
# Add user to groups
|
||||
for group_id in group_ids:
|
||||
client.group_user_add(user_id, group_id)
|
||||
|
||||
yield _create_user_idp_with_role
|
||||
|
||||
for user_id in created_users:
|
||||
try:
|
||||
client.delete_user(user_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(name="setup_user_profile", scope="package")
|
||||
def setup_user_profile(idp: types.TestContainerIDP) -> Callable[[], None]:
|
||||
"""Setup Keycloak User Profile with signoz_role attribute."""
|
||||
def _setup_user_profile() -> None:
|
||||
client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
|
||||
# Get current user profile config
|
||||
profile = client.get_realm_users_profile()
|
||||
|
||||
# Check if signoz_role attribute already exists
|
||||
attributes = profile.get("attributes", [])
|
||||
signoz_role_exists = any(attr.get("name") == "signoz_role" for attr in attributes)
|
||||
|
||||
if not signoz_role_exists:
|
||||
# Add signoz_role attribute to user profile
|
||||
attributes.append({
|
||||
"name": "signoz_role",
|
||||
"displayName": "SigNoz Role",
|
||||
"validations": {},
|
||||
"annotations": {},
|
||||
# "required": {
|
||||
# "roles": [] # Not required
|
||||
# },
|
||||
"permissions": {
|
||||
"view": ["admin", "user"],
|
||||
"edit": ["admin"]
|
||||
},
|
||||
"multivalued": False
|
||||
})
|
||||
profile["attributes"] = attributes
|
||||
|
||||
# Update the realm user profile
|
||||
client.update_realm_users_profile(payload=profile)
|
||||
|
||||
return _setup_user_profile
|
||||
|
||||
|
||||
def _ensure_groups_client_scope(client: KeycloakAdmin) -> None:
|
||||
"""Create 'groups' client scope if it doesn't exist."""
|
||||
# Check if groups scope exists
|
||||
scopes = client.get_client_scopes()
|
||||
groups_scope_exists = any(s.get("name") == "groups" for s in scopes)
|
||||
|
||||
if not groups_scope_exists:
|
||||
# Create the groups client scope
|
||||
client.create_client_scope(
|
||||
payload={
|
||||
"name": "groups",
|
||||
"description": "Group membership",
|
||||
"protocol": "openid-connect",
|
||||
"attributes": {
|
||||
"include.in.token.scope": "true",
|
||||
"display.on.consent.screen": "true",
|
||||
},
|
||||
"protocolMappers": [
|
||||
{
|
||||
"name": "groups",
|
||||
"protocol": "openid-connect",
|
||||
"protocolMapper": "oidc-group-membership-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"full.path": "false",
|
||||
"id.token.claim": "true",
|
||||
"access.token.claim": "true",
|
||||
"claim.name": "groups",
|
||||
"userinfo.token.claim": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "signoz_role",
|
||||
"protocol": "openid-connect",
|
||||
"protocolMapper": "oidc-usermodel-attribute-mapper",
|
||||
"consentRequired": False,
|
||||
"config": {
|
||||
"user.attribute": "signoz_role",
|
||||
"id.token.claim": "true",
|
||||
"access.token.claim": "true",
|
||||
"claim.name": "signoz_role",
|
||||
"userinfo.token.claim": "true",
|
||||
"jsonType.label": "String",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
skip_exists=True,
|
||||
)
|
||||
|
||||
|
||||
def get_oidc_domain(signoz: types.SigNoz, admin_token: str) -> dict:
|
||||
"""Helper to get the OIDC domain."""
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
return next(
|
||||
(
|
||||
domain
|
||||
for domain in response.json()["data"]
|
||||
if domain["name"] == "oidc.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def get_user_by_email(signoz: types.SigNoz, admin_token: str, email: str) -> dict:
|
||||
"""Helper to get a user by email."""
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
return next(
|
||||
(user for user in response.json()["data"] if user["email"] == email),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def perform_oidc_login(
|
||||
signoz: types.SigNoz, # pylint: disable=unused-argument
|
||||
idp: types.TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
get_session_context: Callable[[str], str],
|
||||
idp_login: Callable[[str, str], None], # pylint: disable=redefined-outer-name
|
||||
email: str,
|
||||
password: str,
|
||||
) -> None:
|
||||
"""Helper to perform OIDC login flow."""
|
||||
session_context = get_session_context(email)
|
||||
url = session_context["orgs"][0]["authNSupport"]["callback"][0]["url"]
|
||||
parsed_url = urlparse(url)
|
||||
actual_url = (
|
||||
f"{idp.container.host_configs['6060'].get(parsed_url.path)}?{parsed_url.query}"
|
||||
)
|
||||
driver.get(actual_url)
|
||||
idp_login(email, password)
|
||||
|
||||
|
||||
def get_saml_domain(signoz: types.SigNoz, admin_token: str) -> dict:
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
return next(
|
||||
(
|
||||
domain
|
||||
for domain in response.json()["data"]
|
||||
if domain["name"] == "saml.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
|
||||
def perform_saml_login(
|
||||
signoz: types.SigNoz, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
get_session_context: Callable[[str], str],
|
||||
idp_login: Callable[[str, str], None], # pylint: disable=redefined-outer-name
|
||||
email: str,
|
||||
password: str,
|
||||
) -> None:
|
||||
session_context = get_session_context(email)
|
||||
url = session_context["orgs"][0]["authNSupport"]["callback"][0]["url"]
|
||||
driver.get(url)
|
||||
idp_login(email, password)
|
||||
|
||||
|
||||
def delete_keycloak_client(idp: types.TestContainerIDP, client_id: str) -> None:
|
||||
keycloak_client = KeycloakAdmin(
|
||||
server_url=idp.container.host_configs["6060"].base(),
|
||||
username=IDP_ROOT_USERNAME,
|
||||
password=IDP_ROOT_PASSWORD,
|
||||
realm_name="master",
|
||||
)
|
||||
try:
|
||||
# Get the internal Keycloak client ID from the clientId
|
||||
internal_client_id = keycloak_client.get_client_id(client_id=client_id)
|
||||
if internal_client_id:
|
||||
keycloak_client.delete_client(internal_client_id)
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
pass # Client doesn't exist or already deleted, that's fine
|
||||
|
||||
@@ -65,6 +65,7 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
|
||||
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
|
||||
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
|
||||
"SIGNOZ_GATEWAY_URL": gateway.container_configs["8080"].base(),
|
||||
"SIGNOZ_TOKENIZER_JWT_SECRET": "secret",
|
||||
}
|
||||
| sqlstore.env
|
||||
| clickhouse.env
|
||||
|
||||
@@ -78,11 +78,12 @@ def test_create_and_get_domain(
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.json()["status"] == "success"
|
||||
data = response.json()["data"]
|
||||
|
||||
assert len(data) == 2
|
||||
assert data[0]["name"] == "domain-google.integration.test"
|
||||
assert data[0]["ssoType"] == "google_auth"
|
||||
assert data[1]["name"] == "domain-saml.integration.test"
|
||||
assert data[1]["ssoType"] == "saml"
|
||||
|
||||
for domain in data:
|
||||
assert domain["name"] in ["domain-google.integration.test", "domain-saml.integration.test"]
|
||||
assert domain["ssoType"] in ["google_auth", "saml"]
|
||||
|
||||
|
||||
def test_create_invalid(
|
||||
@@ -165,3 +166,91 @@ def test_create_invalid(
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
|
||||
def test_create_invalid_role_mapping(
|
||||
signoz: SigNoz,
|
||||
create_user_admin: Operation, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Test that invalid role mappings are rejected."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Create domain with invalid defaultRole
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
json={
|
||||
"name": "invalid-role-test.integration.test",
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": "saml-entity",
|
||||
"samlIdp": "saml-idp",
|
||||
"samlCert": "saml-cert",
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "SUPERADMIN", # Invalid role
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
# Create domain with invalid role in groupMappings
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
json={
|
||||
"name": "invalid-group-role.integration.test",
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": "saml-entity",
|
||||
"samlIdp": "saml-idp",
|
||||
"samlCert": "saml-cert",
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"admins": "SUPERUSER", # Invalid role
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
# Valid role mapping should succeed
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/domains"),
|
||||
json={
|
||||
"name": "valid-role-mapping.integration.test",
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": "saml-entity",
|
||||
"samlIdp": "saml-idp",
|
||||
"samlCert": "saml-cert",
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
|
||||
@@ -4,12 +4,14 @@ from typing import Any, Callable, Dict, List
|
||||
import requests
|
||||
from selenium import webdriver
|
||||
from wiremock.resources.mappings import Mapping
|
||||
import uuid
|
||||
|
||||
from fixtures.auth import (
|
||||
USER_ADMIN_EMAIL,
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.idputils import get_saml_domain, perform_saml_login, get_user_by_email, delete_keycloak_client
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
|
||||
|
||||
@@ -102,7 +104,7 @@ def test_saml_authn(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str], None],
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
@@ -150,7 +152,7 @@ def test_idp_initiated_saml_authn(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str], None],
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
@@ -195,3 +197,348 @@ def test_idp_initiated_saml_authn(
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_update_domain_with_group_mappings(
|
||||
signoz: SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_saml_settings: Callable[[], dict],
|
||||
) -> None:
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_saml_domain(signoz, admin_token)
|
||||
settings = get_saml_settings()
|
||||
|
||||
# update the existing saml domain to have role mappings also
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": settings["entityID"],
|
||||
"samlIdp": settings["singleSignOnServiceLocation"],
|
||||
"samlCert": settings["certificate"],
|
||||
"attributeMapping": {
|
||||
"name": "givenName",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
"signoz-viewers": "VIEWER",
|
||||
},
|
||||
"useRoleAttribute": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_saml_role_mapping_single_group_admin(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in 'signoz-admins' group gets ADMIN role.
|
||||
"""
|
||||
email = "admin-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-admins"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_saml_role_mapping_single_group_editor(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in 'signoz-editors' group gets EDITOR role.
|
||||
"""
|
||||
email = "editor-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-editors"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_saml_role_mapping_multiple_groups_highest_wins(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in multiple groups gets highest role.
|
||||
User is in both 'signoz-viewers' and 'signoz-editors'.
|
||||
Expected: User gets EDITOR (highest of VIEWER and EDITOR).
|
||||
"""
|
||||
email = f"multi-group-user-{uuid.uuid4().hex[:8]}@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-viewers", "signoz-editors"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_saml_role_mapping_explicit_viewer_group(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User explicitly mapped to VIEWER via groups should get VIEWER.
|
||||
This tests the bug where VIEWER group mappings were incorrectly ignored.
|
||||
"""
|
||||
email = "viewer-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["signoz-viewers"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_role_mapping_unmapped_group_uses_default(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: User in unmapped group falls back to default role (VIEWER).
|
||||
"""
|
||||
email = "unmapped-group-user@saml.integration.test"
|
||||
create_user_idp_with_groups(email, "password", True, ["some-other-group"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_update_domain_with_use_role_claim(
|
||||
signoz: SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_saml_settings: Callable[[], dict],
|
||||
) -> None:
|
||||
"""
|
||||
Updates SAML domain to enable useRoleAttribute (direct role attribute).
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_saml_domain(signoz, admin_token)
|
||||
settings = get_saml_settings()
|
||||
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "saml",
|
||||
"samlConfig": {
|
||||
"samlEntity": settings["entityID"],
|
||||
"samlIdp": settings["singleSignOnServiceLocation"],
|
||||
"samlCert": settings["certificate"],
|
||||
"attributeMapping": {
|
||||
"name": "displayName",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
},
|
||||
"useRoleAttribute": True,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_saml_role_mapping_role_claim_takes_precedence(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: useRoleAttribute takes precedence over group mappings.
|
||||
User is in 'signoz-editors' group but has role attribute 'ADMIN'.
|
||||
Expected: User gets ADMIN (from role attribute).
|
||||
"""
|
||||
|
||||
setup_user_profile()
|
||||
|
||||
email = "role-claim-precedence@saml.integration.test"
|
||||
create_user_idp_with_role(email, "password", True, "ADMIN", ["signoz-editors"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_saml_role_mapping_invalid_role_claim_fallback(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: Invalid role claim falls back to group mappings.
|
||||
User has invalid role 'SUPERADMIN' and is in 'signoz-editors'.
|
||||
Expected: User gets EDITOR (from group mapping).
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "invalid-role-user@saml.integration.test"
|
||||
create_user_idp_with_role(email, "password", True, "SUPERADMIN", ["signoz-editors"])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_saml_role_mapping_case_insensitive(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
setup_user_profile: Callable[[], None],
|
||||
) -> None:
|
||||
"""
|
||||
Test: Role attribute matching is case-insensitive.
|
||||
User has role 'admin' (lowercase).
|
||||
Expected: User gets ADMIN role.
|
||||
"""
|
||||
setup_user_profile()
|
||||
email = "lowercase-role-user@saml.integration.test"
|
||||
create_user_idp_with_role(email, "password", True, "admin", [])
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_saml_name_mapping(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""Test that user's display name is mapped from SAML displayName attribute."""
|
||||
email = "named-user@saml.integration.test"
|
||||
|
||||
create_user_idp(email, "password", True, "Jane", "Smith")
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["displayName"] == "Jane" # We are only mapping the first name here
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_saml_empty_name_fallback(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""Test that user without displayName in IDP still gets created."""
|
||||
email = "no-name@saml.integration.test"
|
||||
|
||||
create_user_idp(email, "password", True)
|
||||
|
||||
perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
@@ -11,6 +11,7 @@ from fixtures.auth import (
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.idputils import get_oidc_domain, get_user_by_email, perform_oidc_login, delete_keycloak_client
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
|
||||
|
||||
@@ -75,7 +76,7 @@ def test_oidc_authn(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP, # pylint: disable=unused-argument
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp: Callable[[str, str, bool], None],
|
||||
create_user_idp: Callable[[str, str, bool, str, str], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
@@ -127,3 +128,385 @@ def test_oidc_authn(
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_oidc_update_domain_with_group_mappings(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
get_token: Callable[[str, str], str],
|
||||
get_oidc_settings: Callable[[str], dict],
|
||||
) -> None:
|
||||
"""
|
||||
Updates OIDC domain to add role mapping with group mappings and claim mapping.
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
domain = get_oidc_domain(signoz, admin_token)
|
||||
client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}"
|
||||
settings = get_oidc_settings(client_id)
|
||||
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
|
||||
json={
|
||||
"config": {
|
||||
"ssoEnabled": True,
|
||||
"ssoType": "oidc",
|
||||
"oidcConfig": {
|
||||
"clientId": settings["client_id"],
|
||||
"clientSecret": settings["client_secret"],
|
||||
"issuer": f"{idp.container.container_configs['6060'].get(urlparse(settings['issuer']).path)}",
|
||||
"issuerAlias": settings["issuer"],
|
||||
"getUserInfo": True,
|
||||
"claimMapping": {
|
||||
"email": "email",
|
||||
"name": "name",
|
||||
"groups": "groups",
|
||||
"role": "signoz_role",
|
||||
},
|
||||
},
|
||||
"roleMapping": {
|
||||
"defaultRole": "VIEWER",
|
||||
"groupMappings": {
|
||||
"signoz-admins": "ADMIN",
|
||||
"signoz-editors": "EDITOR",
|
||||
"signoz-viewers": "VIEWER",
|
||||
},
|
||||
"useRoleAttribute": False,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
|
||||
def test_oidc_role_mapping_single_group_admin(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in 'signoz-admins' group gets ADMIN role.
|
||||
"""
|
||||
email = "admin-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-admins"])
|
||||
|
||||
perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_single_group_editor(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in 'signoz-editors' group gets EDITOR role.
|
||||
"""
|
||||
email = "editor-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-editors"])
|
||||
|
||||
perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_multiple_groups_highest_wins(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in multiple groups gets highest role.
|
||||
User is in 'signoz-viewers' and 'signoz-admins'.
|
||||
Expected: User gets ADMIN (highest of the two).
|
||||
"""
|
||||
email = "multi-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-viewers", "signoz-admins"])
|
||||
|
||||
perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_explicit_viewer_group(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user explicitly mapped to VIEWER via groups gets VIEWER.
|
||||
Tests the bug where VIEWER mappings were ignored.
|
||||
"""
|
||||
email = "viewer-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["signoz-viewers"])
|
||||
|
||||
perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_oidc_role_mapping_unmapped_group_uses_default(
|
||||
signoz: SigNoz,
|
||||
idp: TestContainerIDP,
|
||||
driver: webdriver.Chrome,
|
||||
create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
|
||||
idp_login: Callable[[str, str], None],
|
||||
get_token: Callable[[str, str], str],
|
||||
get_session_context: Callable[[str], str],
|
||||
) -> None:
|
||||
"""
|
||||
Test: OIDC user in unmapped group falls back to default role.
|
||||
"""
|
||||
email = "unmapped-group-user@oidc.integration.test"
|
||||
create_user_idp_with_groups(email, "password123", True, ["some-other-group"])
|
||||
|
||||
perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
|
||||
|
||||
def test_oidc_update_domain_with_use_role_claim(
    signoz: SigNoz,
    idp: TestContainerIDP,
    get_token: Callable[[str, str], str],
    get_oidc_settings: Callable[[str], dict],
) -> None:
    """
    Update the OIDC auth domain to enable ``useRoleAttribute`` so that a
    role claim (``signoz_role``) takes effect, while keeping group mappings
    and the VIEWER default role in place.

    The role-claim tests that follow depend on this configuration change.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    domain = get_oidc_domain(signoz, admin_token)
    client_id = f"oidc.integration.test.{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}"
    settings = get_oidc_settings(client_id)

    response = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
        json={
            "config": {
                "ssoEnabled": True,
                "ssoType": "oidc",
                "oidcConfig": {
                    "clientId": settings["client_id"],
                    "clientSecret": settings["client_secret"],
                    # Issuer points at the IDP container endpoint; issuerAlias
                    # preserves the externally advertised issuer for token checks.
                    "issuer": f"{idp.container.container_configs['6060'].get(urlparse(settings['issuer']).path)}",
                    "issuerAlias": settings["issuer"],
                    "getUserInfo": True,
                    "claimMapping": {
                        "email": "email",
                        "name": "name",
                        "groups": "groups",
                        "role": "signoz_role",
                    },
                },
                "roleMapping": {
                    "defaultRole": "VIEWER",
                    "groupMappings": {
                        "signoz-admins": "ADMIN",
                        "signoz-editors": "EDITOR",
                    },
                    "useRoleAttribute": True,
                },
            },
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        # Was timeout=2: inconsistent with the timeout=5 used by every other
        # HTTP call in this module and flaky-tight for an integration PUT.
        timeout=5,
    )

    assert response.status_code == HTTPStatus.NO_CONTENT
def test_oidc_role_mapping_role_claim_takes_precedence(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """
    Verify that with ``useRoleAttribute`` enabled, the role claim wins over
    group mappings: a user in 'signoz-editors' whose role claim is 'ADMIN'
    must end up with ADMIN.
    """
    setup_user_profile()

    user_email = "role-claim-precedence@oidc.integration.test"
    # Role claim ADMIN conflicts with the EDITOR group mapping on purpose.
    create_user_idp_with_role(user_email, "password123", True, "ADMIN", ["signoz-editors"])

    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, "password123"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)

    assert user is not None
    assert user["role"] == "ADMIN"
def test_oidc_role_mapping_invalid_role_claim_fallback(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """
    Verify that an unrecognized role claim is ignored and group mappings
    apply instead: role 'SUPERADMIN' plus membership in 'signoz-editors'
    must yield EDITOR.
    """
    setup_user_profile()

    user_email = "invalid-role-user@oidc.integration.test"
    # 'SUPERADMIN' is not a valid SigNoz role; the group mapping should win.
    create_user_idp_with_role(user_email, "password123", True, "SUPERADMIN", ["signoz-editors"])

    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, "password123"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)

    assert user is not None
    assert user["role"] == "EDITOR"
def test_oidc_role_mapping_case_insensitive(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """
    Verify that role-claim matching ignores case: a lowercase 'editor'
    claim must map to the EDITOR role.
    """
    setup_user_profile()

    user_email = "lowercase-role-user@oidc.integration.test"
    # No groups: the (lowercase) role claim is the only mapping input.
    create_user_idp_with_role(user_email, "password123", True, "editor", [])

    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, "password123"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)

    assert user is not None
    assert user["role"] == "EDITOR"
def test_oidc_name_mapping(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], dict],
) -> None:
    """
    Verify that the user's display name is populated from the IDP's 'name'
    claim (Keycloak concatenates firstName and lastName into it).
    """
    email = "named-user@oidc.integration.test"

    # Provision an IDP user with an explicit first and last name.
    create_user_idp(
        email,
        "password123",
        True,
        first_name="John",
        last_name="Doe",
    )

    perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")

    # Fetch the full user list as admin and locate the freshly created user.
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/user"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )

    assert response.status_code == HTTPStatus.OK

    found_user = None
    for candidate in response.json()["data"]:
        if candidate["email"] == email:
            found_user = candidate
            break

    assert found_user is not None
    # Keycloak's 'name' claim is "firstName lastName".
    assert found_user["displayName"] == "John Doe"
    assert found_user["role"] == "VIEWER"  # Default role
def test_oidc_empty_name_uses_fallback(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], dict],
) -> None:
    """
    Verify that a user with no name set in the IDP is still provisioned on
    login (the displayName may be empty — known limitation).
    """
    email = "no-name@oidc.integration.test"

    # Provision the IDP user without a first or last name.
    create_user_idp(email, "password123", True)

    perform_oidc_login(signoz, idp, driver, get_session_context, idp_login, email, "password123")

    # Confirm via the admin API that the user exists with the default role.
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/user"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )

    assert response.status_code == HTTPStatus.OK

    found_user = None
    for candidate in response.json()["data"]:
        if candidate["email"] == email:
            found_user = candidate
            break

    # Creation must succeed even when the name claim is absent/empty.
    assert found_user is not None
    assert found_user["role"] == "VIEWER"
    # Note: displayName may be empty here — known limitation, not asserted.
Reference in New Issue
Block a user