Compare commits

..

13 Commits

Author SHA1 Message Date
grandwizard28
6bfc4b4dff feat: move fields api to openapi spec 2026-01-20 19:04:37 +05:30
Pandey
445b0cace8 chore: add codeowners for scaffold (#10055) 2026-01-20 12:19:14 +00:00
Pandey
132f10f8a3 feat(binding): add support for query params (#10053)
- add support for query params in the binding package.
2026-01-20 11:59:12 +00:00
Srikanth Chekuri
14011bc277 fix: do not sort in descending locally if the other is explicitly spe… (#10033) 2026-01-20 11:17:08 +00:00
aniketio-ctrl
f17a332c23 feat(license-section): add section to view license key (#10039)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
* feat(license-section): added section to view license key

* feat(license-key): add license key page

* feat(license-section): added section to view license key

* feat(license-section): added section to view license key

* feat(license-section): added section to view license key

* feat(license-section): added section to view license key

* feat(license-section): resolved comments

* feat(license-section): resolved fmt error
2026-01-20 16:05:57 +05:30
Aditya Singh
5ae7a464e6 Fix Cmd/ctrl + Enter in qb to run query (#10048)
* feat: cmd enter in qb

* feat: cmd enter test added

* feat: update test case

* feat: update test case

* feat: minor refactor

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2026-01-20 13:30:54 +05:30
Pandey
51c3628f6e fix(signoz): remove version check at the beginning (#10046) 2026-01-20 06:56:06 +00:00
Aditya Singh
6a69076828 Add attribute action in common col of span details drawer (#10017)
* feat: add attribute action in common col of span details drawer

* feat: added test case
2026-01-20 05:44:31 +00:00
Aditya Singh
edd04e2f07 fix: fix auto collapse fields when emptied (#9988)
* fix: fix auto collapse fields when emptied

* fix: revert mocks

* fix: minor fix

* fix: add check for key count change to setSelectedView

* feat: revert change

* feat: instead of count check actual keys
2026-01-20 05:32:13 +00:00
Srikanth Chekuri
ee734cf78c chore: return original error message with hints for invalid promql query (#10034)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
one step towards better experience for https://github.com/SigNoz/signoz/issues/9764
2026-01-20 03:04:59 +05:30
Karan Balani
6d137bcdff feat: idp attributes mapping (#9841) 2026-01-19 22:27:21 +05:30
Ashwin Bhatkal
444161671d fix: query variable referencing custom variable (#10023)
* fix: custom variable to query mapping

* chore: add test
2026-01-19 13:52:58 +00:00
Ashwin Bhatkal
31e9e896ec fix: dashboard - textbox default variable not working (#9843) 2026-01-19 18:23:17 +05:30
101 changed files with 6780 additions and 1612 deletions

9
.github/CODEOWNERS vendored
View File

@@ -55,7 +55,6 @@
/pkg/telemetrymetrics/ @srikanthccv
/pkg/telemetrytraces/ @srikanthccv
# Metrics
/pkg/types/metrictypes/ @srikanthccv
@@ -91,6 +90,14 @@
# AuthN / AuthZ Owners
/pkg/authz/ @vikrantgupta25
/ee/authz/ @vikrantgupta25
/pkg/authn/ @vikrantgupta25
/ee/authn/ @vikrantgupta25
/pkg/modules/user/ @vikrantgupta25
/pkg/modules/session/ @vikrantgupta25
/pkg/modules/organization/ @vikrantgupta25
/pkg/modules/authdomain/ @vikrantgupta25
/pkg/modules/role/ @vikrantgupta25
# Integration tests

View File

@@ -86,7 +86,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
SIGNOZ_WEB_ENABLED=false \
SIGNOZ_JWT_SECRET=secret \
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
@@ -103,7 +103,7 @@ go-run-community: ## Runs the community go backend server
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
SIGNOZ_WEB_ENABLED=false \
SIGNOZ_JWT_SECRET=secret \
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \

View File

@@ -195,7 +195,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- SIGNOZ_JWT_SECRET=secret
- SIGNOZ_TOKENIZER_JWT_SECRET=secret
- DOT_METRICS_ENABLED=true
healthcheck:
test:

View File

@@ -607,6 +607,186 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/fields/keys:
get:
deprecated: false
description: This endpoint returns field keys
operationId: GetFieldsKeys
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- content:
application/json:
schema:
$ref: '#/components/schemas/TelemetrytypesMetricContext'
in: query
name: metricContext
- in: query
name: name
schema:
type: string
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldKeys'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field keys
tags:
- fields
/api/v1/fields/values:
get:
deprecated: false
description: This endpoint returns field values
operationId: GetFieldsValues
parameters:
- in: query
name: signal
schema:
type: string
- in: query
name: source
schema:
type: string
- in: query
name: limit
schema:
type: integer
- in: query
name: startUnixMilli
schema:
format: int64
type: integer
- in: query
name: endUnixMilli
schema:
format: int64
type: integer
- in: query
name: fieldContext
schema:
type: string
- in: query
name: fieldDataType
schema:
type: string
- content:
application/json:
schema:
$ref: '#/components/schemas/TelemetrytypesMetricContext'
in: query
name: metricContext
- in: query
name: name
schema:
type: string
- in: query
name: searchText
schema:
type: string
- in: query
name: existingQuery
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TelemetrytypesGettableFieldValues'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get field values
tags:
- fields
/api/v1/getResetPasswordToken/{id}:
get:
deprecated: false
@@ -2736,12 +2916,25 @@ paths:
- sessions
components:
schemas:
AuthtypesAttributeMapping:
properties:
email:
type: string
groups:
type: string
name:
type: string
role:
type: string
type: object
AuthtypesAuthDomainConfig:
properties:
googleAuthConfig:
$ref: '#/components/schemas/AuthtypesGoogleConfig'
oidcConfig:
$ref: '#/components/schemas/AuthtypesOIDCConfig'
roleMapping:
$ref: '#/components/schemas/AuthtypesRoleMapping'
samlConfig:
$ref: '#/components/schemas/AuthtypesSamlConfig'
ssoEnabled:
@@ -2775,11 +2968,6 @@ components:
url:
type: string
type: object
AuthtypesClaimMapping:
properties:
email:
type: string
type: object
AuthtypesDeprecatedGettableLogin:
properties:
accessJwt:
@@ -2811,6 +2999,8 @@ components:
$ref: '#/components/schemas/AuthtypesOIDCConfig'
orgId:
type: string
roleMapping:
$ref: '#/components/schemas/AuthtypesRoleMapping'
samlConfig:
$ref: '#/components/schemas/AuthtypesSamlConfig'
ssoEnabled:
@@ -2834,17 +3024,33 @@ components:
type: object
AuthtypesGoogleConfig:
properties:
allowedGroups:
items:
type: string
type: array
clientId:
type: string
clientSecret:
type: string
domainToAdminEmail:
additionalProperties:
type: string
type: object
fetchGroups:
type: boolean
fetchTransitiveGroupMembership:
type: boolean
insecureSkipEmailVerified:
type: boolean
redirectURI:
type: string
serviceAccountJson:
type: string
type: object
AuthtypesOIDCConfig:
properties:
claimMapping:
$ref: '#/components/schemas/AuthtypesClaimMapping'
$ref: '#/components/schemas/AuthtypesAttributeMapping'
clientId:
type: string
clientSecret:
@@ -2895,8 +3101,22 @@ components:
refreshToken:
type: string
type: object
AuthtypesRoleMapping:
properties:
defaultRole:
type: string
groupMappings:
additionalProperties:
type: string
nullable: true
type: object
useRoleAttribute:
type: boolean
type: object
AuthtypesSamlConfig:
properties:
attributeMapping:
$ref: '#/components/schemas/AuthtypesAttributeMapping'
insecureSkipAuthNRequestsSigned:
type: boolean
samlCert:
@@ -3341,6 +3561,65 @@ components:
status:
type: string
type: object
TelemetrytypesGettableFieldKeys:
properties:
complete:
type: boolean
keys:
additionalProperties:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
nullable: true
type: object
type: object
TelemetrytypesGettableFieldValues:
properties:
complete:
type: boolean
values:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldValues'
type: object
TelemetrytypesMetricContext:
properties:
metricName:
type: string
type: object
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
type: string
fieldDataType:
type: string
name:
type: string
signal:
type: string
unit:
type: string
type: object
TelemetrytypesTelemetryFieldValues:
properties:
boolValues:
items:
type: boolean
type: array
numberValues:
items:
format: double
type: number
type: array
relatedValues:
items:
type: string
type: array
stringValues:
items:
type: string
type: array
type: object
TypesChangePasswordRequest:
properties:
newPassword:

View File

@@ -2,6 +2,7 @@ package oidccallbackauthn
import (
"context"
"fmt"
"net/url"
"github.com/SigNoz/signoz/pkg/authn"
@@ -19,25 +20,27 @@ const (
redirectPath string = "/api/v1/complete/oidc"
)
var (
scopes []string = []string{"email", oidc.ScopeOpenID}
)
var defaultScopes []string = []string{"email", "profile", oidc.ScopeOpenID}
var _ authn.CallbackAuthN = (*AuthN)(nil)
type AuthN struct {
settings factory.ScopedProviderSettings
store authtypes.AuthNStore
licensing licensing.Licensing
httpClient *client.Client
}
func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn")
httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider)
if err != nil {
return nil, err
}
return &AuthN{
settings: settings,
store: store,
licensing: licensing,
httpClient: httpClient,
@@ -126,7 +129,40 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
}
}
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
name := ""
if nameClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Name; nameClaim != "" {
if n, ok := claims[nameClaim].(string); ok {
name = n
}
}
var groups []string
if groupsClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Groups; groupsClaim != "" {
if claimValue, exists := claims[groupsClaim]; exists {
switch g := claimValue.(type) {
case []any:
for _, group := range g {
if gs, ok := group.(string); ok {
groups = append(groups, gs)
}
}
case string:
// Some IDPs return a single group as a string instead of an array
groups = append(groups, g)
default:
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
}
}
}
role := ""
if roleClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Role; roleClaim != "" {
if r, ok := claims[roleClaim].(string); ok {
role = r
}
}
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
}
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
@@ -145,6 +181,13 @@ func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.UR
return nil, nil, err
}
scopes := make([]string, len(defaultScopes))
copy(scopes, defaultScopes)
if authDomain.AuthDomainConfig().RoleMapping != nil && len(authDomain.AuthDomainConfig().RoleMapping.GroupMappings) > 0 {
scopes = append(scopes, "groups")
}
return oidcProvider, &oauth2.Config{
ClientID: authDomain.AuthDomainConfig().OIDC.ClientID,
ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret,

View File

@@ -96,7 +96,26 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.")
}
return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
name := ""
if nameAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Name; nameAttribute != "" {
if val := assertionInfo.Values.Get(nameAttribute); val != "" {
name = val
}
}
var groups []string
if groupAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Groups; groupAttribute != "" {
groups = assertionInfo.Values.GetAll(groupAttribute)
}
role := ""
if roleAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Role; roleAttribute != "" {
if val := assertionInfo.Values.Get(roleAttribute); val != "" {
role = val
}
}
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
}
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -54,7 +53,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
FluxInterval: opts.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),

View File

@@ -236,7 +236,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
apiHandler.RegisterFieldsRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)

View File

@@ -211,7 +211,10 @@ describe('VariableItem Integration Tests', () => {
await user.clear(textInput);
await user.type(textInput, 'new-text-value');
// Should call onValueUpdate after debounce
// Blur the input to trigger the value update
await user.tab();
// Should call onValueUpdate after blur
await waitFor(
() => {
expect(mockOnValueUpdate).toHaveBeenCalledWith(

View File

@@ -4,7 +4,7 @@ import { OPERATORS, PANEL_TYPES } from 'constants/queryBuilder';
import { Formula } from 'container/QueryBuilder/components/Formula';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { memo, useEffect, useMemo, useRef } from 'react';
import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
import { IBuilderTraceOperator } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -33,6 +33,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
addTraceOperator,
panelType,
initialDataSource,
handleRunQuery,
} = useQueryBuilder();
const containerRef = useRef(null);
@@ -157,10 +158,29 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
[showTraceOperator, traceOperator, hasAtLeastOneTraceQuery],
);
const handleKeyDown = useCallback(
(e: React.KeyboardEvent<HTMLDivElement>): void => {
const target = e.target as HTMLElement | null;
const tagName = target?.tagName || '';
const isInputElement =
['INPUT', 'TEXTAREA', 'SELECT'].includes(tagName) ||
(target?.getAttribute('contenteditable') || '').toLowerCase() === 'true';
// Allow input elements in qb to run the query when Cmd/Ctrl + Enter is pressed
if (isInputElement && (e.metaKey || e.ctrlKey) && e.key === 'Enter') {
e.preventDefault();
e.stopPropagation();
handleRunQuery();
}
},
[handleRunQuery],
);
return (
<QueryBuilderV2Provider>
<div className="query-builder-v2">
<div className="qb-content-container">
<div className="qb-content-container" onKeyDownCapture={handleKeyDown}>
{!isMultiQueryAllowed ? (
<QueryV2
ref={containerRef}

View File

@@ -11,7 +11,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { get, isEmpty } from 'lodash-es';
import { BarChart2, ChevronUp, ExternalLink, ScrollText } from 'lucide-react';
import { useCallback, useEffect, useState } from 'react';
import { useCallback, useEffect, useRef, useState } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { MetricAggregation } from 'types/api/v5/queryRange';
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
@@ -171,6 +171,9 @@ function QueryAddOns({
const [selectedViews, setSelectedViews] = useState<AddOn[]>([]);
const initializedRef = useRef(false);
const prevAvailableKeysRef = useRef<Set<string> | null>(null);
const { handleChangeQueryData } = useQueryOperations({
index,
query,
@@ -213,23 +216,41 @@ function QueryAddOns({
}
setAddOns(filteredAddOns);
const activeAddOnKeys = new Set(
Object.entries(ADD_ONS_KEYS_TO_QUERY_PATH)
.filter(([, path]) => hasValue(get(query, path)))
.map(([key]) => key),
);
const availableAddOnKeys = new Set(filteredAddOns.map((a) => a.key));
const previousKeys = prevAvailableKeysRef.current;
const hasAvailabilityItemsChanged =
previousKeys !== null &&
(previousKeys.size !== availableAddOnKeys.size ||
[...availableAddOnKeys].some((key) => !previousKeys.has(key)));
prevAvailableKeysRef.current = availableAddOnKeys;
const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key));
// Filter and set selected views: add-ons that are both active and available
setSelectedViews(
filteredAddOns.filter(
(addOn) =>
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
if (!initializedRef.current || hasAvailabilityItemsChanged) {
initializedRef.current = true;
const activeAddOnKeys = new Set(
Object.entries(ADD_ONS_KEYS_TO_QUERY_PATH)
.filter(([, path]) => hasValue(get(query, path)))
.map(([key]) => key),
);
// Initial seeding from query values on mount
setSelectedViews(
filteredAddOns.filter(
(addOn) =>
activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key),
),
);
return;
}
setSelectedViews((prev) =>
prev.filter((view) =>
filteredAddOns.some((addOn) => addOn.key === view.key),
),
);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [panelType, isListViewPanel, query]);
}, [panelType, isListViewPanel, query, showReduceTo]);
const handleOptionClick = (e: RadioChangeEvent): void => {
if (selectedViews.find((view) => view.key === e.target.value.key)) {

View File

@@ -1379,8 +1379,6 @@ function QuerySearch({
run: (): boolean => {
if (onRun && typeof onRun === 'function') {
onRun(getCurrentExpression());
} else {
handleRunQuery();
}
return true;
},

View File

@@ -410,8 +410,6 @@ function TraceOperatorEditor({
run: (): boolean => {
if (onRun && typeof onRun === 'function') {
onRun(value);
} else {
handleRunQuery();
}
return true;
},

View File

@@ -270,44 +270,6 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
await waitFor(() => expect(onRun).toHaveBeenCalled(), { timeout: 2000 });
});
it('calls handleRunQuery when Mod-Enter without onRun', async () => {
const mockedHandleRunQuery = handleRunQueryMock as jest.MockedFunction<
() => void
>;
mockedHandleRunQuery.mockClear();
render(
<QuerySearch
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
queryData={initialQueriesMap.logs.builder.queryData[0]}
dataSource={DataSource.LOGS}
/>,
);
// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await userEvent.click(editor);
await userEvent.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
fireEvent.keyDown(editor, {
key: 'Enter',
code: 'Enter',
[modKey]: true,
keyCode: 13,
});
await waitFor(() => expect(mockedHandleRunQuery).toHaveBeenCalled(), {
timeout: 2000,
});
});
it('initializes CodeMirror with expression from queryData.filter.expression on mount', async () => {
const testExpression =
"http.status_code >= 500 AND service.name = 'frontend'";

View File

@@ -3,14 +3,21 @@
import '@testing-library/jest-dom';
import { jest } from '@jest/globals';
import { fireEvent, waitFor } from '@testing-library/react';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { render, screen } from 'tests/test-utils';
import { Having, IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { render, screen, userEvent } from 'tests/test-utils';
import {
Having,
IBuilderQuery,
Query,
} from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { UseQueryOperations } from 'types/common/operations.types';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { QueryV2 } from '../QueryV2';
import { QueryBuilderV2 } from '../../QueryBuilderV2';
// Local mocks for domain-specific heavy child components
jest.mock(
@@ -36,16 +43,87 @@ const mockedUseQueryOperations = jest.mocked(
useQueryOperations,
) as jest.MockedFunction<UseQueryOperations>;
describe('QueryV2 - base render', () => {
describe('QueryBuilderV2 + QueryV2 - base render', () => {
let handleRunQueryMock: jest.MockedFunction<() => void>;
beforeEach(() => {
const mockCloneQuery = jest.fn() as jest.MockedFunction<
(type: string, q: IBuilderQuery) => void
>;
handleRunQueryMock = jest.fn() as jest.MockedFunction<() => void>;
const baseQuery: IBuilderQuery = {
queryName: 'A',
dataSource: DataSource.LOGS,
aggregateOperator: '',
aggregations: [],
timeAggregation: '',
spaceAggregation: '',
temporality: '',
functions: [],
filter: undefined,
filters: { items: [], op: 'AND' },
groupBy: [],
expression: '',
disabled: false,
having: [] as Having[],
limit: 10,
stepInterval: null,
orderBy: [],
legend: 'A',
};
const currentQueryObj: Query = {
id: 'test',
unit: undefined,
queryType: EQueryType.CLICKHOUSE,
promql: [],
clickhouse_sql: [],
builder: {
queryData: [baseQuery],
queryFormulas: [],
queryTraceOperator: [],
},
};
const updateAllQueriesOperators: QueryBuilderContextType['updateAllQueriesOperators'] = (
q,
) => q;
const updateQueriesData: QueryBuilderContextType['updateQueriesData'] = (q) =>
q;
mockedUseQueryBuilder.mockReturnValue(({
// Only fields used by QueryV2
currentQuery: currentQueryObj,
stagedQuery: null,
lastUsedQuery: null,
setLastUsedQuery: jest.fn(),
supersetQuery: currentQueryObj,
setSupersetQuery: jest.fn(),
initialDataSource: null,
panelType: PANEL_TYPES.TABLE,
isEnabledQuery: true,
handleSetQueryData: jest.fn(),
handleSetTraceOperatorData: jest.fn(),
handleSetFormulaData: jest.fn(),
handleSetQueryItemData: jest.fn(),
handleSetConfig: jest.fn(),
removeQueryBuilderEntityByIndex: jest.fn(),
removeAllQueryBuilderEntities: jest.fn(),
removeQueryTypeItemByIndex: jest.fn(),
addNewBuilderQuery: jest.fn(),
addNewFormula: jest.fn(),
removeTraceOperator: jest.fn(),
addTraceOperator: jest.fn(),
cloneQuery: mockCloneQuery,
panelType: null,
addNewQueryItem: jest.fn(),
redirectWithQueryBuilderData: jest.fn(),
handleRunQuery: handleRunQueryMock,
resetQuery: jest.fn(),
handleOnUnitsChange: jest.fn(),
updateAllQueriesOperators,
updateQueriesData,
initQueryBuilderData: jest.fn(),
isStagedQueryUpdated: jest.fn(() => false),
isDefaultQuery: jest.fn(() => false),
} as unknown) as QueryBuilderContextType);
mockedUseQueryOperations.mockReturnValue({
@@ -71,40 +149,7 @@ describe('QueryV2 - base render', () => {
});
it('renders limit input when dataSource is logs', () => {
const baseQuery: IBuilderQuery = {
queryName: 'A',
dataSource: DataSource.LOGS,
aggregateOperator: '',
aggregations: [],
timeAggregation: '',
spaceAggregation: '',
temporality: '',
functions: [],
filter: undefined,
filters: { items: [], op: 'AND' },
groupBy: [],
expression: '',
disabled: false,
having: [] as Having[],
limit: 10,
stepInterval: null,
orderBy: [],
legend: 'A',
};
render(
<QueryV2
index={0}
isAvailableToDisable
query={baseQuery}
version="v4"
onSignalSourceChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
signalSourceChangeEnabled={false}
queriesCount={1}
showTraceOperator={false}
hasTraceOperator={false}
/>,
);
render(<QueryBuilderV2 panelType={PANEL_TYPES.TABLE} version="v4" />);
// Ensure the Limit add-on input is present and is of type number
const limitInput = screen.getByPlaceholderText(
@@ -115,4 +160,43 @@ describe('QueryV2 - base render', () => {
expect(limitInput).toHaveAttribute('name', 'limit');
expect(limitInput).toHaveAttribute('data-testid', 'input-Limit');
});
it('Cmd+Enter on an input triggers handleRunQuery via container handler', async () => {
render(<QueryBuilderV2 panelType={PANEL_TYPES.TABLE} version="v4" />);
const limitInput = screen.getByPlaceholderText('Enter limit');
fireEvent.keyDown(limitInput, {
key: 'Enter',
code: 'Enter',
metaKey: true,
});
expect(handleRunQueryMock).toHaveBeenCalled();
const legendInput = screen.getByPlaceholderText('Write legend format');
fireEvent.keyDown(legendInput, {
key: 'Enter',
code: 'Enter',
metaKey: true,
});
expect(handleRunQueryMock).toHaveBeenCalled();
const CM_EDITOR_SELECTOR = '.cm-editor .cm-content';
// Wait for CodeMirror to initialize
await waitFor(() => {
const editor = document.querySelector(CM_EDITOR_SELECTOR);
expect(editor).toBeInTheDocument();
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await userEvent.click(editor);
fireEvent.keyDown(editor, {
key: 'Enter',
code: 'Enter',
metaKey: true,
});
expect(handleRunQueryMock).toHaveBeenCalled();
});
});

View File

@@ -1,6 +1,12 @@
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable sonarjs/no-duplicate-string */
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
} from 'tests/test-utils';
import {
IDashboardVariable,
TSortVariableValuesType,
@@ -639,4 +645,186 @@ describe('VariableItem Component', () => {
await expectCircularDependencyError();
});
});
describe('Textbox Variable Default Value Handling', () => {
test('saves textbox variable with defaultValue and selectedValue set to textboxValue', async () => {
const user = userEvent.setup();
const textboxVariable: IDashboardVariable = {
id: TEST_VAR_IDS.VAR1,
name: TEST_VAR_NAMES.VAR1,
description: 'Test Textbox Variable',
type: 'TEXTBOX',
textboxValue: 'my-default-value',
...VARIABLE_DEFAULTS,
order: 0,
};
renderVariableItem(textboxVariable);
// Click save button
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
await user.click(saveButton);
// Verify that onSave was called with defaultValue and selectedValue equal to textboxValue
expect(onSave).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
type: 'TEXTBOX',
textboxValue: 'my-default-value',
defaultValue: 'my-default-value',
selectedValue: 'my-default-value',
}),
expect.anything(),
);
});
test('saves textbox variable with empty values when textboxValue is empty', async () => {
const user = userEvent.setup();
const textboxVariable: IDashboardVariable = {
id: TEST_VAR_IDS.VAR1,
name: TEST_VAR_NAMES.VAR1,
description: 'Test Textbox Variable',
type: 'TEXTBOX',
textboxValue: '',
...VARIABLE_DEFAULTS,
order: 0,
};
renderVariableItem(textboxVariable);
// Click save button
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
await user.click(saveButton);
// Verify that onSave was called with empty defaultValue and selectedValue
expect(onSave).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
type: 'TEXTBOX',
textboxValue: '',
defaultValue: '',
selectedValue: '',
}),
expect.anything(),
);
});
test('updates textbox defaultValue and selectedValue when user changes textboxValue input', async () => {
const user = userEvent.setup();
const textboxVariable: IDashboardVariable = {
id: TEST_VAR_IDS.VAR1,
name: TEST_VAR_NAMES.VAR1,
description: 'Test Textbox Variable',
type: 'TEXTBOX',
textboxValue: 'initial-value',
...VARIABLE_DEFAULTS,
order: 0,
};
renderVariableItem(textboxVariable);
// Change the textbox value
const textboxInput = screen.getByPlaceholderText(
'Enter a default value (if any)...',
);
await user.clear(textboxInput);
await user.type(textboxInput, 'updated-value');
// Click save button
const saveButton = screen.getByText(SAVE_BUTTON_TEXT);
await user.click(saveButton);
// Verify that onSave was called with the updated defaultValue and selectedValue
expect(onSave).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
type: 'TEXTBOX',
textboxValue: 'updated-value',
defaultValue: 'updated-value',
selectedValue: 'updated-value',
}),
expect.anything(),
);
});
test('non-textbox variables use variableDefaultValue instead of textboxValue', async () => {
	// QUERY-type variables must keep their own defaultValue; textboxValue is
	// only meaningful for TEXTBOX variables and must be ignored here.
	const interactions = userEvent.setup();
	const queryVariableFixture: IDashboardVariable = {
		id: TEST_VAR_IDS.VAR1,
		name: TEST_VAR_NAMES.VAR1,
		description: 'Test Query Variable',
		type: 'QUERY',
		queryValue: 'SELECT * FROM test',
		textboxValue: 'should-not-be-used',
		defaultValue: 'query-default-value',
		...VARIABLE_DEFAULTS,
		order: 0,
	};
	renderVariableItem(queryVariableFixture);

	// Save without touching any inputs.
	await interactions.click(screen.getByText(SAVE_BUTTON_TEXT));

	// defaultValue must come from the variable's own defaultValue field...
	expect(onSave).toHaveBeenCalledWith(
		expect.anything(),
		expect.objectContaining({
			type: 'QUERY',
			defaultValue: 'query-default-value',
		}),
		expect.anything(),
	);

	// ...and never from textboxValue.
	const [, persistedVariable] = onSave.mock.calls[0];
	expect(persistedVariable.defaultValue).not.toBe('should-not-be-used');
});
test('switching to textbox type sets defaultValue and selectedValue correctly on save', async () => {
	// Start from a QUERY variable, flip it to TEXTBOX in the UI, then make
	// sure the saved payload mirrors the typed default into selectedValue.
	const interactions = userEvent.setup();
	const startingVariable: IDashboardVariable = {
		id: TEST_VAR_IDS.VAR1,
		name: TEST_VAR_NAMES.VAR1,
		description: 'Test Variable',
		type: 'QUERY',
		queryValue: 'SELECT * FROM test',
		...VARIABLE_DEFAULTS,
		order: 0,
	};
	renderVariableItem(startingVariable);

	// Flip the variable type to TEXTBOX.
	const textboxTypeButton = findButtonByText(TEXT.TEXTBOX);
	expect(textboxTypeButton).toBeInTheDocument();
	if (textboxTypeButton) {
		await interactions.click(textboxTypeButton);
	}

	// Provide a default value for the new textbox variable.
	await interactions.type(
		screen.getByPlaceholderText('Enter a default value (if any)...'),
		'new-textbox-default',
	);

	// Persist, then verify every textbox-related field carries the typed value.
	await interactions.click(screen.getByText(SAVE_BUTTON_TEXT));
	expect(onSave).toHaveBeenCalledWith(
		expect.anything(),
		expect.objectContaining({
			type: 'TEXTBOX',
			textboxValue: 'new-textbox-default',
			defaultValue: 'new-textbox-default',
			selectedValue: 'new-textbox-default',
		}),
		expect.anything(),
	);
});
});
});

View File

@@ -320,6 +320,10 @@ function VariableItem({
]);
const variableValue = useMemo(() => {
if (queryType === 'TEXTBOX') {
return variableTextboxValue;
}
if (variableMultiSelect) {
let value = variableData.selectedValue;
if (isEmpty(value)) {
@@ -352,6 +356,8 @@ function VariableItem({
variableData.selectedValue,
variableData.showALLOption,
variableDefaultValue,
variableTextboxValue,
queryType,
previewValues,
]);
@@ -367,13 +373,10 @@ function VariableItem({
multiSelect: variableMultiSelect,
showALLOption: queryType === 'DYNAMIC' ? true : variableShowALLOption,
sort: variableSortType,
...(queryType === 'TEXTBOX' && {
selectedValue: (variableData.selectedValue ||
variableTextboxValue) as never,
}),
...(queryType !== 'TEXTBOX' && {
defaultValue: variableDefaultValue as never,
}),
// the reason we need to do this is because defaultValues are treated differently in case of textbox type
// They are the exact same and not like the other types where defaultValue is a separate field
defaultValue:
queryType === 'TEXTBOX' ? variableTextboxValue : variableDefaultValue,
modificationUUID: generateUUID(),
id: variableData.id || generateUUID(),
order: variableData.order,

View File

@@ -25,6 +25,12 @@
}
}
&.focused {
.variable-value {
outline: 1px solid var(--bg-robin-400);
}
}
.variable-value {
display: flex;
min-width: 120px;
@@ -93,6 +99,12 @@
.lightMode {
.variable-item {
&.focused {
.variable-value {
border: 1px solid var(--bg-robin-400);
}
}
.variable-name {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);

View File

@@ -94,7 +94,7 @@ function DashboardVariableSelection(): JSX.Element | null {
cycleNodes,
});
}
}, [setVariablesToGetUpdated, variables, variablesTableData]);
}, [variables, variablesTableData]);
// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
// also trigger when the global time changes

View File

@@ -80,10 +80,12 @@ describe('VariableItem', () => {
/>
</MockQueryClientProvider>,
);
expect(screen.getByPlaceholderText('Enter value')).toBeInTheDocument();
expect(
screen.getByTestId('variable-textbox-test_variable'),
).toBeInTheDocument();
});
test('calls onChange event handler when Input value changes', async () => {
test('calls onValueUpdate when Input value changes and blurs', async () => {
render(
<MockQueryClientProvider>
<VariableItem
@@ -102,13 +104,19 @@ describe('VariableItem', () => {
</MockQueryClientProvider>,
);
const inputElement = screen.getByTestId('variable-textbox-test_variable');
// Change the value
act(() => {
const inputElement = screen.getByPlaceholderText('Enter value');
fireEvent.change(inputElement, { target: { value: 'newValue' } });
});
// Blur the input to trigger the update
act(() => {
fireEvent.blur(inputElement);
});
await waitFor(() => {
// expect(mockOnValueUpdate).toHaveBeenCalledTimes(1);
expect(mockOnValueUpdate).toHaveBeenCalledWith(
'testVariable',
'test_variable',

View File

@@ -8,14 +8,14 @@ import './DashboardVariableSelection.styles.scss';
import { orange } from '@ant-design/colors';
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
import { Input, Popover, Tooltip, Typography } from 'antd';
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
import { memo, useCallback, useEffect, useMemo, useState } from 'react';
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -71,6 +71,15 @@ function VariableItem({
string | string[] | undefined
>(undefined);
// Local state for textbox input to ensure smooth editing experience
const [textboxInputValue, setTextboxInputValue] = useState<string>(
(variableData.selectedValue?.toString() ||
variableData.defaultValue?.toString()) ??
'',
);
const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
const textboxInputRef = useRef<InputRef>(null);
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
(state) => state.globalTime,
);
@@ -371,7 +380,7 @@ function VariableItem({
}, [variableData.type, variableData.customValue]);
return (
<div className="variable-item">
<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
<Typography.Text className="variable-name" ellipsis>
${variableData.name}
{variableData.description && (
@@ -384,16 +393,40 @@ function VariableItem({
<div className="variable-value">
{variableData.type === 'TEXTBOX' ? (
<Input
ref={textboxInputRef}
placeholder="Enter value"
data-testid={`variable-textbox-${variableData.id}`}
bordered={false}
key={variableData.selectedValue?.toString()}
defaultValue={variableData.selectedValue?.toString()}
value={textboxInputValue}
title={textboxInputValue}
onChange={(e): void => {
debouncedHandleChange(e.target.value || '');
setTextboxInputValue(e.target.value);
}}
style={{
width:
50 + ((variableData.selectedValue?.toString()?.length || 0) * 7 || 50),
onFocus={(): void => {
setIsTextboxFocused(true);
}}
onBlur={(e): void => {
setIsTextboxFocused(false);
const value = e.target.value.trim();
// If empty, reset to default value
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
}}
onKeyDown={(e): void => {
if (e.key === 'Enter') {
const value = textboxInputValue.trim();
if (!value && variableData.defaultValue) {
setTextboxInputValue(variableData.defaultValue.toString());
debouncedHandleChange(variableData.defaultValue.toString());
} else {
debouncedHandleChange(value);
}
textboxInputRef.current?.blur();
}
}}
/>
) : (

View File

@@ -257,6 +257,15 @@ export const onUpdateVariableNode = (
): void => {
const visited = new Set<string>();
// If nodeToUpdate is not in topologicalOrder (e.g., CUSTOM variable),
// we still need to mark its children as needing updates
if (!topologicalOrder.includes(nodeToUpdate)) {
// Mark direct children of the node as visited so they get processed
(graph[nodeToUpdate] || []).forEach((child) => {
visited.add(child);
});
}
// Start processing from the node to update
topologicalOrder.forEach((node) => {
if (node === nodeToUpdate || visited.has(node)) {

View File

@@ -1,4 +1,4 @@
import { areArraysEqual } from './util';
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
describe('areArraysEqual', () => {
it('should return true for equal arrays with same order', () => {
@@ -31,3 +31,121 @@ describe('areArraysEqual', () => {
expect(areArraysEqual(array1, array2)).toBe(true);
});
});
// Unit tests for onUpdateVariableNode: given a dependency graph and a
// topological order, the helper must invoke the callback for the updated
// node (when it is part of the order) and for every downstream dependent.
describe('onUpdateVariableNode', () => {
	// Graph structure:
	// deployment -> namespace -> service -> pod
	// deployment has no parents, namespace depends on deployment, etc.
	const graph: VariableGraph = {
		deployment: ['namespace'],
		namespace: ['service'],
		service: ['pod'],
		pod: [],
		customVar: ['namespace'], // CUSTOM variable that affects namespace
	};
	const topologicalOrder = ['deployment', 'namespace', 'service', 'pod'];

	it('should call callback for the node and all its descendants', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		// Updating the root should cascade through the whole chain, in order.
		onUpdateVariableNode('deployment', graph, topologicalOrder, callback);
		expect(visited).toEqual(['deployment', 'namespace', 'service', 'pod']);
	});

	it('should call callback starting from a middle node', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		// Ancestors (deployment) must NOT be revisited.
		onUpdateVariableNode('namespace', graph, topologicalOrder, callback);
		expect(visited).toEqual(['namespace', 'service', 'pod']);
	});

	it('should only call callback for the leaf node when updating leaf', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		onUpdateVariableNode('pod', graph, topologicalOrder, callback);
		expect(visited).toEqual(['pod']);
	});

	it('should handle CUSTOM variable not in topologicalOrder by updating its children', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		// customVar is not in topologicalOrder but has namespace as a child
		onUpdateVariableNode('customVar', graph, topologicalOrder, callback);
		// Should process namespace and its descendants (service, pod)
		expect(visited).toEqual(['namespace', 'service', 'pod']);
	});

	it('should handle node not in graph gracefully', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		onUpdateVariableNode('unknownNode', graph, topologicalOrder, callback);
		// Should not call callback for any node since unknownNode has no children
		expect(visited).toEqual([]);
	});

	it('should handle empty graph', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		onUpdateVariableNode('deployment', {}, topologicalOrder, callback);
		// deployment is in topologicalOrder, so callback is called for it
		expect(visited).toEqual(['deployment']);
	});

	it('should handle empty topologicalOrder', () => {
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		// With no processing order there is nothing to traverse.
		onUpdateVariableNode('deployment', graph, [], callback);
		expect(visited).toEqual([]);
	});

	it('should handle CUSTOM variable with multiple children', () => {
		const graphWithMultipleChildren: VariableGraph = {
			...graph,
			customMulti: ['namespace', 'service'], // CUSTOM variable affecting multiple nodes
		};
		const visited: string[] = [];
		const callback = (node: string): void => {
			visited.push(node);
		};
		onUpdateVariableNode(
			'customMulti',
			graphWithMultipleChildren,
			topologicalOrder,
			callback,
		);
		// Should process namespace, service, and pod (descendants)
		expect(visited).toEqual(['namespace', 'service', 'pod']);
	});
});

View File

@@ -318,7 +318,9 @@ function GridCardGraph({
version={version}
threshold={threshold}
headerMenuList={menuList}
isFetchingResponse={queryResponse.isFetching}
isFetchingResponse={
queryResponse.isFetching || variablesToGetUpdated.length > 0
}
setRequestData={setRequestData}
onClickHandler={onClickHandler}
onDragSelect={onDragSelect}

View File

@@ -0,0 +1,84 @@
// Styles for the "License" section rendered on the My Settings page
// (LicenseSection component). Dark-mode values are the defaults; the
// `.lightMode` block at the bottom overrides colors only.
.license-section {
	display: flex;
	flex-direction: column;
	gap: 16px;

	.license-section-header {
		display: flex;
		flex-direction: column;
		gap: 4px;

		.license-section-title {
			color: #fff;
			font-family: Inter;
			font-size: 16px;
			font-style: normal;
			line-height: 24px;
			letter-spacing: -0.08px;
		}
	}

	.license-section-content {
		display: flex;
		flex-direction: column;
		gap: 16px;

		// A single bordered card holding one license attribute.
		.license-section-content-item {
			padding: 16px;
			border: 1px solid var(--Slate-500, #161922);
			background: var(--Ink-400, #121317);
			border-radius: 3px;

			// Row with the attribute label on the left and the value/action
			// (masked key + copy button) on the right.
			.license-section-content-item-title-action {
				display: flex;
				align-items: center;
				justify-content: space-between;
				gap: 8px;
				color: var(--Vanilla-300, #eee);
				font-family: Inter;
				font-size: 14px;
				font-style: normal;
				font-weight: 500;
				line-height: normal;
				letter-spacing: -0.07px;
				margin-bottom: 8px;
			}

			.license-section-content-item-description {
				color: var(--Vanilla-400, #c0c1c3);
				font-family: Inter;
				font-size: 12px;
				font-style: normal;
				line-height: 20px;
				letter-spacing: -0.07px;
			}
		}
	}
}

// Light-theme color overrides; layout rules above still apply.
.lightMode {
	.license-section {
		.license-section-header {
			.license-section-title {
				color: var(--bg-ink-400);
			}
		}
		.license-section-content {
			.license-section-content-item {
				border: 1px solid var(--bg-vanilla-300);
				background: var(--bg-vanilla-100);
				.license-section-content-item-title-action {
					color: var(--bg-ink-400);
				}
				.license-section-content-item-description {
					color: var(--bg-ink-300);
				}
			}
		}
	}
}

View File

@@ -0,0 +1,65 @@
import './LicenseSection.styles.scss';
import { Button } from '@signozhq/button';
import { Typography } from 'antd';
import { useNotifications } from 'hooks/useNotifications';
import { Copy } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { useCopyToClipboard } from 'react-use';
// Masks a license key for display: keep the first and last two characters,
// star out the middle. Keys shorter than 4 characters are shown verbatim
// ('N/A' when empty). Pure helper, hoisted out of the component so it is
// not re-created on every render.
function getMaskedKey(key: string): string {
	if (!key || key.length < 4) {
		return key || 'N/A';
	}
	// NOTE: the previous implementation chained a no-op `.trim()` on the
	// 2-char tail, which could silently drop characters from keys ending in
	// whitespace; the mask must not alter the characters it shows.
	return `${key.substring(0, 2)}********${key.substring(key.length - 2)}`;
}

/**
 * Renders the "License" section of the settings page: the masked active
 * license key plus a copy-to-clipboard action that copies the full key.
 * Renders nothing when there is no active license key.
 */
function LicenseSection(): JSX.Element | null {
	const { activeLicense } = useAppContext();
	const { notifications } = useNotifications();
	const [, handleCopyToClipboard] = useCopyToClipboard();

	// Copy the full (unmasked) key and confirm with a success toast.
	const handleCopyKey = (text: string): void => {
		handleCopyToClipboard(text);
		notifications.success({
			message: 'Copied to clipboard',
		});
	};

	if (!activeLicense?.key) {
		// Match the declared return type: render nothing without a key.
		return null;
	}

	return (
		<div className="license-section">
			<div className="license-section-header">
				<div className="license-section-title">License</div>
			</div>
			<div className="license-section-content">
				<div className="license-section-content-item">
					<div className="license-section-content-item-title-action">
						<span>License key</span>
						<span style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
							<Typography.Text code>{getMaskedKey(activeLicense.key)}</Typography.Text>
							<Button
								variant="ghost"
								aria-label="Copy license key"
								data-testid="license-key-copy-btn"
								onClick={(): void => handleCopyKey(activeLicense.key)}
							>
								<Copy size={14} />
							</Button>
						</span>
					</div>
					<div className="license-section-content-item-description">
						Your SigNoz license key.
					</div>
				</div>
			</div>
		</div>
	);
}

export default LicenseSection;

View File

@@ -0,0 +1 @@
// Folder entry point: expose the LicenseSection component as the default export.
import LicenseSection from './LicenseSection';

export default LicenseSection;

View File

@@ -1,8 +1,31 @@
import userEvent from '@testing-library/user-event';
import MySettingsContainer from 'container/MySettings';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
import {
act,
fireEvent,
render,
screen,
waitFor,
within,
} from 'tests/test-utils';
// Shared spies so individual tests can assert on theme toggling, analytics,
// clipboard writes, and profile updates triggered by the container.
const toggleThemeFunction = jest.fn();
const logEventFunction = jest.fn();
const copyToClipboardFn = jest.fn();
const editUserFn = jest.fn();

// Stub react-use's useCopyToClipboard so copies are captured by the spy
// instead of touching the real clipboard API.
jest.mock('react-use', () => ({
	__esModule: true,
	useCopyToClipboard: (): [unknown, (text: string) => void] => [
		null,
		copyToClipboardFn,
	],
}));

// Route the user-update API module's default export through the editUserFn spy.
jest.mock('api/v1/user/id/update', () => ({
	__esModule: true,
	default: (...args: unknown[]): Promise<unknown> => editUserFn(...args),
}));
jest.mock('hooks/useDarkMode', () => ({
__esModule: true,
@@ -44,6 +67,7 @@ const PASSWORD_VALIDATION_MESSAGE_TEST_ID = 'password-validation-message';
describe('MySettings Flows', () => {
beforeEach(() => {
jest.clearAllMocks();
editUserFn.mockResolvedValue({});
render(<MySettingsContainer />);
});
@@ -215,4 +239,71 @@ describe('MySettings Flows', () => {
expect(submitButton).not.toBeDisabled();
});
});
describe('License section', () => {
	it('Should render license section content when license key exists', () => {
		// The beforeEach render already supplies a license key, so the section
		// title, label and description must all be visible.
		expect(screen.getByText('License')).toBeInTheDocument();
		expect(screen.getByText('License key')).toBeInTheDocument();
		expect(screen.getByText('Your SigNoz license key.')).toBeInTheDocument();
	});

	it('Should not render license section when license key is missing', () => {
		const { container } = render(<MySettingsContainer />, undefined, {
			appContextOverrides: {
				activeLicense: null,
			},
		});
		const scopedQueries = within(container);
		// None of the section's texts should appear in this scoped render.
		['License', 'License key', 'Your SigNoz license key.'].forEach((text) => {
			expect(scopedQueries.queryByText(text)).not.toBeInTheDocument();
		});
	});

	it('Should mask license key in the UI', () => {
		const { container } = render(<MySettingsContainer />, undefined, {
			appContextOverrides: {
				activeLicense: { key: 'abcd' } as any,
			},
		});
		// First two + last two characters survive; the middle is starred out.
		expect(within(container).getByText('ab********cd')).toBeInTheDocument();
	});

	it('Should not mask license key if it is too short', () => {
		const { container } = render(<MySettingsContainer />, undefined, {
			appContextOverrides: {
				activeLicense: { key: 'abc' } as any,
			},
		});
		// Keys shorter than four characters are rendered verbatim.
		expect(within(container).getByText('abc')).toBeInTheDocument();
	});

	it('Should copy license key and show success toast', async () => {
		const interactions = userEvent.setup();
		const { container } = render(<MySettingsContainer />, undefined, {
			appContextOverrides: {
				activeLicense: { key: 'test-license-key-12345' } as any,
			},
		});
		await interactions.click(
			within(container).getByTestId('license-key-copy-btn'),
		);
		await waitFor(() => {
			// The full, unmasked key is what gets written to the clipboard.
			expect(copyToClipboardFn).toHaveBeenCalledWith('test-license-key-12345');
			expect(successNotification).toHaveBeenCalledWith({
				message: 'Copied to clipboard',
			});
		});
	});
});
});

View File

@@ -15,6 +15,7 @@ import { useMutation } from 'react-query';
import { UserPreference } from 'types/api/preferences/preference';
import { showErrorNotification } from 'utils/error';
import LicenseSection from './LicenseSection';
import TimezoneAdaptation from './TimezoneAdaptation/TimezoneAdaptation';
import UserInfo from './UserInfo';
@@ -230,6 +231,8 @@ function MySettings(): JSX.Element {
</div>
</div>
</div>
<LicenseSection />
</div>
);
}

View File

@@ -20,13 +20,17 @@ interface AttributeRecord {
interface AttributeActionsProps {
record: AttributeRecord;
isPinned?: boolean;
onTogglePin: (fieldKey: string) => void;
onTogglePin?: (fieldKey: string) => void;
showPinned?: boolean;
showCopyOptions?: boolean;
}
export default function AttributeActions({
record,
isPinned,
onTogglePin,
showPinned = true,
showCopyOptions = true,
}: AttributeActionsProps): JSX.Element {
const [isOpen, setIsOpen] = useState<boolean>(false);
const [isFilterInLoading, setIsFilterInLoading] = useState<boolean>(false);
@@ -91,7 +95,7 @@ export default function AttributeActions({
}, [onCopyFieldValue, textToCopy]);
const handleTogglePin = useCallback((): void => {
onTogglePin(record.field);
onTogglePin?.(record.field);
}, [onTogglePin, record.field]);
const moreActionsContent = (
@@ -105,35 +109,41 @@ export default function AttributeActions({
>
Group By Attribute
</Button>
<Button
type="text"
icon={<Copy size={14} />}
onClick={handleCopyFieldName}
block
>
Copy Field Name
</Button>
<Button
type="text"
icon={<Copy size={14} />}
onClick={handleCopyFieldValue}
block
>
Copy Field Value
</Button>
{showCopyOptions && (
<>
<Button
type="text"
icon={<Copy size={14} />}
onClick={handleCopyFieldName}
block
>
Copy Field Name
</Button>
<Button
type="text"
icon={<Copy size={14} />}
onClick={handleCopyFieldValue}
block
>
Copy Field Value
</Button>
</>
)}
</div>
);
return (
<div className={cx('action-btn', { 'action-btn--is-open': isOpen })}>
<Tooltip title={isPinned ? 'Unpin attribute' : 'Pin attribute'}>
<Button
className={`filter-btn periscope-btn ${isPinned ? 'pinned' : ''}`}
aria-label={isPinned ? 'Unpin attribute' : 'Pin attribute'}
icon={<Pin size={14} fill={isPinned ? 'currentColor' : 'none'} />}
onClick={handleTogglePin}
/>
</Tooltip>
{showPinned && (
<Tooltip title={isPinned ? 'Unpin attribute' : 'Pin attribute'}>
<Button
className={`filter-btn periscope-btn ${isPinned ? 'pinned' : ''}`}
aria-label={isPinned ? 'Unpin attribute' : 'Pin attribute'}
icon={<Pin size={14} fill={isPinned ? 'currentColor' : 'none'} />}
onClick={handleTogglePin}
/>
</Tooltip>
)}
<Tooltip title="Filter for value">
<Button
className="filter-btn periscope-btn"
@@ -184,4 +194,7 @@ export default function AttributeActions({
AttributeActions.defaultProps = {
isPinned: false,
showPinned: true,
showCopyOptions: true,
onTogglePin: undefined,
};

View File

@@ -47,15 +47,56 @@
.description {
display: flex;
flex-direction: column;
gap: 16px;
padding: 10px 12px;
padding: 10px 0px;
.item {
padding: 8px 12px;
&,
.attribute-container {
display: flex;
flex-direction: column;
gap: 8px;
position: relative; // ensure absolutely-positioned children anchor to the row
}
// Show attribute actions on hover for hardcoded rows
.attribute-actions-wrapper {
display: none;
gap: 8px;
position: absolute;
right: 8px;
top: 50%;
transform: translateY(-50%);
border-radius: 4px;
padding: 2px;
// style the action button group
.action-btn {
display: flex;
gap: 4px;
}
.filter-btn {
display: flex;
align-items: center;
border: none;
box-shadow: none;
border-radius: 2px;
background: var(--bg-slate-400);
padding: 4px;
gap: 3px;
height: 24px;
width: 24px;
&:hover {
background: var(--bg-slate-300);
}
}
}
&:hover {
background-color: var(--bg-slate-500);
.attribute-actions-wrapper {
display: flex;
}
}
.span-name-wrapper {
@@ -646,6 +687,29 @@
.description {
.item {
.attribute-actions-wrapper {
display: none;
gap: 8px;
position: absolute;
right: 8px;
top: 50%;
transform: translateY(-50%);
border-radius: 4px;
padding: 2px;
.filter-btn {
background: var(--bg-vanilla-200);
&:hover {
background: var(--bg-vanilla-100);
}
}
}
&:hover {
background-color: var(--bg-vanilla-300);
.attribute-actions-wrapper {
display: flex;
}
}
.span-name-wrapper {
.span-percentile-value-container {
&.span-percentile-value-container-open {

View File

@@ -21,6 +21,7 @@ import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { themeColors } from 'constants/theme';
import { USER_PREFERENCES } from 'constants/userPreferences';
import AttributeActions from 'container/SpanDetailsDrawer/Attributes/AttributeActions';
import dayjs from 'dayjs';
import useClickOutside from 'hooks/useClickOutside';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
@@ -103,6 +104,10 @@ interface IResourceAttribute {
const DEFAULT_RESOURCE_ATTRIBUTES = {
serviceName: 'service.name',
name: 'name',
spanId: 'span_id',
spanKind: 'kind_string',
statusCodeString: 'status_code_string',
statusMessage: 'status_message',
};
// eslint-disable-next-line sonarjs/cognitive-complexity
@@ -835,6 +840,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
{selectedSpan.spanId}
</Typography.Text>
</div>
<div className="attribute-actions-wrapper">
<AttributeActions
record={{
field: DEFAULT_RESOURCE_ATTRIBUTES.spanId,
value: selectedSpan.spanId,
}}
showPinned={false}
showCopyOptions={false}
/>
</div>
</div>
<div className="item">
<Typography.Text className="attribute-key">start time</Typography.Text>
@@ -863,6 +878,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
</Typography.Text>
</Tooltip>
</div>
<div className="attribute-actions-wrapper">
<AttributeActions
record={{
field: DEFAULT_RESOURCE_ATTRIBUTES.serviceName,
value: selectedSpan.serviceName,
}}
showPinned={false}
showCopyOptions={false}
/>
</div>
</div>
</div>
<div className="item">
@@ -872,6 +897,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
{selectedSpan.spanKind}
</Typography.Text>
</div>
<div className="attribute-actions-wrapper">
<AttributeActions
record={{
field: DEFAULT_RESOURCE_ATTRIBUTES.spanKind,
value: selectedSpan.spanKind,
}}
showPinned={false}
showCopyOptions={false}
/>
</div>
</div>
<div className="item">
<Typography.Text className="attribute-key">
@@ -882,6 +917,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
{selectedSpan.statusCodeString}
</Typography.Text>
</div>
<div className="attribute-actions-wrapper">
<AttributeActions
record={{
field: DEFAULT_RESOURCE_ATTRIBUTES.statusCodeString,
value: selectedSpan.statusCodeString,
}}
showPinned={false}
showCopyOptions={false}
/>
</div>
</div>
{selectedSpan.statusMessage && (
@@ -891,6 +936,16 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
attributeValue={selectedSpan.statusMessage}
onExpand={showStatusMessageModal}
/>
<div className="attribute-actions-wrapper">
<AttributeActions
record={{
field: DEFAULT_RESOURCE_ATTRIBUTES.statusMessage,
value: selectedSpan.statusMessage,
}}
showPinned={false}
showCopyOptions={false}
/>
</div>
</div>
)}
<div className="item">

View File

@@ -0,0 +1,247 @@
/* eslint-disable sonarjs/no-duplicate-string */
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
} from 'tests/test-utils';
import AttributeActions from '../Attributes/AttributeActions';
// Mock only Popover from antd to simplify hover/open behavior while keeping other components real
jest.mock('antd', () => {
	const actual = jest.requireActual('antd');
	// Replacement Popover: opens via onOpenChange(true) on mouse-enter and
	// renders its content inline (behind a testid) when `open` is true.
	const MockPopover = ({
		content,
		children,
		open,
		onOpenChange,
		...rest
	}: any): JSX.Element => (
		<div
			data-testid="mock-popover-wrapper"
			onMouseEnter={(): void => onOpenChange?.(true)}
			{...rest}
		>
			{children}
			{open ? <div data-testid="mock-popover-content">{content}</div> : null}
		</div>
	);
	return { ...actual, Popover: MockPopover };
});

// Mock getAggregateKeys API used inside useTraceActions to resolve autocomplete keys
jest.mock('api/queryBuilder/getAttributeKeys', () => ({
	getAggregateKeys: jest.fn().mockResolvedValue({
		payload: {
			attributeKeys: [
				{
					key: 'http.method',
					dataType: 'string',
					type: 'tag',
					isColumn: true,
				},
			],
		},
	}),
}));

// Shared attribute record fixture used by every test below.
const record = { field: 'http.method', value: 'GET' };
// Unit tests for the AttributeActions button cluster: pin, filter in/out,
// and the "more actions" popover (group by / copy options).
describe('AttributeActions (unit)', () => {
	beforeEach(() => {
		jest.clearAllMocks();
	});

	it('renders core action buttons (pin, filter in/out, more)', async () => {
		render(<AttributeActions record={record} isPinned={false} />);
		expect(
			screen.getByRole('button', { name: 'Pin attribute' }),
		).toBeInTheDocument();
		expect(
			screen.getByRole('button', { name: 'Filter for value' }),
		).toBeInTheDocument();
		expect(
			screen.getByRole('button', { name: 'Filter out value' }),
		).toBeInTheDocument();
		// more actions (ellipsis) button
		expect(
			document.querySelector('.lucide-ellipsis')?.closest('button'),
		).toBeInTheDocument();
	});

	it('applies "Filter for" and calls redirectWithQueryBuilderData with correct query', async () => {
		const redirectWithQueryBuilderData = jest.fn();
		// Minimal query-builder state: one query with no existing filters.
		const currentQuery = {
			builder: {
				queryData: [
					{
						aggregateOperator: 'count',
						aggregateAttribute: { key: 'signoz_span_duration' },
						filters: { items: [], op: 'AND' },
						filter: { expression: '' },
						groupBy: [],
					},
				],
			},
		} as any;
		render(<AttributeActions record={record} />, undefined, {
			queryBuilderOverrides: { currentQuery, redirectWithQueryBuilderData },
		});
		const filterForBtn = screen.getByRole('button', { name: 'Filter for value' });
		await userEvent.click(filterForBtn);
		// The redirect payload must contain an '=' filter on the record's field/value.
		await waitFor(() => {
			expect(redirectWithQueryBuilderData).toHaveBeenCalledWith(
				expect.objectContaining({
					builder: expect.objectContaining({
						queryData: expect.arrayContaining([
							expect.objectContaining({
								filters: expect.objectContaining({
									items: expect.arrayContaining([
										expect.objectContaining({
											key: expect.objectContaining({ key: 'http.method' }),
											op: '=',
											value: 'GET',
										}),
									]),
								}),
							}),
						]),
					}),
				}),
				{},
				expect.any(String),
			);
		});
	});

	it('applies "Filter out" and calls redirectWithQueryBuilderData with correct query', async () => {
		const redirectWithQueryBuilderData = jest.fn();
		const currentQuery = {
			builder: {
				queryData: [
					{
						aggregateOperator: 'count',
						aggregateAttribute: { key: 'signoz_span_duration' },
						filters: { items: [], op: 'AND' },
						filter: { expression: '' },
						groupBy: [],
					},
				],
			},
		} as any;
		render(<AttributeActions record={record} />, undefined, {
			queryBuilderOverrides: { currentQuery, redirectWithQueryBuilderData },
		});
		const filterOutBtn = screen.getByRole('button', { name: 'Filter out value' });
		await userEvent.click(filterOutBtn);
		// Same shape as "Filter for", but with the '!=' operator.
		await waitFor(() => {
			expect(redirectWithQueryBuilderData).toHaveBeenCalledWith(
				expect.objectContaining({
					builder: expect.objectContaining({
						queryData: expect.arrayContaining([
							expect.objectContaining({
								filters: expect.objectContaining({
									items: expect.arrayContaining([
										expect.objectContaining({
											key: expect.objectContaining({ key: 'http.method' }),
											op: '!=',
											value: 'GET',
										}),
									]),
								}),
							}),
						]),
					}),
				}),
				{},
				expect.any(String),
			);
		});
	});

	it('opens more actions on hover and calls Group By handler; closes after click', async () => {
		const redirectWithQueryBuilderData = jest.fn();
		const currentQuery = {
			builder: {
				queryData: [
					{
						aggregateOperator: 'count',
						aggregateAttribute: { key: 'signoz_span_duration' },
						filters: { items: [], op: 'AND' },
						filter: { expression: '' },
						groupBy: [],
					},
				],
			},
		} as any;
		render(<AttributeActions record={record} />, undefined, {
			queryBuilderOverrides: { currentQuery, redirectWithQueryBuilderData },
		});
		const ellipsisBtn = document
			.querySelector('.lucide-ellipsis')
			?.closest('button') as HTMLElement;
		expect(ellipsisBtn).toBeInTheDocument();
		// hover to trigger Popover open via mock
		fireEvent.mouseEnter(ellipsisBtn.parentElement as Element);
		// content appears
		await waitFor(() =>
			expect(screen.getByText('Group By Attribute')).toBeInTheDocument(),
		);
		await userEvent.click(screen.getByText('Group By Attribute'));
		// The redirect payload must add the record's field to groupBy.
		await waitFor(() => {
			expect(redirectWithQueryBuilderData).toHaveBeenCalledWith(
				expect.objectContaining({
					builder: expect.objectContaining({
						queryData: expect.arrayContaining([
							expect.objectContaining({
								groupBy: expect.arrayContaining([
									expect.objectContaining({ key: 'http.method' }),
								]),
							}),
						]),
					}),
				}),
				{},
				expect.any(String),
			);
		});
		// After clicking group by, popover should close
		await waitFor(() =>
			expect(screen.queryByTestId('mock-popover-content')).not.toBeInTheDocument(),
		);
	});

	it('hides pin button when showPinned=false', async () => {
		render(<AttributeActions record={record} showPinned={false} />);
		expect(
			screen.queryByRole('button', { name: /pin attribute/i }),
		).not.toBeInTheDocument();
	});

	it('hides copy options when showCopyOptions=false', async () => {
		render(<AttributeActions record={record} showCopyOptions={false} />);
		const ellipsisBtn = document
			.querySelector('.lucide-ellipsis')
			?.closest('button') as HTMLElement;
		// Open the popover via the mocked hover behavior, then check that the
		// copy entries were not rendered inside it.
		fireEvent.mouseEnter(ellipsisBtn.parentElement as Element);
		await waitFor(() =>
			expect(screen.queryByText('Copy Field Name')).not.toBeInTheDocument(),
		);
		expect(screen.queryByText('Copy Field Value')).not.toBeInTheDocument();
	});
});

View File

@@ -291,6 +291,10 @@ export function DashboardProvider({
variable.order = order;
existingOrders.add(order);
// ! BWC - Specific case for backward compatibility where textboxValue was used instead of defaultValue
if (variable.type === 'TEXTBOX' && !variable.defaultValue) {
variable.defaultValue = variable.textboxValue || '';
}
}
if (variable.id === undefined) {

View File

@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { render, waitFor } from '@testing-library/react';
import { render, screen, waitFor } from '@testing-library/react';
import getDashboard from 'api/v1/dashboards/id/get';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
@@ -379,12 +379,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
// Empty URL variables - tests initialization flow
mockGetUrlVariables.mockReturnValue({});
const { getByTestId } = renderWithDashboardProvider(
`/dashboard/${DASHBOARD_ID}`,
{
dashboardId: DASHBOARD_ID,
},
);
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
dashboardId: DASHBOARD_ID,
});
await waitFor(() => {
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -415,16 +412,14 @@ describe('Dashboard Provider - URL Variables Integration', () => {
});
// Verify dashboard state contains the variables with default values
await waitFor(() => {
const dashboardVariables = getByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables).toHaveProperty('environment');
expect(parsedVariables).toHaveProperty('services');
// Default allSelected values should be preserved
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
expect(parsedVariables).toHaveProperty('environment');
expect(parsedVariables).toHaveProperty('services');
// Default allSelected values should be preserved
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
it('should merge URL variables with dashboard data and normalize values correctly', async () => {
@@ -438,12 +433,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
.mockReturnValueOnce('development')
.mockReturnValueOnce(['db', 'cache']);
const { getByTestId } = renderWithDashboardProvider(
`/dashboard/${DASHBOARD_ID}`,
{
dashboardId: DASHBOARD_ID,
},
);
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
dashboardId: DASHBOARD_ID,
});
await waitFor(() => {
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -474,18 +466,16 @@ describe('Dashboard Provider - URL Variables Integration', () => {
});
// Verify the dashboard state reflects the normalized URL values
await waitFor(() => {
const dashboardVariables = getByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
// The selectedValue should be updated with normalized URL values
expect(parsedVariables.environment.selectedValue).toBe('development');
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
// The selectedValue should be updated with normalized URL values
expect(parsedVariables.environment.selectedValue).toBe('development');
expect(parsedVariables.services.selectedValue).toEqual(['db', 'cache']);
// allSelected should be set to false when URL values override
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
// allSelected should be set to false when URL values override
expect(parsedVariables.environment.allSelected).toBe(false);
expect(parsedVariables.services.allSelected).toBe(false);
});
it('should handle ALL_SELECTED_VALUE from URL and set allSelected correctly', async () => {
@@ -495,12 +485,9 @@ describe('Dashboard Provider - URL Variables Integration', () => {
mockGetUrlVariables.mockReturnValue(urlVariables);
const { getByTestId } = renderWithDashboardProvider(
`/dashboard/${DASHBOARD_ID}`,
{
dashboardId: DASHBOARD_ID,
},
);
renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
dashboardId: DASHBOARD_ID,
});
await waitFor(() => {
expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -513,8 +500,8 @@ describe('Dashboard Provider - URL Variables Integration', () => {
);
// Verify that allSelected is set to true for the services variable
await waitFor(() => {
const dashboardVariables = getByTestId('dashboard-variables');
await waitFor(async () => {
const dashboardVariables = await screen.findByTestId('dashboard-variables');
const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
expect(parsedVariables.services.allSelected).toBe(true);
@@ -563,3 +550,203 @@ describe('Dashboard Provider - URL Variables Integration', () => {
});
});
});
describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
	const DASHBOARD_ID = 'test-dashboard-id';

	/* eslint-disable @typescript-eslint/no-explicit-any */
	// Mocks the dashboard fetch to resolve with the given variables map.
	const mockDashboardWithVariables = (variables: Record<string, any>): void => {
		mockGetDashboard.mockResolvedValue({
			httpStatusCode: 200,
			data: {
				id: DASHBOARD_ID,
				title: 'Test Dashboard',
				data: { variables },
			},
		} as any);
	};

	// Waits for the dashboard-variables element, then retries the provided
	// assertions against its parsed JSON payload. findByTestId runs OUTSIDE
	// waitFor: awaiting a query inside a waitFor callback nests two polling
	// mechanisms, which Testing Library explicitly discourages.
	const expectVariables = async (
		assertOnVariables: (parsedVariables: Record<string, any>) => void,
	): Promise<void> => {
		const dashboardVariables = await screen.findByTestId('dashboard-variables');
		await waitFor(() => {
			const parsedVariables = JSON.parse(dashboardVariables.textContent || '{}');
			assertOnVariables(parsedVariables);
		});
	};
	/* eslint-enable @typescript-eslint/no-explicit-any */

	beforeEach(() => {
		jest.clearAllMocks();
		mockGetUrlVariables.mockReturnValue({});
		// eslint-disable-next-line sonarjs/no-identical-functions
		mockNormalizeUrlValueForVariable.mockImplementation((urlValue) => {
			if (urlValue === undefined || urlValue === null) {
				return urlValue;
			}
			return urlValue as IDashboardVariable['selectedValue'];
		});
	});

	describe('Textbox Variable defaultValue Migration', () => {
		it('should set defaultValue from textboxValue for TEXTBOX variables without defaultValue (BWC)', async () => {
			// Simulates old data format before the migration: textboxValue is set
			// but defaultValue is intentionally missing.
			mockDashboardWithVariables({
				myTextbox: {
					id: 'textbox-id',
					name: 'myTextbox',
					type: 'TEXTBOX',
					textboxValue: 'legacy-default-value',
					// defaultValue is intentionally missing to test BWC
					multiSelect: false,
					showALLOption: false,
					sort: 'DISABLED',
				},
			});
			renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
				dashboardId: DASHBOARD_ID,
			});
			await waitFor(() => {
				expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
			});
			// Verify that defaultValue is set from textboxValue
			await expectVariables((parsedVariables) => {
				expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
				expect(parsedVariables.myTextbox.textboxValue).toBe('legacy-default-value');
				expect(parsedVariables.myTextbox.defaultValue).toBe('legacy-default-value');
			});
		});

		it('should not override existing defaultValue for TEXTBOX variables', async () => {
			// TEXTBOX variable that already has a defaultValue — BWC must not clobber it.
			mockDashboardWithVariables({
				myTextbox: {
					id: 'textbox-id',
					name: 'myTextbox',
					type: 'TEXTBOX',
					textboxValue: 'old-textbox-value',
					defaultValue: 'existing-default-value',
					multiSelect: false,
					showALLOption: false,
					sort: 'DISABLED',
				},
			});
			renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
				dashboardId: DASHBOARD_ID,
			});
			await waitFor(() => {
				expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
			});
			// Verify that existing defaultValue is preserved
			await expectVariables((parsedVariables) => {
				expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
				expect(parsedVariables.myTextbox.defaultValue).toBe(
					'existing-default-value',
				);
			});
		});

		it('should set empty defaultValue when textboxValue is also empty for TEXTBOX variables', async () => {
			// TEXTBOX variable with empty textboxValue and no defaultValue.
			mockDashboardWithVariables({
				myTextbox: {
					id: 'textbox-id',
					name: 'myTextbox',
					type: 'TEXTBOX',
					textboxValue: '',
					// defaultValue is intentionally missing
					multiSelect: false,
					showALLOption: false,
					sort: 'DISABLED',
				},
			});
			renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
				dashboardId: DASHBOARD_ID,
			});
			await waitFor(() => {
				expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
			});
			// Verify that defaultValue is set to empty string
			await expectVariables((parsedVariables) => {
				expect(parsedVariables.myTextbox.type).toBe('TEXTBOX');
				expect(parsedVariables.myTextbox.defaultValue).toBe('');
			});
		});

		it('should not apply BWC logic to non-TEXTBOX variables', async () => {
			// QUERY variable with no defaultValue — migration is TEXTBOX-only.
			mockDashboardWithVariables({
				myQuery: {
					id: 'query-id',
					name: 'myQuery',
					type: 'QUERY',
					queryValue: 'SELECT * FROM test',
					textboxValue: 'should-not-be-used',
					// defaultValue is intentionally missing
					multiSelect: false,
					showALLOption: false,
					sort: 'DISABLED',
				},
			});
			renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
				dashboardId: DASHBOARD_ID,
			});
			await waitFor(() => {
				expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
			});
			// Verify that defaultValue is NOT set from textboxValue for QUERY type
			await expectVariables((parsedVariables) => {
				expect(parsedVariables.myQuery.type).toBe('QUERY');
				// defaultValue should not be set to textboxValue for non-TEXTBOX variables
				expect(parsedVariables.myQuery.defaultValue).not.toBe('should-not-be-used');
			});
		});
	});
});

View File

@@ -37,6 +37,7 @@ export interface IDashboardVariable {
// Custom
customValue?: string;
// Textbox
// Special case: for TEXTBOX variables, defaultValue mirrors textboxValue; for all other variable types, defaultValue is an independent single field.
textboxValue?: string;
sort: TSortVariableValuesType;

14
go.mod
View File

@@ -16,6 +16,7 @@ require (
github.com/coreos/go-oidc/v3 v3.14.1
github.com/dgraph-io/ristretto/v2 v2.3.0
github.com/dustin/go-humanize v1.0.1
github.com/gin-gonic/gin v1.11.0
github.com/go-co-op/gocron v1.30.1
github.com/go-openapi/runtime v0.28.0
github.com/go-openapi/strfmt v0.23.0
@@ -92,6 +93,12 @@ require (
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.27.0 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
@@ -99,10 +106,11 @@ require (
github.com/swaggest/refl v1.4.0 // indirect
github.com/swaggest/usecase v1.3.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.3.0 // indirect
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
golang.org/x/arch v0.20.0 // indirect
golang.org/x/tools/godoc v0.1.0-deprecated // indirect
modernc.org/libc v1.66.10 // indirect
modernc.org/mathutil v1.7.1 // indirect
@@ -199,7 +207,7 @@ require (
github.com/jpillora/backoff v1.0.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
@@ -343,7 +351,7 @@ require (
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.39.0 // indirect
gonum.org/v1/gonum v0.16.0 // indirect
google.golang.org/api v0.236.0 // indirect
google.golang.org/api v0.236.0
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/grpc v1.75.1 // indirect

28
go.sum
View File

@@ -292,7 +292,11 @@ github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/fxamacker/cbor/v2 v2.9.0 h1:NpKPmjDBgUfBms6tr6JZkTHtfFGcMKsw3eGcmD/sapM=
github.com/fxamacker/cbor/v2 v2.9.0/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ=
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk=
github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls=
github.com/go-co-op/gocron v1.30.1 h1:tjWUvJl5KrcwpkEkSXFSQFr4F9h5SfV/m4+RX0cV2fs=
github.com/go-co-op/gocron v1.30.1/go.mod h1:39f6KNSGVOU1LO/ZOoZfcSxwlsJDQOKSu8erN0SH48Y=
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
@@ -340,9 +344,17 @@ github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ
github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58=
github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-redis/redismock/v9 v9.2.0 h1:ZrMYQeKPECZPjOj5u9eyOjg8Nnb0BS9lkVIZ6IpsKLw=
github.com/go-redis/redismock/v9 v9.2.0/go.mod h1:18KHfGDK4Y6c2R0H38EUGWAdc7ZQS9gfYxc94k7rWT0=
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
@@ -360,6 +372,8 @@ github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJA
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.9.5/go.mod h1:U/jl18uSupI5rdI2jmuCswEA2htH9eXfferR3KfscvA=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA=
github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
@@ -636,8 +650,8 @@ github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCy
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs=
github.com/knadh/koanf/v2 v2.2.0 h1:FZFwd9bUjpb8DyCWARUBy5ovuhDs1lI87dOEn2K8UVU=
@@ -667,6 +681,8 @@ github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6Fm
github.com/leodido/go-syslog/v4 v4.2.0 h1:A7vpbYxsO4e2E8udaurkLlxP5LDpDbmPMsGnuhb7jVk=
github.com/leodido/go-syslog/v4 v4.2.0/go.mod h1:eJ8rUfDN5OS6dOkCOBYlg2a+hbAg6pJa99QXXgMrd98=
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b h1:11UHH39z1RhZ5dc4y4r/4koJo6IYFgTRMe/LlwRTEw0=
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b/go.mod h1:WZxr2/6a/Ar9bMDc2rN/LJrE/hF6bXE4LPyDSIxwAfg=
github.com/linode/linodego v1.49.0 h1:MNd3qwvQzbXB5mCpvdCqlUIu1RPA9oC+50LyB9kK+GQ=
@@ -1023,6 +1039,8 @@ github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkF
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
github.com/uptrace/bun v1.2.9 h1:OOt2DlIcRUMSZPr6iXDFg/LaQd59kOxbAjpIVHddKRs=
github.com/uptrace/bun v1.2.9/go.mod h1:r2ZaaGs9Ru5bpGTr8GQfp8jp+TlCav9grYCPOu2CJSg=
github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFFxL1kuuY3SpxnWk=
@@ -1271,8 +1289,8 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c=
golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -1717,6 +1735,8 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX
google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE=

View File

@@ -1,127 +0,0 @@
package fields
import (
"bytes"
"io"
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// API exposes the HTTP handlers for the fields endpoints (field keys and
// field values), backed by the telemetry metadata store.
type API struct {
	// underlying telemetry store; also used to build the metadata store in NewAPI
	telemetryStore telemetrystore.TelemetryStore
	// metadata store queried by GetFieldsKeys / GetFieldsValues
	telemetryMetadataStore telemetrytypes.MetadataStore
}
// TODO: move this to module and remove metastore init

// NewAPI constructs the fields API. It builds a telemetry metadata store over
// the trace, metric, meter, log and metadata tables of the given telemetry
// store and keeps both handles on the returned API.
func NewAPI(
	settings factory.ProviderSettings,
	telemetryStore telemetrystore.TelemetryStore,
) *API {
	telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
		settings,
		telemetryStore,
		telemetrytraces.DBName,
		telemetrytraces.TagAttributesV2TableName,
		telemetrytraces.SpanAttributesKeysTblName,
		telemetrytraces.SpanIndexV3TableName,
		telemetrymetrics.DBName,
		telemetrymetrics.AttributesMetadataTableName,
		telemetrymeter.DBName,
		telemetrymeter.SamplesAgg1dTableName,
		telemetrylogs.DBName,
		telemetrylogs.LogsV2TableName,
		telemetrylogs.TagAttributesV2TableName,
		telemetrylogs.LogAttributeKeysTblName,
		telemetrylogs.LogResourceKeysTblName,
		telemetrymetadata.DBName,
		telemetrymetadata.AttributesMetadataLocalTableName,
	)
	return &API{
		telemetryStore:         telemetryStore,
		telemetryMetadataStore: telemetryMetadataStore,
	}
}
// GetFieldsKeys handles the field-keys endpoint: it parses a key selector
// from the request query parameters, queries the metadata store and writes
// the matching keys (plus a completeness flag) as JSON.
func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
	type fieldKeysResponse struct {
		Keys     map[string][]*telemetrytypes.TelemetryFieldKey `json:"keys"`
		Complete bool                                           `json:"complete"`
	}

	// Re-buffer the request body so any downstream reader can consume it again.
	buf, _ := io.ReadAll(r.Body)
	r.Body = io.NopCloser(bytes.NewBuffer(buf))

	selector, err := parseFieldKeyRequest(r)
	if err != nil {
		render.Error(w, err)
		return
	}

	keys, complete, err := api.telemetryMetadataStore.GetKeys(r.Context(), selector)
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, fieldKeysResponse{
		Keys:     keys,
		Complete: complete,
	})
}
// GetFieldsValues handles the field-values endpoint. The related-values
// lookup is best-effort: a failure there degrades to an empty related list
// instead of failing the whole request.
func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
	type fieldValuesResponse struct {
		Values   *telemetrytypes.TelemetryFieldValues `json:"values"`
		Complete bool                                 `json:"complete"`
	}

	// Re-buffer the request body so any downstream reader can consume it again.
	buf, _ := io.ReadAll(r.Body)
	r.Body = io.NopCloser(bytes.NewBuffer(buf))

	selector, err := parseFieldValueRequest(r)
	if err != nil {
		render.Error(w, err)
		return
	}

	ctx := r.Context()
	allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, selector)
	if err != nil {
		render.Error(w, err)
		return
	}

	relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, selector)
	if err != nil {
		// we don't want to return error if we fail to get related values for some reason
		relatedValues = []string{}
	}

	render.Success(w, http.StatusOK, fieldValuesResponse{
		Values: &telemetrytypes.TelemetryFieldValues{
			StringValues:  allValues.StringValues,
			NumberValues:  allValues.NumberValues,
			RelatedValues: relatedValues,
		},
		Complete: allComplete && relatedComplete,
	})
}

View File

@@ -1,162 +0,0 @@
package fields
import (
"net/http"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// parseFieldKeyRequest builds a FieldKeySelector from the request's query
// parameters. Unset signal/source default to their unspecified values, limit
// defaults to 1000, startUnixMilli is rounded down to a 6-hour boundary, and
// a field context may be inferred from the searchText when none is given
// explicitly (only if that inferred context is valid for the signal).
func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, error) {
	q := r.URL.Query()

	signal := telemetrytypes.SignalUnspecified
	if s := q.Get("signal"); s != "" {
		signal = telemetrytypes.Signal{String: valuer.NewString(s)}
	}

	source := telemetrytypes.SourceUnspecified
	if s := q.Get("source"); s != "" {
		source = telemetrytypes.Source{String: valuer.NewString(s)}
	}

	limit := 1000
	if s := q.Get("limit"); s != "" {
		parsed, err := strconv.Atoi(s)
		if err != nil {
			return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse limit")
		}
		limit = parsed
	}

	var startUnixMilli, endUnixMilli int64
	if s := q.Get("startUnixMilli"); s != "" {
		parsed, err := strconv.ParseInt(s, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse startUnixMilli")
		}
		// Round down to the nearest 6 hours (21600000 milliseconds)
		startUnixMilli = parsed - parsed%21600000
	}
	if s := q.Get("endUnixMilli"); s != "" {
		parsed, err := strconv.ParseInt(s, 10, 64)
		if err != nil {
			return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse endUnixMilli")
		}
		endUnixMilli = parsed
	}

	// Parse fieldContext directly instead of using JSON unmarshalling.
	var fieldContext telemetrytypes.FieldContext
	if s := q.Get("fieldContext"); s != "" {
		fieldContext = telemetrytypes.FieldContext{String: valuer.NewString(s)}
	}

	// Parse fieldDataType directly instead of using JSON unmarshalling.
	var fieldDataType telemetrytypes.FieldDataType
	if s := q.Get("fieldDataType"); s != "" {
		fieldDataType = telemetrytypes.FieldDataType{String: valuer.NewString(s)}
	}

	var metricContext *telemetrytypes.MetricContext
	if metricName := q.Get("metricName"); metricName != "" {
		metricContext = &telemetrytypes.MetricContext{
			MetricName: metricName,
		}
	}

	name := q.Get("searchText")
	if name != "" && fieldContext == telemetrytypes.FieldContextUnspecified {
		parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
		// Only apply inferred context if it is valid for the current signal
		if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified &&
			isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
			name = parsedFieldKey.Name
			fieldContext = parsedFieldKey.FieldContext
		}
	}

	return &telemetrytypes.FieldKeySelector{
		StartUnixMilli:    startUnixMilli,
		EndUnixMilli:      endUnixMilli,
		Signal:            signal,
		Source:            source,
		Name:              name,
		FieldContext:      fieldContext,
		FieldDataType:     fieldDataType,
		Limit:             limit,
		SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeFuzzy,
		MetricContext:     metricContext,
	}, nil
}
// parseFieldValueRequest builds a FieldValueSelector on top of the key
// selector parsed from the same request. The "name" parameter may carry an
// inferable field context (applied only when valid for the signal); the
// value-level limit falls back to 50 when absent or unparsable.
func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector, error) {
	keySelector, err := parseFieldKeyRequest(r)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
	}

	q := r.URL.Query()

	name := q.Get("name")
	if name != "" && keySelector.FieldContext == telemetrytypes.FieldContextUnspecified {
		parsedFieldKey := telemetrytypes.GetFieldKeyFromKeyText(name)
		// Only apply inferred context if it is valid for the current signal
		if parsedFieldKey.FieldContext != telemetrytypes.FieldContextUnspecified &&
			isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
			name = parsedFieldKey.Name
			keySelector.FieldContext = parsedFieldKey.FieldContext
		}
	}
	keySelector.Name = name

	// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
	limit, err := strconv.Atoi(q.Get("limit"))
	if err != nil {
		limit = 50
	}

	return &telemetrytypes.FieldValueSelector{
		FieldKeySelector: keySelector,
		ExistingQuery:    q.Get("existingQuery"),
		Value:            q.Get("searchText"),
		Limit:            limit,
	}, nil
}
// isContextValidForSignal reports whether an inferred field context may be
// applied for the given signal. Resource, attribute and scope contexts are
// signal-agnostic; the remaining contexts are tied to a specific signal.
// Signals not handled below accept any context.
func isContextValidForSignal(ctx telemetrytypes.FieldContext, signal telemetrytypes.Signal) bool {
	switch ctx {
	case telemetrytypes.FieldContextResource,
		telemetrytypes.FieldContextAttribute,
		telemetrytypes.FieldContextScope:
		// Valid regardless of signal.
		return true
	}

	switch signal.StringValue() {
	case telemetrytypes.SignalLogs.StringValue():
		return ctx == telemetrytypes.FieldContextLog || ctx == telemetrytypes.FieldContextBody
	case telemetrytypes.SignalTraces.StringValue():
		return ctx == telemetrytypes.FieldContextSpan || ctx == telemetrytypes.FieldContextEvent || ctx == telemetrytypes.FieldContextTrace
	case telemetrytypes.SignalMetrics.StringValue():
		return ctx == telemetrytypes.FieldContextMetric
	}
	return true
}

View File

@@ -0,0 +1,50 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/gorilla/mux"
)
// addFieldsRoutes registers the viewer-scoped field metadata endpoints
// (/api/v1/fields/keys and /api/v1/fields/values) on the router, each with
// its OpenAPI definition.
func (provider *provider) addFieldsRoutes(router *mux.Router) error {
	keysDef := handler.OpenAPIDef{
		ID:                  "GetFieldsKeys",
		Tags:                []string{"fields"},
		Summary:             "Get field keys",
		Description:         "This endpoint returns field keys",
		Request:             nil,
		RequestParams:       new(telemetrytypes.PostableFieldKeysParams),
		RequestContentType:  "",
		Response:            new(telemetrytypes.GettableFieldKeys),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
	}
	err := router.
		Handle("/api/v1/fields/keys", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsKeys), keysDef)).
		Methods(http.MethodGet).
		GetError()
	if err != nil {
		return err
	}

	valuesDef := handler.OpenAPIDef{
		ID:                  "GetFieldsValues",
		Tags:                []string{"fields"},
		Summary:             "Get field values",
		Description:         "This endpoint returns field values",
		Request:             nil,
		RequestParams:       new(telemetrytypes.PostableFieldValueParams),
		RequestContentType:  "",
		Response:            new(telemetrytypes.GettableFieldValues),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
	}
	err = router.
		Handle("/api/v1/fields/values", handler.New(provider.authZ.ViewAccess(provider.fieldsHandler.GetFieldsValues), valuesDef)).
		Methods(http.MethodGet).
		GetError()
	if err != nil {
		return err
	}

	return nil
}

View File

@@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -39,6 +40,7 @@ type provider struct {
dashboardModule dashboard.Module
dashboardHandler dashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
fieldsHandler fields.Handler
}
func NewFactory(
@@ -55,9 +57,28 @@ func NewFactory(
dashboardModule dashboard.Module,
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
fieldsHandler fields.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler, flaggerHandler, dashboardModule, dashboardHandler, metricsExplorerHandler)
return newProvider(
ctx,
providerSettings,
config,
orgGetter,
authz,
orgHandler,
userHandler,
sessionHandler,
authDomainHandler,
preferenceHandler,
globalHandler,
promoteHandler,
flaggerHandler,
dashboardModule,
dashboardHandler,
metricsExplorerHandler,
fieldsHandler,
)
})
}
@@ -78,6 +99,7 @@ func newProvider(
dashboardModule dashboard.Module,
dashboardHandler dashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
fieldsHandler fields.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -97,6 +119,7 @@ func newProvider(
dashboardModule: dashboardModule,
dashboardHandler: dashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
fieldsHandler: fieldsHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -153,6 +176,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -6,10 +6,15 @@ import (
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google"
admin "google.golang.org/api/admin/directory/v1"
"google.golang.org/api/option"
)
const (
@@ -17,19 +22,28 @@ const (
redirectPath string = "/api/v1/complete/google"
)
var (
scopes []string = []string{"email"}
)
var scopes []string = []string{"email", "profile"}
var _ authn.CallbackAuthN = (*AuthN)(nil)
type AuthN struct {
store authtypes.AuthNStore
store authtypes.AuthNStore
settings factory.ScopedProviderSettings
httpClient *client.Client
}
func New(ctx context.Context, store authtypes.AuthNStore) (*AuthN, error) {
func New(ctx context.Context, store authtypes.AuthNStore, providerSettings factory.ProviderSettings) (*AuthN, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/authn/callbackauthn/googlecallbackauthn")
httpClient, err := client.New(settings.Logger(), providerSettings.TracerProvider, providerSettings.MeterProvider)
if err != nil {
return nil, err
}
return &AuthN{
store: store,
store: store,
settings: settings,
httpClient: httpClient,
}, nil
}
@@ -58,11 +72,13 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
}
if err := query.Get("error"); err != "" {
a.settings.Logger().ErrorContext(ctx, "google: error while authenticating", "error", err, "error_description", query.Get("error_description"))
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: error while authenticating").WithAdditional(query.Get("error_description"))
}
state, err := authtypes.NewStateFromString(query.Get("state"))
if err != nil {
a.settings.Logger().ErrorContext(ctx, "google: invalid state", "error", err)
return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "google: invalid state").WithAdditional(err.Error())
}
@@ -76,10 +92,12 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
if err != nil {
var retrieveError *oauth2.RetrieveError
if errors.As(err, &retrieveError) {
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body))
a.settings.Logger().ErrorContext(ctx, "google: failed to get token", "error", err, "error_description", retrieveError.ErrorDescription, "body", string(retrieveError.Body))
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to get token").WithAdditional(retrieveError.ErrorDescription)
}
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token").WithAdditional(err.Error())
a.settings.Logger().ErrorContext(ctx, "google: failed to get token", "error", err)
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token")
}
rawIDToken, ok := token.Extra("id_token").(string)
@@ -90,7 +108,8 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
verifier := oidcProvider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().Google.ClientID})
idToken, err := verifier.Verify(ctx, rawIDToken)
if err != nil {
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token").WithAdditional(err.Error())
a.settings.Logger().ErrorContext(ctx, "google: failed to verify token", "error", err)
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token")
}
var claims struct {
@@ -101,11 +120,20 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
}
if err := idToken.Claims(&claims); err != nil {
a.settings.Logger().ErrorContext(ctx, "google: missing or invalid claims", "error", err)
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: missing or invalid claims").WithAdditional(err.Error())
}
if claims.HostedDomain != authDomain.StorableAuthDomain().Name {
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim %s", claims.HostedDomain)
a.settings.Logger().ErrorContext(ctx, "google: unexpected hd claim", "expected", authDomain.StorableAuthDomain().Name, "actual", claims.HostedDomain)
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: unexpected hd claim")
}
if !authDomain.AuthDomainConfig().Google.InsecureSkipEmailVerified {
if !claims.EmailVerified {
a.settings.Logger().ErrorContext(ctx, "google: email is not verified", "email", claims.Email)
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: email is not verified")
}
}
email, err := valuer.NewEmail(claims.Email)
@@ -113,8 +141,24 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "google: failed to parse email").WithAdditional(err.Error())
}
return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state), nil
var groups []string
if authDomain.AuthDomainConfig().Google.FetchGroups {
groups, err = a.fetchGoogleWorkspaceGroups(ctx, claims.Email, authDomain.AuthDomainConfig().Google)
if err != nil {
a.settings.Logger().ErrorContext(ctx, "google: could not fetch groups", "error", err)
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: could not fetch groups").WithAdditional(err.Error())
}
allowedGroups := authDomain.AuthDomainConfig().Google.AllowedGroups
if len(allowedGroups) > 0 {
groups = filterGroups(groups, allowedGroups)
if len(groups) == 0 {
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: user %q is not in any allowed groups", claims.Email).WithAdditional(allowedGroups...)
}
}
}
return authtypes.NewCallbackIdentity(claims.Name, email, authDomain.StorableAuthDomain().OrgID, state, groups, ""), nil
}
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
@@ -136,3 +180,90 @@ func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain,
}).String(),
}
}
// fetchGoogleWorkspaceGroups lists the Google Workspace groups that userEmail
// belongs to, using a service account with domain-wide delegation that
// impersonates a configured admin for the user's domain. Transitive (nested)
// memberships are included when enabled in the config.
func (a *AuthN) fetchGoogleWorkspaceGroups(ctx context.Context, userEmail string, config *authtypes.GoogleConfig) ([]string, error) {
	adminEmail := config.GetAdminEmailForDomain(userEmail)
	if adminEmail == "" {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no admin email configured for domain of %s", userEmail)
	}
	jwtConfig, err := google.JWTConfigFromJSON([]byte(config.ServiceAccountJSON), admin.AdminDirectoryGroupReadonlyScope)
	if err != nil {
		a.settings.Logger().ErrorContext(ctx, "google: invalid service account credentials", "error", err)
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid service account credentials")
	}
	// Impersonate the admin so the Directory API authorizes the group listing.
	jwtConfig.Subject = adminEmail
	// Route the OAuth2 token exchange through our instrumented HTTP client.
	customCtx := context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client())
	adminService, err := admin.NewService(ctx, option.WithHTTPClient(jwtConfig.Client(customCtx)))
	if err != nil {
		a.settings.Logger().ErrorContext(ctx, "google: unable to create directory service", "error", err)
		return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to create directory service")
	}
	// checkedGroups is shared across the recursion to deduplicate results and
	// terminate on membership cycles.
	checkedGroups := make(map[string]struct{})
	return a.getGroups(ctx, adminService, userEmail, config.FetchTransitiveGroupMembership, checkedGroups)
}
// getGroups lists the groups userEmail is a direct member of, paging through
// the Directory API. When fetchTransitive is true it recurses into each newly
// discovered group to collect nested memberships as well. checkedGroups
// records group emails already visited so duplicates are skipped and
// membership cycles terminate.
func (a *AuthN) getGroups(ctx context.Context, adminService *admin.Service, userEmail string, fetchTransitive bool, checkedGroups map[string]struct{}) ([]string, error) {
	var userGroups []string
	var pageToken string
	for {
		call := adminService.Groups.List().UserKey(userEmail)
		if pageToken != "" {
			call = call.PageToken(pageToken)
		}
		groupList, err := call.Context(ctx).Do()
		if err != nil {
			a.settings.Logger().ErrorContext(ctx, "google: unable to list groups", "error", err)
			return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to list groups")
		}
		for _, group := range groupList.Groups {
			// Skip groups already seen; this also breaks cycles in nested groups.
			if _, exists := checkedGroups[group.Email]; exists {
				continue
			}
			checkedGroups[group.Email] = struct{}{}
			userGroups = append(userGroups, group.Email)
			if fetchTransitive {
				transitiveGroups, err := a.getGroups(ctx, adminService, group.Email, fetchTransitive, checkedGroups)
				if err != nil {
					a.settings.Logger().ErrorContext(ctx, "google: unable to list transitive groups", "error", err)
					return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "unable to list transitive groups")
				}
				userGroups = append(userGroups, transitiveGroups...)
			}
		}
		// Continue until the API reports no further pages.
		pageToken = groupList.NextPageToken
		if pageToken == "" {
			break
		}
	}
	return userGroups, nil
}
func filterGroups(userGroups, allowedGroups []string) []string {
allowed := make(map[string]struct{}, len(allowedGroups))
for _, g := range allowedGroups {
allowed[g] = struct{}{} // just to make o(1) searches
}
var filtered []string
for _, g := range userGroups {
if _, ok := allowed[g]; ok {
filtered = append(filtered, g)
}
}
return filtered
}

View File

@@ -112,7 +112,7 @@ func (b *base) WithUrl(u string) *base {
}
}
// WithUrl adds additional messages to the base error and returns a new base error.
// WithAdditional adds additional messages to the base error and returns a new base error.
func (b *base) WithAdditional(a ...string) *base {
return &base{
t: b.t,

View File

@@ -9,10 +9,12 @@ import (
var (
ErrCodeInvalidRequestBody = errors.MustNewCode("invalid_request_body")
ErrCodeInvalidRequestField = errors.MustNewCode("invalid_request_field")
ErrCodeInvalidRequestQuery = errors.MustNewCode("invalid_request_query")
)
var (
JSON Binding = &jsonBinding{}
JSON BindingBody = &jsonBinding{}
Query BindingQuery = &queryBinding{}
)
type bindBodyOptions struct {
@@ -34,6 +36,10 @@ func WithUseNumber(useNumber bool) BindBodyOption {
}
}
type Binding interface {
type BindingBody interface {
BindBody(body io.Reader, obj any, opts ...BindBodyOption) error
}
type BindingQuery interface {
BindQuery(query map[string][]string, obj any) error
}

View File

@@ -12,7 +12,7 @@ const (
ErrMessageInvalidField string = "request body contains invalid field value"
)
var _ Binding = (*jsonBinding)(nil)
var _ BindingBody = (*jsonBinding)(nil)
type jsonBinding struct{}

23
pkg/http/binding/query.go Normal file
View File

@@ -0,0 +1,23 @@
package binding
import (
"github.com/SigNoz/signoz/pkg/errors"
ginbinding "github.com/gin-gonic/gin/binding"
)
const (
ErrMessageInvalidQuery string = "request query contains invalid fields, please verify the format and try again."
)
var _ BindingQuery = (*queryBinding)(nil)
type queryBinding struct{}
// BindQuery decodes the given URL query values into obj using the `query`
// struct tag. A decoding failure is surfaced as a typed invalid-input error
// carrying the underlying cause as an additional message.
func (b *queryBinding) BindQuery(query map[string][]string, obj any) error {
	if err := ginbinding.MapFormWithTag(obj, query, "query"); err != nil {
		return errors.New(errors.TypeInvalidInput, ErrCodeInvalidRequestQuery, ErrMessageInvalidQuery).WithAdditional(err.Error())
	}
	return nil
}

View File

@@ -0,0 +1,33 @@
package binding
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestQueryBinding_BindQuery verifies query-string binding into struct fields
// tagged with `query`, covering plain and pointer int fields for present,
// empty and missing query values.
func TestQueryBinding_BindQuery(t *testing.T) {
	// Addressable targets for the pointer-field expectations below.
	one := 1
	zero := 0
	testCases := []struct {
		name     string
		query    map[string][]string
		obj      any
		expected any
	}{
		{name: "SingleIntField_NonEmptyValue", query: map[string][]string{"a": {"1"}}, obj: &struct{A int `query:"a"`}{}, expected: &struct{ A int }{A: 1}},
		{name: "SingleIntField_EmptyValue", query: map[string][]string{"a": {""}}, obj: &struct{A int `query:"a"`}{}, expected: &struct{ A int }{A: 0}},
		{name: "SingleIntField_MissingField", query: map[string][]string{}, obj: &struct{A int `query:"a"`}{}, expected: &struct{ A int }{A: 0}},
		{name: "SinglePointerIntField_NonEmptyValue", query: map[string][]string{"a": {"1"}}, obj: &struct{A *int `query:"a"`}{}, expected: &struct{ A *int }{A: &one}},
		{name: "SinglePointerIntField_EmptyValue", query: map[string][]string{"a": {""}}, obj: &struct{A *int `query:"a"`}{}, expected: &struct{ A *int }{A: &zero}},
		{name: "SinglePointerIntField_MissingField", query: map[string][]string{}, obj: &struct{A *int `query:"a"`}{}, expected: &struct{ A *int }{A: nil}},
	}
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			// Bind into the tagged struct, then compare against the untagged
			// expectation (EqualValues converts between the struct types).
			err := Query.BindQuery(testCase.query, testCase.obj)
			assert.NoError(t, err)
			assert.EqualValues(t, testCase.expected, testCase.obj)
		})
	}
}

View File

@@ -31,7 +31,13 @@ func (plugin *reqResLog) OnRequestStart(request *http.Request) {
string(semconv.ServerAddressKey), host,
string(semconv.ServerPortKey), port,
string(semconv.HTTPRequestSizeKey), request.ContentLength,
"http.request.headers", request.Header,
}
// only include all the headers if we are at debug level
if plugin.logger.Handler().Enabled(request.Context(), slog.LevelDebug) {
fields = append(fields, "http.request.headers", request.Header)
} else {
fields = append(fields, "http.request.headers", redactSensitiveHeaders(request.Header))
}
plugin.logger.InfoContext(request.Context(), "::SENT-REQUEST::", fields...)
@@ -75,3 +81,24 @@ func (plugin *reqResLog) OnError(request *http.Request, err error) {
plugin.logger.ErrorContext(request.Context(), "::UNABLE-TO-SEND-REQUEST::", fields...)
}
func redactSensitiveHeaders(headers http.Header) http.Header {
// maintained list of headers to redact
sensitiveHeaders := map[string]bool{
"Authorization": true,
"Cookie": true,
"X-Signoz-Cloud-Api-Key": true,
}
safeHeaders := make(http.Header)
for header, value := range headers {
if sensitiveHeaders[header] {
safeHeaders[header] = []string{"REDACTED"}
} else {
safeHeaders[header] = value
}
}
return safeHeaders
}

View File

@@ -60,6 +60,7 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
// Add request structure
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
opCtx.AddReqStructure(handler.openAPIDef.RequestParams)
// Add success response
if handler.openAPIDef.Response != nil {

View File

@@ -16,6 +16,7 @@ type OpenAPIDef struct {
Summary string
Description string
Request any
RequestParams any
RequestContentType string
Response any
ResponseContentType string

View File

@@ -31,7 +31,7 @@ type Module interface {
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Get the IDP info of the domain provided.
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) (*authtypes.AuthNProviderInfo)
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) *authtypes.AuthNProviderInfo
}
type Handler interface {

View File

@@ -0,0 +1,11 @@
package fields
import "net/http"
// Handler exposes the HTTP endpoints for telemetry field discovery.
type Handler interface {
	// GetFieldsKeys gets the field keys for the field key selector given in
	// the request query string.
	GetFieldsKeys(http.ResponseWriter, *http.Request)
	// GetFieldsValues gets the field values for the field value selector given
	// in the request query string.
	GetFieldsValues(http.ResponseWriter, *http.Request)
}

View File

@@ -0,0 +1,90 @@
package implfields
import (
"net/http"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// handler implements fields.Handler on top of the telemetry metadata store.
type handler struct {
	telemetryStore         telemetrystore.TelemetryStore
	telemetryMetadataStore telemetrytypes.MetadataStore
}

// NewHandler returns a fields.Handler backed by the given stores.
//
// NOTE(review): settings is currently unused by the handler; it is kept in
// the signature for consistency with other handler constructors — confirm
// before removing.
func NewHandler(settings factory.ProviderSettings, telemetryMetadataStore telemetrytypes.MetadataStore, telemetryStore telemetrystore.TelemetryStore) fields.Handler {
	return &handler{
		telemetryStore:         telemetryStore,
		telemetryMetadataStore: telemetryMetadataStore,
	}
}
// GetFieldsKeys handles GET /api/v1/fields/keys: it decodes the field key
// selector from the query string, looks up matching keys in the metadata
// store, and renders them together with a completeness flag.
func (handler *handler) GetFieldsKeys(rw http.ResponseWriter, req *http.Request) {
	var params telemetrytypes.PostableFieldKeysParams
	if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
		render.Error(rw, err)
		return
	}

	selector, err := telemetrytypes.NewFieldKeySelectorFromPostableFieldKeysParams(params)
	if err != nil {
		render.Error(rw, err)
		return
	}

	keys, complete, err := handler.telemetryMetadataStore.GetKeys(req.Context(), selector)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldKeys{
		Keys:     keys,
		Complete: complete,
	})
}
// GetFieldsValues handles GET /api/v1/fields/values: it decodes the field
// value selector from the query string, fetches all matching values plus
// related values from the metadata store, and renders the combined result.
func (handler *handler) GetFieldsValues(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	var params telemetrytypes.PostableFieldValueParams
	if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
		render.Error(rw, err)
		return
	}
	fieldValueSelector, err := telemetrytypes.NewFieldValueSelectorFromPostableFieldValueParams(params)
	if err != nil {
		render.Error(rw, err)
		return
	}
	allValues, allComplete, err := handler.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
	if err != nil {
		render.Error(rw, err)
		return
	}
	relatedValues, relatedComplete, err := handler.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
	if err != nil {
		// we don't want to return error if we fail to get related values for some reason
		// NOTE(review): the error is dropped without logging, and relatedComplete
		// remains false on this path, which forces Complete=false in the response
		// below — confirm that is intended.
		relatedValues = []string{}
	}
	values := &telemetrytypes.TelemetryFieldValues{
		StringValues:  allValues.StringValues,
		NumberValues:  allValues.NumberValues,
		RelatedValues: relatedValues,
	}
	render.Success(rw, http.StatusOK, &telemetrytypes.GettableFieldValues{
		Values:   values,
		Complete: allComplete && relatedComplete,
	})
}

View File

@@ -123,7 +123,7 @@ func (module *module) DeprecatedCreateSessionByEmailPassword(ctx context.Context
}
if !factorPassword.Equals(password) {
return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email orpassword")
return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password")
}
identity := authtypes.NewIdentity(users[0].ID, users[0].OrgID, users[0].Email, users[0].Role)
@@ -157,7 +157,15 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
return "", err
}
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, types.RoleViewer, callbackIdentity.OrgID)
authDomain, err := module.authDomain.GetByOrgIDAndID(ctx, callbackIdentity.OrgID, callbackIdentity.State.DomainID)
if err != nil {
return "", err
}
roleMapping := authDomain.AuthDomainConfig().RoleMapping
role := roleMapping.NewRoleFromCallbackIdentity(callbackIdentity)
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID)
if err != nil {
return "", err
}

View File

@@ -318,13 +318,14 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes
}
// Check if we're dealing with time series or scalar data
if req.RequestType == qbtypes.RequestTypeTimeSeries {
switch req.RequestType {
case qbtypes.RequestTypeTimeSeries:
result := q.processTimeSeriesFormula(ctx, results, formula, req)
if result != nil {
result = q.applySeriesLimit(result, formula.Limit, formula.Order)
results[name] = result
}
} else if req.RequestType == qbtypes.RequestTypeScalar {
case qbtypes.RequestTypeScalar:
result := q.processScalarFormula(ctx, results, formula, req)
if result != nil {
result = q.applySeriesLimit(result, formula.Limit, formula.Order)
@@ -581,11 +582,14 @@ func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req
}
// formatScalarResultsAsTable formats scalar results as a unified table for UI display
func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result, _ *qbtypes.QueryRangeRequest) map[string]any {
func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]any {
if len(results) == 0 {
return map[string]any{"table": &qbtypes.ScalarData{}}
}
// apply default sorting if no order specified
applyDefaultSort := !req.HasOrderSpecified()
// Convert all results to ScalarData first
scalarResults := make(map[string]*qbtypes.ScalarData)
for name, result := range results {
@@ -600,13 +604,13 @@ func (q *querier) formatScalarResultsAsTable(results map[string]*qbtypes.Result,
if len(scalarResults) == 1 {
for _, sd := range scalarResults {
if hasMultipleQueries(sd) {
return map[string]any{"table": deduplicateRows(sd)}
return map[string]any{"table": deduplicateRows(sd, applyDefaultSort)}
}
}
}
// Otherwise merge all results
merged := mergeScalarData(scalarResults)
merged := mergeScalarData(scalarResults, applyDefaultSort)
return map[string]any{"table": merged}
}
@@ -687,7 +691,7 @@ func hasMultipleQueries(sd *qbtypes.ScalarData) bool {
}
// deduplicateRows removes duplicate rows based on group columns
func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
func deduplicateRows(sd *qbtypes.ScalarData, applyDefaultSort bool) *qbtypes.ScalarData {
// Find group column indices
groupIndices := []int{}
for i, col := range sd.Columns {
@@ -696,8 +700,9 @@ func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
}
}
// Build unique rows map
// Build unique rows map, preserve order
uniqueRows := make(map[string][]any)
var keyOrder []string
for _, row := range sd.Data {
key := buildRowKey(row, groupIndices)
if existing, found := uniqueRows[key]; found {
@@ -711,17 +716,20 @@ func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
rowCopy := make([]any, len(row))
copy(rowCopy, row)
uniqueRows[key] = rowCopy
keyOrder = append(keyOrder, key)
}
}
// Convert back to slice
// Convert back to slice, preserve the original order
data := make([][]any, 0, len(uniqueRows))
for _, row := range uniqueRows {
data = append(data, row)
for _, key := range keyOrder {
data = append(data, uniqueRows[key])
}
// Sort by first aggregation column
sortByFirstAggregation(data, sd.Columns)
// sort by first aggregation (descending) if no order was specified
if applyDefaultSort {
sortByFirstAggregation(data, sd.Columns)
}
return &qbtypes.ScalarData{
Columns: sd.Columns,
@@ -730,7 +738,7 @@ func deduplicateRows(sd *qbtypes.ScalarData) *qbtypes.ScalarData {
}
// mergeScalarData merges multiple scalar data results
func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData {
func mergeScalarData(results map[string]*qbtypes.ScalarData, applyDefaultSort bool) *qbtypes.ScalarData {
// Collect unique group columns
groupCols := []string{}
groupColMap := make(map[string]*qbtypes.ColumnDescriptor)
@@ -770,10 +778,12 @@ func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData
}
}
// Merge rows
// Merge rows, preserve order
rowMap := make(map[string][]any)
var keyOrder []string
for queryName, sd := range results {
for _, queryName := range queryNames {
sd := results[queryName]
// Create index mappings
groupMap := make(map[string]int)
for i, col := range sd.Columns {
@@ -802,6 +812,7 @@ func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData
newRow[i] = "n/a"
}
rowMap[key] = newRow
keyOrder = append(keyOrder, key)
}
// Set aggregation values for this query
@@ -825,14 +836,16 @@ func mergeScalarData(results map[string]*qbtypes.ScalarData) *qbtypes.ScalarData
}
}
// Convert to slice
// Convert to slice, preserving insertion order
data := make([][]any, 0, len(rowMap))
for _, row := range rowMap {
data = append(data, row)
for _, key := range keyOrder {
data = append(data, rowMap[key])
}
// Sort by first aggregation column
sortByFirstAggregation(data, columns)
// sort by first aggregation (descending) if no order was specified
if applyDefaultSort {
sortByFirstAggregation(data, columns)
}
return &qbtypes.ScalarData{
Columns: columns,
@@ -888,7 +901,7 @@ func sortByFirstAggregation(data [][]any, columns []*qbtypes.ColumnDescriptor) {
// compareValues compares two values for sorting (handles n/a and numeric types)
func compareValues(a, b any) int {
// Handle n/a values
// n/a values gets pushed to the end
if a == "n/a" && b == "n/a" {
return 0
}

View File

@@ -5,6 +5,7 @@ import (
"context"
"fmt"
"log/slog"
"regexp"
"sort"
"strings"
"text/template"
@@ -19,6 +20,50 @@ import (
"github.com/prometheus/prometheus/promql/parser"
)
// unquotedDottedNamePattern matches unquoted identifiers containing dots
// that appear in metric or label name positions. This helps detect queries
// using the old syntax that needs migration to UTF-8 quoted syntax.
// Examples it matches: k8s.pod.name, deployment.environment, http.status_code
var unquotedDottedNamePattern = regexp.MustCompile(`(?:^|[{,(\s])([a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z0-9_]+)+)(?:[}\s,=!~)\[]|$)`)
// quotedMetricOutsideBracesPattern matches the incorrect syntax where a quoted
// metric name appears outside of braces followed by a selector block.
// Example: "kube_pod_status_ready_time"{"condition"="true"}
// This is a common mistake when migrating to UTF-8 syntax.
var quotedMetricOutsideBracesPattern = regexp.MustCompile(`"([^"]+)"\s*\{`)
// enhancePromQLError adds helpful context to PromQL parse errors,
// particularly for UTF-8 syntax migration issues where metric and label
// names containing dots need to be quoted.
// enhancePromQLError wraps a PromQL parse failure in an invalid-input error,
// attaching a migration hint when the query looks like it predates the UTF-8
// naming syntax: either a quoted metric name placed outside the braces, or an
// unquoted metric/label name containing dots.
func enhancePromQLError(query string, parseErr error) error {
	errMsg := parseErr.Error()

	if m := quotedMetricOutsideBracesPattern.FindStringSubmatch(query); len(m) > 1 {
		name := m[1]
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid promql query: %s. Hint: The metric name should be inside the braces. Use {\"__name__\"=\"%s\", ...} or {\"%s\", ...} instead of \"%s\"{...}",
			errMsg, name, name, name,
		)
	}

	if m := unquotedDottedNamePattern.FindStringSubmatch(query); len(m) > 1 {
		name := m[1]
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid promql query: %s. Hint: Metric and label names containing dots require quoted notation in the new UTF-8 syntax, e.g., use \"%s\" instead of %s",
			errMsg, name, name,
		)
	}

	return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query: %s", errMsg)
}
type promqlQuery struct {
logger *slog.Logger
promEngine prometheus.Prometheus
@@ -81,7 +126,7 @@ func (q *promqlQuery) removeAllVarMatchers(query string, vars map[string]qbv5.Va
expr, err := parser.ParseExpr(query)
if err != nil {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query %q", query)
return "", enhancePromQLError(query, err)
}
// Create visitor and walk the AST
@@ -161,7 +206,7 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
)
if err != nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query %q", query)
return nil, enhancePromQLError(query, err)
}
res := qry.Exec(ctx)

View File

@@ -2,8 +2,10 @@ package querier
import (
"log/slog"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/errors"
qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/stretchr/testify/assert"
)
@@ -172,3 +174,268 @@ func TestRemoveAllVarMatchers(t *testing.T) {
})
}
}
// TestEnhancePromQLError verifies that enhancePromQLError preserves the
// original parse error message and appends the correct migration hint for
// dotted (pre-UTF-8) names and for quoted metrics placed outside braces.
func TestEnhancePromQLError(t *testing.T) {
	// Stand-in parse error; the function only uses its message text.
	parseErr := errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unexpected character: '.' at position 12")
	t.Run("dotted name patterns", func(t *testing.T) {
		tests := []struct {
			name                  string
			query                 string
			wantDottedNameHint    bool
			wantDottedNameExample string
		}{
			{
				name:                  "query with unquoted dotted metric name",
				query:                 `sum(rate(k8s.container.restarts[5m]))`,
				wantDottedNameHint:    true,
				wantDottedNameExample: "k8s.container.restarts",
			},
			{
				name:                  "query with unquoted dotted label in group by",
				query:                 `sum by (k8s.pod.name) (rate(requests_total[5m]))`,
				wantDottedNameHint:    true,
				wantDottedNameExample: "k8s.pod.name",
			},
			{
				name:                  "query with unquoted dotted label in filter",
				query:                 `requests_total{k8s.namespace.name="default"}`,
				wantDottedNameHint:    true,
				wantDottedNameExample: "k8s.namespace.name",
			},
			{
				name:                  "query with multiple unquoted dotted names",
				query:                 `sum by (k8s.pod.name, deployment.environment) (increase(k8s.container.restarts[15m]))`,
				wantDottedNameHint:    true,
				wantDottedNameExample: "k8s.pod.name", // should match first one
			},
			{
				name:               "query without dotted names - no hint",
				query:              `sum(rate(http_requests_total[5m]))`,
				wantDottedNameHint: false,
			},
			{
				name:               "query with properly quoted dotted names - no hint",
				query:              `sum(rate({"k8s.container.restarts"}[5m]))`,
				wantDottedNameHint: false,
			},
			{
				name:               "query with dotted name inside regex string - no hint",
				query:              `requests_total{pod=~"k8s.pod.name.*"}`,
				wantDottedNameHint: false,
			},
		}
		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				err := enhancePromQLError(tt.query, parseErr)
				errMsg := err.Error()
				// The original parser message must always survive wrapping.
				assert.True(t, strings.Contains(errMsg, parseErr.Error()),
					"error should contain original parse error message")
				if tt.wantDottedNameHint {
					assert.True(t, strings.Contains(errMsg, "Hint:"),
						"error should contain hint for dotted name query")
					assert.True(t, strings.Contains(errMsg, "UTF-8 syntax"),
						"error should mention UTF-8 syntax")
					assert.True(t, strings.Contains(errMsg, tt.wantDottedNameExample),
						"error should contain the dotted name example: %s", tt.wantDottedNameExample)
				} else {
					assert.False(t, strings.Contains(errMsg, "Hint:"),
						"error should not contain hint for non-dotted-name query")
				}
			})
		}
	})
	t.Run("quoted metric outside braces patterns", func(t *testing.T) {
		tests := []struct {
			name             string
			query            string
			wantHint         bool
			wantMetricInHint string
		}{
			{
				name:             "quoted metric name followed by selector",
				query:            `"kube_pod_status_ready_time"{"condition"="true"}`,
				wantHint:         true,
				wantMetricInHint: "kube_pod_status_ready_time",
			},
			{
				name:             "quoted metric with space before brace",
				query:            `"kube_pod_labels" {"label"!=""}`,
				wantHint:         true,
				wantMetricInHint: "kube_pod_labels",
			},
			{
				name:             "complex query with quoted metric outside braces",
				query:            `min by (namespace) ("kube_pod_status_ready_time"{"condition"="true"})`,
				wantHint:         true,
				wantMetricInHint: "kube_pod_status_ready_time",
			},
			{
				name:             "label_replace with quoted metric outside braces",
				query:            `label_replace("kube_pod_labels"{"label_cnpg_io_cluster"!=""}, "cluster","$1","label","(.+)")`,
				wantHint:         true,
				wantMetricInHint: "kube_pod_labels",
			},
			{
				name:     "correctly formatted query - no hint",
				query:    `{"kube_pod_status_ready_time", condition="true"}`,
				wantHint: false,
			},
			{
				name:     "old syntax without quotes - no hint for this pattern",
				query:    `kube_pod_status_ready_time{condition="true"}`,
				wantHint: false,
			},
		}
		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				err := enhancePromQLError(tt.query, parseErr)
				errMsg := err.Error()
				assert.True(t, strings.Contains(errMsg, parseErr.Error()),
					"error should contain original parse error message")
				if tt.wantHint {
					assert.True(t, strings.Contains(errMsg, "Hint:"),
						"error should contain hint")
					assert.True(t, strings.Contains(errMsg, "inside the braces"),
						"error should mention putting metric inside braces")
					assert.True(t, strings.Contains(errMsg, tt.wantMetricInHint),
						"error should contain the metric name: %s", tt.wantMetricInHint)
				}
			})
		}
	})
}
// TestUnquotedDottedNamePattern verifies the regex that detects unquoted
// dotted identifiers (e.g. "k8s.pod.name") appearing as metric or label
// names in PromQL query text. The pattern must capture the dotted name in
// submatch 1, and must NOT match names that are quoted, that appear inside
// regex string literals, or that contain no dot at all.
func TestUnquotedDottedNamePattern(t *testing.T) {
	tests := []struct {
		name     string
		input    string
		expected string // empty string means no match expected
	}{
		{
			name:     "metric name at start",
			input:    "k8s.pod.name",
			expected: "k8s.pod.name",
		},
		{
			name:     "label in group by clause",
			input:    "sum by (k8s.pod.name) (rate(x[5m]))",
			expected: "k8s.pod.name",
		},
		{
			name:     "label in filter",
			input:    "metric{k8s.namespace.name=\"default\"}",
			expected: "k8s.namespace.name",
		},
		{
			name:     "metric with underscore and dots",
			input:    "http_server.request.duration",
			expected: "http_server.request.duration",
		},
		{
			name:     "quoted metric name - no match",
			input:    `{"k8s.pod.name"}`,
			expected: "",
		},
		{
			name:     "inside regex string - no match",
			input:    `{pod=~"k8s.pod.name.*"}`,
			expected: "",
		},
		{
			name:     "simple metric without dots - no match",
			input:    "http_requests_total",
			expected: "",
		},
		{
			// A name with a single dot is still a dotted name, so the
			// pattern is expected to match it. (The previous name for this
			// case, "single dot only - no match", contradicted the asserted
			// expectation below.)
			name:     "single dot - matches",
			input:    "a.b",
			expected: "a.b",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			matches := unquotedDottedNamePattern.FindStringSubmatch(tt.input)
			if tt.expected == "" {
				// No match, or an empty capture group, both count as "no match".
				assert.True(t, len(matches) < 2 || matches[1] == "",
					"expected no match for input %q but got %v", tt.input, matches)
			} else {
				assert.True(t, len(matches) >= 2,
					"expected match for input %q but got none", tt.input)
				if len(matches) >= 2 {
					assert.Equal(t, tt.expected, matches[1],
						"unexpected match for input %q", tt.input)
				}
			}
		})
	}
}
// TestQuotedMetricOutsideBracesPattern exercises the regex that spots the
// malformed form `"metric"{...}` — a quoted metric name written OUTSIDE the
// brace selector. Submatch 1 must carry the metric name; correctly written
// UTF-8 selectors and plain unquoted metrics must not match.
func TestQuotedMetricOutsideBracesPattern(t *testing.T) {
	cases := []struct {
		name     string
		input    string
		expected string // empty string means no match expected
	}{
		{
			name:     "quoted metric followed by braces",
			input:    `"kube_pod_status_ready_time"{"condition"="true"}`,
			expected: "kube_pod_status_ready_time",
		},
		{
			name:     "quoted metric with space before brace",
			input:    `"kube_pod_labels" {"label"!=""}`,
			expected: "kube_pod_labels",
		},
		{
			name:     "quoted metric in label_replace",
			input:    `label_replace("kube_pod_labels"{"x"="y"}, "a","b","c","d")`,
			expected: "kube_pod_labels",
		},
		{
			name:     "quoted metric with dots",
			input:    `"k8s.container.restarts"{"pod"="test"}`,
			expected: "k8s.container.restarts",
		},
		{
			name:     "correct UTF-8 syntax - no match",
			input:    `{"kube_pod_status_ready_time", condition="true"}`,
			expected: "",
		},
		{
			name:     "old syntax without quotes - no match",
			input:    `kube_pod_status_ready_time{condition="true"}`,
			expected: "",
		},
		{
			name:     "quoted string in label value - no match",
			input:    `metric{label="value"}{other="x"}`,
			expected: "",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := quotedMetricOutsideBracesPattern.FindStringSubmatch(tc.input)
			// Guard clause: the "no match" cases accept either no submatch
			// list at all or an empty capture group.
			if tc.expected == "" {
				assert.True(t, len(got) < 2 || got[1] == "",
					"expected no match for input %q but got %v", tc.input, got)
				return
			}
			// assert.True reports the failure and returns the condition, so
			// the exact-value check only runs when a capture group exists.
			if assert.True(t, len(got) >= 2,
				"expected match for input %q but got none", tc.input) {
				assert.Equal(t, tc.expected, got[1],
					"unexpected match for input %q", tc.input)
			}
		})
	}
}

View File

@@ -25,7 +25,6 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -145,8 +144,6 @@ type APIHandler struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -177,8 +174,6 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
FieldsAPI *fields.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
@@ -243,7 +238,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
FieldsAPI: opts.FieldsAPI,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -399,12 +393,6 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterFieldsRoutes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v1").Subrouter()
subRouter.HandleFunc("/fields/keys", am.ViewAccess(aH.FieldsAPI.GetFieldsKeys)).Methods(http.MethodGet)
subRouter.HandleFunc("/fields/values", am.ViewAccess(aH.FieldsAPI.GetFieldsValues)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
hostsSubRouter := router.PathPrefix("/api/v1/hosts").Subrouter()

View File

@@ -887,7 +887,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
keys := make([]string, 0, len(queryRangeParams.Variables))
querytemplate.AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
querytemplate.AssignReservedVarsV3(queryRangeParams)
for k := range queryRangeParams.Variables {
keys = append(keys, k)
@@ -927,7 +927,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
continue
}
querytemplate.AssignReservedVars(queryRangeParams.Variables, queryRangeParams.Start, queryRangeParams.End)
querytemplate.AssignReservedVarsV3(queryRangeParams)
keys := make([]string, 0, len(queryRangeParams.Variables))

File diff suppressed because it is too large Load Diff

View File

@@ -17,7 +17,6 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
FluxInterval: config.Querier.FluxInterval,
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
@@ -215,7 +213,6 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
api.RegisterLogsRoutes(r, am)
api.RegisterIntegrationRoutes(r, am)
api.RegisterCloudIntegrationsRoutes(r, am)
api.RegisterFieldsRoutes(r, am)
api.RegisterQueryRangeV3Routes(r, am)
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)

View File

@@ -1,17 +1,8 @@
package converter
import "github.com/SigNoz/signoz/pkg/errors"
// Unit represents a unit of measurement
type Unit string
func (u Unit) Validate() error {
if !IsValidUnit(u) {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit: %s", u)
}
return nil
}
// Value represents a value with a unit of measurement
type Value struct {
F float64
@@ -69,27 +60,6 @@ func FromUnit(u Unit) Converter {
}
}
// IsValidUnit returns true if the given unit is valid
func IsValidUnit(u Unit) bool {
switch u {
// Duration unit
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min",
// Data unit
"bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy",
// Data rate unit
"binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s",
// Percent unit
"percent", "percentunit", "%",
// Bool unit
"bool", "bool_yes_no", "bool_true_false", "bool_1_0",
// Throughput unit
"cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm", "{count}/s", "{ops}/s", "{req}/s", "{read}/s", "{write}/s", "{iops}/s", "{count}/min", "{ops}/min", "{read}/min", "{write}/min":
return true
default:
return false
}
}
func UnitToName(u string) string {
switch u {
case "ns":

View File

@@ -9,9 +9,8 @@ import (
"strings"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/converter"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/pkg/errors"
"go.uber.org/zap"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -602,170 +601,43 @@ func (c *CompositeQuery) Sanitize() {
func (c *CompositeQuery) Validate() error {
if c == nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"composite query is required",
)
return fmt.Errorf("composite query is required")
}
// Validate unit if supplied
if c.Unit != "" {
unit := converter.Unit(c.Unit)
err := unit.Validate()
if err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid unit: %s",
err.Error(),
)
if c.BuilderQueries == nil && c.ClickHouseQueries == nil && c.PromQueries == nil && len(c.Queries) == 0 {
return fmt.Errorf("composite query must contain at least one query type")
}
if c.QueryType == QueryTypeBuilder {
for name, query := range c.BuilderQueries {
if err := query.Validate(c.PanelType); err != nil {
return fmt.Errorf("builder query %s is invalid: %w", name, err)
}
}
}
if c.QueryType == QueryTypeClickHouseSQL {
for name, query := range c.ClickHouseQueries {
if err := query.Validate(); err != nil {
return fmt.Errorf("clickhouse query %s is invalid: %w", name, err)
}
}
}
if c.QueryType == QueryTypePromQL {
for name, query := range c.PromQueries {
if err := query.Validate(); err != nil {
return fmt.Errorf("prom query %s is invalid: %w", name, err)
}
}
}
if err := c.PanelType.Validate(); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"panel type is invalid: %s",
err.Error(),
)
return fmt.Errorf("panel type is invalid: %w", err)
}
if err := c.QueryType.Validate(); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query type is invalid: %s",
err.Error(),
)
}
if len(c.Queries) == 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"at least one query is required",
)
}
// Validate each query
for i, envelope := range c.Queries {
queryId := qbtypes.GetQueryIdentifier(envelope, i)
switch envelope.Type {
case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
queryId,
err.Error(),
)
}
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
queryId,
err.Error(),
)
}
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
if err := spec.Validate(qbtypes.RequestTypeTimeSeries); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
queryId,
err.Error(),
)
}
default:
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query spec type for %s",
queryId,
)
}
case qbtypes.QueryTypePromQL:
spec, ok := envelope.Spec.(qbtypes.PromQuery)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Query == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
queryId,
)
}
if err := validatePromQLQuery(spec.Query); err != nil {
return err
}
case qbtypes.QueryTypeClickHouseSQL:
spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Query == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
queryId,
)
}
if err := validateClickHouseQuery(spec.Query); err != nil {
return err
}
case qbtypes.QueryTypeFormula:
spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if err := spec.Validate(); err != nil {
return err
}
case qbtypes.QueryTypeTraceOperator:
spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator)
if !ok {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
err := spec.ValidateTraceOperator(c.Queries)
if err != nil {
return err
}
default:
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query type '%s' for %s",
envelope.Type,
queryId,
).WithAdditional(
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql, trace_operator",
)
}
}
// Check if all queries are disabled
if allDisabled := checkQueriesDisabled(c); allDisabled {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"all queries are disabled - at least one query must be enabled",
)
return fmt.Errorf("query type is invalid: %w", err)
}
return nil
@@ -1331,7 +1203,7 @@ func (f *FilterSet) Scan(src interface{}) error {
func (f *FilterSet) Value() (driver.Value, error) {
filterSetJson, err := json.Marshal(f)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "could not serialize FilterSet to JSON")
return nil, errors.Wrap(err, "could not serialize FilterSet to JSON")
}
return filterSetJson, nil
}

View File

@@ -1,137 +0,0 @@
package v3
import (
"bytes"
"fmt"
"text/template"
"time"
clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
"github.com/SigNoz/signoz/pkg/errors"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/prometheus/prometheus/promql/parser"
)
type QueryParseError struct {
StartPosition *int
EndPosition *int
ErrorMessage string
Query string
}
func (e *QueryParseError) Error() string {
if e.StartPosition != nil && e.EndPosition != nil {
return fmt.Sprintf("query parse error: %s at position %d:%d", e.ErrorMessage, *e.StartPosition, *e.EndPosition)
}
return fmt.Sprintf("query parse error: %s", e.ErrorMessage)
}
// validatePromQLQuery validates a PromQL query syntax using the Prometheus parser
func validatePromQLQuery(query string) error {
_, err := parser.ParseExpr(query)
if err != nil {
if syntaxErrs, ok := err.(parser.ParseErrors); ok {
syntaxErr := syntaxErrs[0]
startPosition := int(syntaxErr.PositionRange.Start)
endPosition := int(syntaxErr.PositionRange.End)
return &QueryParseError{
StartPosition: &startPosition,
EndPosition: &endPosition,
ErrorMessage: syntaxErr.Error(),
Query: query,
}
}
}
return err
}
// validateClickHouseQuery validates a ClickHouse SQL query syntax using the ClickHouse parser
func validateClickHouseQuery(query string) error {
// Assign the default template variables with dummy values
variables := make(map[string]interface{})
start := time.Now().UnixMilli()
end := start + 1000
querytemplate.AssignReservedVars(variables, start, end)
// Apply the values for default template variables before parsing the query
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(query)
if err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"failed to parse clickhouse query: %s",
err.Error(),
)
}
var queryBuffer bytes.Buffer
err = tmpl.Execute(&queryBuffer, variables)
if err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"failed to execute clickhouse query template: %s",
err.Error(),
)
}
// Parse the ClickHouse query with the default template variables applied
p := clickhouse.NewParser(queryBuffer.String())
_, err = p.ParseStmts()
if err != nil {
// TODO: errors returned here is errors.errorString
// we should think on using some other library that parses the CH query in more accurate manner,
// current CH parser does very minimal checks and on just the known keywords, without validating the syntax of given query.
// Sample Error: "line 0:36 expected table name or subquery, got ;\nSELECT department, avg(salary) FROM ;\n ^\n"
return &QueryParseError{
ErrorMessage: err.Error(),
Query: query,
}
}
return nil
}
// checkQueriesDisabled checks if all queries are disabled. Returns true if all queries are disabled, false otherwise.
func checkQueriesDisabled(compositeQuery *CompositeQuery) bool {
for _, envelope := range compositeQuery.Queries {
switch envelope.Type {
case qbtypes.QueryTypeBuilder, qbtypes.QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
if !spec.Disabled {
return false
}
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
if !spec.Disabled {
return false
}
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
if !spec.Disabled {
return false
}
}
case qbtypes.QueryTypeFormula:
if spec, ok := envelope.Spec.(qbtypes.QueryBuilderFormula); ok && !spec.Disabled {
return false
}
case qbtypes.QueryTypeTraceOperator:
if spec, ok := envelope.Spec.(qbtypes.QueryBuilderTraceOperator); ok && !spec.Disabled {
return false
}
case qbtypes.QueryTypeJoin:
if spec, ok := envelope.Spec.(qbtypes.QueryBuilderJoin); ok && !spec.Disabled {
return false
}
case qbtypes.QueryTypePromQL:
if spec, ok := envelope.Spec.(qbtypes.PromQuery); ok && !spec.Disabled {
return false
}
case qbtypes.QueryTypeClickHouseSQL:
if spec, ok := envelope.Spec.(qbtypes.ClickHouseQuery); ok && !spec.Disabled {
return false
}
}
}
// If we reach here, all queries are disabled
return true
}

View File

@@ -1,528 +0,0 @@
package v3
import (
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/require"
)
func TestValidateCompositeQuery(t *testing.T) {
tests := []struct {
name string
compositeQuery *CompositeQuery
wantErr bool
errContains string
}{
{
name: "nil composite query should return error",
compositeQuery: nil,
wantErr: true,
errContains: "composite query is required",
},
{
name: "empty queries array should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{},
},
wantErr: true,
errContains: "at least one query is required",
},
{
name: "invalid input error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Unit: "some_invalid_unit",
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
},
},
wantErr: true,
errContains: "invalid unit",
},
{
name: "valid metric builder query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Unit: "bytes", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
},
},
wantErr: false,
},
{
name: "valid log builder query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Unit: "µs", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Name: "log_query",
Signal: telemetrytypes.SignalLogs,
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
},
},
},
},
wantErr: false,
},
{
name: "valid trace builder query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Unit: "MBs", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "trace_query",
Signal: telemetrytypes.SignalTraces,
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
},
},
},
},
wantErr: false,
},
{
name: "valid PromQL query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Unit: "{req}/s", // valid unit
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
},
},
wantErr: false,
},
{
name: "valid ClickHouse query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
},
},
},
},
wantErr: false,
},
{
name: "valid formula query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeFormula,
Spec: qbtypes.QueryBuilderFormula{
Name: "formula_query",
Expression: "A + B",
},
},
},
},
wantErr: false,
},
// We've not added support for join query yet
{
name: "valid join query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeJoin,
Spec: qbtypes.QueryBuilderJoin{
Name: "join_query",
Left: qbtypes.QueryRef{Name: "A"},
Right: qbtypes.QueryRef{Name: "B"},
Type: qbtypes.JoinTypeInner,
On: "service_name",
},
},
},
},
wantErr: true,
errContains: "unknown query type",
},
{
name: "valid trace operator query should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
},
},
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "B",
Signal: telemetrytypes.SignalTraces,
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
},
},
{
Type: qbtypes.QueryTypeTraceOperator,
Spec: qbtypes.QueryBuilderTraceOperator{
Name: "trace_operator",
Expression: "A && B",
},
},
},
},
wantErr: false,
},
{
name: "invalid metric builder query - missing aggregation should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{},
},
},
},
},
wantErr: true,
errContains: "invalid",
},
{
name: "invalid PromQL query - empty query should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "",
},
},
},
},
wantErr: true,
errContains: "query expression is required",
},
{
name: "invalid PromQL query - syntax error should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m",
},
},
},
},
wantErr: true,
errContains: "unclosed left parenthesis",
},
{
name: "invalid ClickHouse query - empty query should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "",
},
},
},
},
wantErr: true,
errContains: "query expression is required",
},
{
name: "invalid ClickHouse query - syntax error should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "SELECT * FROM metrics WHERE",
},
},
},
},
wantErr: true,
errContains: "query parse error",
},
{
name: "invalid formula query - empty expression should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeFormula,
Spec: qbtypes.QueryBuilderFormula{
Name: "formula_query",
Expression: "",
},
},
},
},
wantErr: true,
errContains: "formula expression cannot be blank",
},
{
name: "invalid trace operator query - empty expression should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeTraceOperator,
Spec: qbtypes.QueryBuilderTraceOperator{
Name: "trace_operator",
Expression: "",
},
},
},
},
wantErr: true,
errContains: "expression cannot be empty",
},
{
name: "all queries disabled should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
Disabled: true,
},
},
},
},
wantErr: true,
errContains: "all queries are disabled",
},
{
name: "mixed disabled and enabled queries should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
Disabled: false,
},
},
},
},
wantErr: false,
},
{
name: "multiple valid queries should pass",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
{
Type: qbtypes.QueryTypeClickHouseSQL,
Spec: qbtypes.ClickHouseQuery{
Name: "ch_query",
Query: "SELECT count(*) FROM metrics WHERE metric_name = 'cpu_usage'",
},
},
},
},
wantErr: false,
},
{
name: "invalid query in multiple queries should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "metric_query",
Signal: telemetrytypes.SignalMetrics,
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "cpu_usage",
},
},
},
},
{
Type: qbtypes.QueryTypePromQL,
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "invalid promql syntax [",
},
},
},
},
wantErr: true,
errContains: "query parse error",
},
{
name: "unknown query type should return error",
compositeQuery: &CompositeQuery{
QueryType: QueryTypeBuilder,
PanelType: PanelTypeGraph,
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryType{String: valuer.NewString("invalid_query_type")},
Spec: qbtypes.PromQuery{
Name: "prom_query",
Query: "rate(http_requests_total[5m])",
},
},
},
},
wantErr: true,
errContains: "unknown query type",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.compositeQuery.Validate()
if tt.wantErr {
require.Error(t, err)
if tt.errContains != "" {
require.Contains(t, err.Error(), tt.errContains)
}
} else {
require.NoError(t, err)
}
})
}
}

View File

@@ -133,7 +133,7 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v
Variables: make(map[string]interface{}, 0),
NoCache: true,
}
querytemplate.AssignReservedVars(params.Variables, start, end)
querytemplate.AssignReservedVarsV3(params)
for name, chQuery := range r.ruleCondition.CompositeQuery.ClickHouseQueries {
if chQuery.Disabled {
continue

View File

@@ -2,21 +2,26 @@ package querytemplate
import (
"fmt"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)
func AssignReservedVars(variables map[string]interface{}, start int64, end int64) {
variables["start_timestamp"] = start / 1000
variables["end_timestamp"] = end / 1000
// AssignReservedVars assigns values for go template vars. assumes that
// model.QueryRangeParamsV3.Start and End are Unix Nano timestamps
func AssignReservedVarsV3(queryRangeParams *v3.QueryRangeParamsV3) {
queryRangeParams.Variables["start_timestamp"] = queryRangeParams.Start / 1000
queryRangeParams.Variables["end_timestamp"] = queryRangeParams.End / 1000
variables["start_timestamp_ms"] = start
variables["end_timestamp_ms"] = end
queryRangeParams.Variables["start_timestamp_ms"] = queryRangeParams.Start
queryRangeParams.Variables["end_timestamp_ms"] = queryRangeParams.End
variables["SIGNOZ_START_TIME"] = start
variables["SIGNOZ_END_TIME"] = end
queryRangeParams.Variables["SIGNOZ_START_TIME"] = queryRangeParams.Start
queryRangeParams.Variables["SIGNOZ_END_TIME"] = queryRangeParams.End
variables["start_timestamp_nano"] = start * 1e6
variables["end_timestamp_nano"] = end * 1e6
queryRangeParams.Variables["start_timestamp_nano"] = queryRangeParams.Start * 1e6
queryRangeParams.Variables["end_timestamp_nano"] = queryRangeParams.End * 1e6
queryRangeParams.Variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.Start/1000)
queryRangeParams.Variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", queryRangeParams.End/1000)
variables["start_datetime"] = fmt.Sprintf("toDateTime(%d)", start/1000)
variables["end_datetime"] = fmt.Sprintf("toDateTime(%d)", end/1000)
}

View File

@@ -14,7 +14,7 @@ import (
func NewAuthNs(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
emailPasswordAuthN := emailpasswordauthn.New(store)
googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store)
googleCallbackAuthN, err := googlecallbackauthn.New(ctx, store, providerSettings)
if err != nil {
return nil, err
}

View File

@@ -10,6 +10,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/fields/implfields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
@@ -25,6 +27,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Handlers struct {
@@ -38,10 +42,20 @@ type Handlers struct {
Services services.Handler
MetricsExplorer metricsexplorer.Handler
Global global.Handler
FlaggerHandler flagger.Handler
Flagger flagger.Handler
Fields fields.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global, flaggerService flagger.Flagger) Handlers {
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggr flagger.Flagger,
telemetryMetadataStore telemetrytypes.MetadataStore,
telemetryStore telemetrystore.TelemetryStore,
) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -53,6 +67,7 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Global: signozglobal.NewHandler(global),
FlaggerHandler: flagger.NewHandler(flaggerService),
Flagger: flagger.NewHandler(flaggr),
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore,telemetryStore),
}
}

View File

@@ -42,7 +42,7 @@ func TestNewHandlers(t *testing.T) {
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil,nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -14,6 +14,7 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/fields"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -47,6 +48,7 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ dashboard.Module }{},
struct{ dashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ fields.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

View File

@@ -167,15 +167,9 @@ func NewSQLMigrationProviderFactories(
func NewTelemetryStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]] {
return factory.MustNewNamedMap(
clickhousetelemetrystore.NewFactory(
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewSettingsFactory(s)
}),
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewLoggingFactory()
}),
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewInstrumentationFactory(s)
}),
telemetrystorehook.NewSettingsFactory(),
telemetrystorehook.NewLoggingFactory(),
telemetrystorehook.NewInstrumentationFactory(),
),
)
}
@@ -243,10 +237,11 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
implpreference.NewHandler(modules.Preference),
signozglobal.NewHandler(global),
implpromote.NewHandler(modules.Promote),
handlers.FlaggerHandler,
handlers.Flagger,
modules.Dashboard,
handlers.Dashboard,
handlers.MetricsExplorer,
handlers.Fields,
),
)
}

View File

@@ -382,7 +382,7 @@ func New(
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboardModule)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, telemetryMetadataStore, telemetrystore)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(

View File

@@ -16,13 +16,13 @@ type provider struct {
hooks []telemetrystore.TelemetryStoreHook
}
func NewFactory(hookFactories ...telemetrystore.TelemetryStoreHookFactoryFunc) factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config] {
func NewFactory(hookFactories ...factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config]) factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config] {
return factory.NewProviderFactory(factory.MustNewName("clickhouse"), func(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStore, error) {
return New(ctx, providerSettings, config, hookFactories...)
})
}
func New(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, hookFactories ...telemetrystore.TelemetryStoreHookFactoryFunc) (telemetrystore.TelemetryStore, error) {
func New(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, hookFactories ...factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config]) (telemetrystore.TelemetryStore, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/telemetrystore/clickhousetelemetrystore")
options, err := clickhouse.ParseDSN(config.Clickhouse.DSN)
@@ -40,14 +40,10 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
return nil, err
}
var version string
if err := chConn.QueryRow(ctx, "SELECT version()").Scan(&version); err != nil {
return nil, err
}
hooks := make([]telemetrystore.TelemetryStoreHook, len(hookFactories))
for i, hookFactory := range hookFactories {
hook, err := hookFactory(version).New(ctx, providerSettings, config)
hook, err := hookFactory.New(ctx, providerSettings, config)
if err != nil {
return nil, err
}

View File

@@ -4,7 +4,6 @@ import (
"context"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/factory"
)
type TelemetryStore interface {
@@ -20,7 +19,6 @@ type TelemetryStoreHook interface {
AfterQuery(ctx context.Context, event *QueryEvent)
}
type TelemetryStoreHookFactoryFunc func(string) factory.ProviderFactory[TelemetryStoreHook, Config]
func WrapBeforeQuery(hooks []TelemetryStoreHook, ctx context.Context, event *QueryEvent) context.Context {
for _, hook := range hooks {

View File

@@ -13,23 +13,21 @@ import (
)
type instrumentation struct {
clickhouseVersion string
clickhouseCluster string
tracer trace.Tracer
meter metric.Meter
}
func NewInstrumentationFactory(version string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
func NewInstrumentationFactory() factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return factory.NewProviderFactory(factory.MustNewName("instrumentation"), func(ctx context.Context, ps factory.ProviderSettings, c telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
return NewInstrumentation(ctx, ps, c, version)
return NewInstrumentation(ctx, ps, c)
})
}
func NewInstrumentation(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, version string) (telemetrystore.TelemetryStoreHook, error) {
func NewInstrumentation(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
meter := providerSettings.MeterProvider.Meter("github.com/SigNoz/signoz/pkg/telemetrystore")
return &instrumentation{
clickhouseVersion: version,
clickhouseCluster: config.Clickhouse.Cluster,
tracer: providerSettings.TracerProvider.Tracer("github.com/SigNoz/signoz/pkg/telemetrystore"),
meter: meter,
@@ -54,7 +52,6 @@ func (hook *instrumentation) AfterQuery(ctx context.Context, event *telemetrysto
attrs = append(
attrs,
semconv.DBStatementKey.String(event.Query),
attribute.String("db.version", hook.clickhouseVersion),
semconv.DBSystemKey.String("clickhouse"),
semconv.DBOperationKey.String(event.Operation),
attribute.String("clickhouse.cluster", hook.clickhouseCluster),

View File

@@ -2,7 +2,6 @@ package telemetrystorehook
import (
"context"
"strings"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/factory"
@@ -11,19 +10,17 @@ import (
)
type provider struct {
clickHouseVersion string
settings telemetrystore.QuerySettings
}
func NewSettingsFactory(version string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
func NewSettingsFactory() factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return factory.NewProviderFactory(factory.MustNewName("settings"), func(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
return NewSettings(ctx, providerSettings, config, version)
return NewSettings(ctx, providerSettings, config)
})
}
func NewSettings(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config, version string) (telemetrystore.TelemetryStoreHook, error) {
func NewSettings(ctx context.Context, providerSettings factory.ProviderSettings, config telemetrystore.Config) (telemetrystore.TelemetryStoreHook, error) {
return &provider{
clickHouseVersion: version,
settings: config.Clickhouse.QuerySettings,
}, nil
}
@@ -75,12 +72,9 @@ func (h *provider) BeforeQuery(ctx context.Context, _ *telemetrystore.QueryEvent
settings["result_overflow_mode"] = ctx.Value("result_overflow_mode")
}
// ClickHouse version check is added since this setting is not support on version below 25.5
if strings.HasPrefix(h.clickHouseVersion, "25") && !h.settings.SecondaryIndicesEnableBulkFiltering {
// TODO(srikanthccv): enable it when the "Cannot read all data" issue is fixed
// https://github.com/ClickHouse/ClickHouse/issues/82283
settings["secondary_indices_enable_bulk_filtering"] = false
}
// TODO(srikanthccv): enable it when the "Cannot read all data" issue is fixed
// https://github.com/ClickHouse/ClickHouse/issues/82283
settings["secondary_indices_enable_bulk_filtering"] = false
ctx = clickhouse.Context(ctx, clickhouse.WithSettings(settings))
return ctx

View File

@@ -44,7 +44,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/tokenizer/jwttokenizer")
if config.JWT.Secret == "" {
settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
settings.Logger().ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_TOKENIZER_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_TOKENIZER_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
}
lastObservedAtCache, err := ristretto.NewCache(&ristretto.Config[string, map[valuer.UUID]time.Time]{

View File

@@ -32,10 +32,12 @@ type Identity struct {
}
type CallbackIdentity struct {
Name string `json:"name"`
Email valuer.Email `json:"email"`
OrgID valuer.UUID `json:"orgId"`
State State `json:"state"`
Name string `json:"name"`
Email valuer.Email `json:"email"`
OrgID valuer.UUID `json:"orgId"`
State State `json:"state"`
Groups []string `json:"groups,omitempty"`
Role string `json:"role,omitempty"`
}
type State struct {
@@ -85,12 +87,14 @@ func NewIdentity(userID valuer.UUID, orgID valuer.UUID, email valuer.Email, role
}
}
func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State) *CallbackIdentity {
func NewCallbackIdentity(name string, email valuer.Email, orgID valuer.UUID, state State, groups []string, role string) *CallbackIdentity {
return &CallbackIdentity{
Name: name,
Email: email,
OrgID: orgID,
State: state,
Name: name,
Email: email,
OrgID: orgID,
State: state,
Groups: groups,
Role: role,
}
}

View File

@@ -63,6 +63,7 @@ type AuthDomainConfig struct {
SAML *SamlConfig `json:"samlConfig"`
Google *GoogleConfig `json:"googleAuthConfig"`
OIDC *OIDCConfig `json:"oidcConfig"`
RoleMapping *RoleMapping `json:"roleMapping"`
}
type AuthDomain struct {

View File

@@ -2,10 +2,14 @@ package authtypes
import (
"encoding/json"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
const wildCardDomain = "*"
type GoogleConfig struct {
// ClientID is the application's ID. For example, 292085223830.apps.googleusercontent.com.
ClientID string `json:"clientId"`
@@ -15,6 +19,30 @@ type GoogleConfig struct {
// What is the meaning of this? Should we remove this?
RedirectURI string `json:"redirectURI"`
// Whether to fetch the Google workspace groups (required additional API scopes)
FetchGroups bool `json:"fetchGroups"`
// Service Account creds JSON stored for Google Admin SDK access
// This is content of the JSON file stored directly into db as string
// Required if FetchGroups is true (unless running on GCE with default credentials)
ServiceAccountJSON string `json:"serviceAccountJson,omitempty"`
// Map of workspace domain to admin email for service account impersonation
// The service account will impersonate this admin to call the directory API
// Use "*" as key for wildcard/default that matches any domain
// Example: {"example.com": "admin@example.com", "*": "fallbackadmin@company.com"}
DomainToAdminEmail map[string]valuer.Email `json:"domainToAdminEmail,omitempty"`
// If true, fetch transitive group membership (recursive - groups that contains other groups)
FetchTransitiveGroupMembership bool `json:"fetchTransitiveGroupMembership,omitempty"`
// Optional list of allowed groups
// If this is present, only users belonging to one of these groups will be allowed to login
AllowedGroups []string `json:"allowedGroups,omitempty"`
// Whether to skip email verification. Defaults to "false"
InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"`
}
func (config *GoogleConfig) UnmarshalJSON(data []byte) error {
@@ -33,6 +61,37 @@ func (config *GoogleConfig) UnmarshalJSON(data []byte) error {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required")
}
if temp.FetchGroups {
if len(temp.DomainToAdminEmail) == 0 {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "domainToAdminEmail is required if fetchGroups is true")
}
if temp.ServiceAccountJSON == "" {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "serviceAccountJSON is required if fetchGroups is true")
}
}
if len(temp.AllowedGroups) > 0 && !temp.FetchGroups {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "fetchGroups must be true when allowedGroups is configured")
}
*config = GoogleConfig(temp)
return nil
}
// GetAdminEmailForDomain returns the workspace admin email that the service
// account should impersonate for the given user's email domain. When the
// domain has no explicit entry, the wildcard ("*") entry is used; if that is
// also absent, the zero-value email's string form is returned.
func (config *GoogleConfig) GetAdminEmailForDomain(userEmail string) string {
	domain := extractDomainFromEmail(userEmail)
	adminEmail, ok := config.DomainToAdminEmail[domain]
	if !ok {
		adminEmail = config.DomainToAdminEmail[wildCardDomain]
	}
	return adminEmail.StringValue()
}
// extractDomainFromEmail returns the part of email after the last "@".
// Inputs without an "@" map to the wildcard domain so that admin-email
// lookups fall through to the wildcard entry.
func extractDomainFromEmail(email string) string {
	at := strings.LastIndex(email, "@")
	if at < 0 {
		return wildCardDomain
	}
	return email[at+1:]
}

View File

@@ -0,0 +1,133 @@
package authtypes
import (
"encoding/json"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
)
// AttributeMapping configures which keys in an IDP claim/token/attribute map
// carry the user's identity fields. Empty keys are replaced with the
// conventional defaults during JSON unmarshalling.
type AttributeMapping struct {
	// Key which contains the email in the claim/token/attributes map. Defaults to "email"
	Email string `json:"email"`
	// Key which contains the name in the claim/token/attributes map. Defaults to "name"
	Name string `json:"name"`
	// Key which contains the groups in the claim/token/attributes map. Defaults to "groups"
	Groups string `json:"groups"`
	// Key which contains the role in the claim/token/attributes map. Defaults to "role"
	Role string `json:"role"`
}
// UnmarshalJSON decodes an AttributeMapping, filling any empty key with its
// conventional default ("email", "name", "groups", "role").
func (attr *AttributeMapping) UnmarshalJSON(data []byte) error {
	// Alias sheds the custom UnmarshalJSON to avoid infinite recursion.
	type Alias AttributeMapping
	var decoded Alias
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}

	// Apply per-field defaults table-style instead of one if per field.
	for _, d := range []struct {
		field    *string
		fallback string
	}{
		{&decoded.Email, "email"},
		{&decoded.Name, "name"},
		{&decoded.Groups, "groups"},
		{&decoded.Role, "role"},
	} {
		if *d.field == "" {
			*d.field = d.fallback
		}
	}

	*attr = AttributeMapping(decoded)
	return nil
}
// RoleMapping configures how SigNoz roles are derived for users signing in
// through SSO. See NewRoleFromCallbackIdentity for the resolution order.
type RoleMapping struct {
	// Default role for any new SSO users. Defaults to "VIEWER"
	DefaultRole string `json:"defaultRole"`
	// Map of IDP group names to SigNoz roles. Key is group name, value is SigNoz role
	GroupMappings map[string]string `json:"groupMappings"`
	// If true, use the role claim directly from IDP instead of group mappings
	UseRoleAttribute bool `json:"useRoleAttribute"`
}
// UnmarshalJSON decodes a RoleMapping and rejects it when the default role or
// any group-mapped role is not a known SigNoz role (comparison is
// case-insensitive via upper-casing).
func (typ *RoleMapping) UnmarshalJSON(data []byte) error {
	// Alias sheds the custom UnmarshalJSON to avoid infinite recursion.
	type Alias RoleMapping
	var decoded Alias
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}

	if decoded.DefaultRole != "" {
		if _, err := types.NewRole(strings.ToUpper(decoded.DefaultRole)); err != nil {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid default role %s", decoded.DefaultRole)
		}
	}

	for group, role := range decoded.GroupMappings {
		if _, err := types.NewRole(strings.ToUpper(role)); err != nil {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid role %s for group %s", role, group)
		}
	}

	*typ = RoleMapping(decoded)
	return nil
}
// NewRoleFromCallbackIdentity resolves the SigNoz role for an SSO user.
// Resolution order: the IDP-provided role attribute (when UseRoleAttribute is
// set and the value parses), then the most privileged role mapped from the
// user's groups, then the configured default role, falling back to viewer.
// A nil receiver resolves to viewer.
func (roleMapping *RoleMapping) NewRoleFromCallbackIdentity(callbackIdentity *CallbackIdentity) types.Role {
	if roleMapping == nil {
		return types.RoleViewer
	}

	// 1. Trust the role attribute from the IDP when configured to do so.
	if roleMapping.UseRoleAttribute && callbackIdentity.Role != "" {
		if role, err := types.NewRole(strings.ToUpper(callbackIdentity.Role)); err == nil {
			return role
		}
	}

	// 2. Map the user's groups to roles and keep the most privileged one.
	if len(roleMapping.GroupMappings) > 0 && len(callbackIdentity.Groups) > 0 {
		matched := false
		best := types.RoleViewer
		for _, group := range callbackIdentity.Groups {
			mapped, ok := roleMapping.GroupMappings[group]
			if !ok {
				continue
			}
			// A matching group counts even when its mapped role fails to
			// parse — the viewer floor then applies instead of falling
			// through to the default role.
			matched = true
			if role, err := types.NewRole(strings.ToUpper(mapped)); err == nil && compareRoles(role, best) > 0 {
				best = role
			}
		}
		if matched {
			return best
		}
	}

	// 3. Fall back to the configured default role, if it parses.
	if roleMapping.DefaultRole != "" {
		if role, err := types.NewRole(strings.ToUpper(roleMapping.DefaultRole)); err == nil {
			return role
		}
	}

	return types.RoleViewer
}
// compareRoles orders roles by privilege: viewer < editor < admin.
// Returns a negative, zero, or positive value as a is less, equally, or more
// privileged than b. Unknown roles rank the same as viewer.
func compareRoles(a, b types.Role) int {
	rank := func(r types.Role) int {
		switch r {
		case types.RoleAdmin:
			return 2
		case types.RoleEditor:
			return 1
		default:
			return 0
		}
	}
	return rank(a) - rank(b)
}

View File

@@ -22,7 +22,7 @@ type OIDCConfig struct {
ClientSecret string `json:"clientSecret"`
// Mapping of claims to the corresponding fields in the token.
ClaimMapping ClaimMapping `json:"claimMapping"`
ClaimMapping AttributeMapping `json:"claimMapping"`
// Whether to skip email verification. Defaults to "false"
InsecureSkipEmailVerified bool `json:"insecureSkipEmailVerified"`
@@ -31,11 +31,6 @@ type OIDCConfig struct {
GetUserInfo bool `json:"getUserInfo"`
}
type ClaimMapping struct {
// Configurable key which contains the email claims. Defaults to "email"
Email string `json:"email"`
}
func (config *OIDCConfig) UnmarshalJSON(data []byte) error {
type Alias OIDCConfig
@@ -56,8 +51,10 @@ func (config *OIDCConfig) UnmarshalJSON(data []byte) error {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "clientSecret is required")
}
if temp.ClaimMapping.Email == "" {
temp.ClaimMapping.Email = "email"
if temp.ClaimMapping == (AttributeMapping{}) {
if err := json.Unmarshal([]byte("{}"), &temp.ClaimMapping); err != nil {
return err
}
}
*config = OIDCConfig(temp)

View File

@@ -20,6 +20,9 @@ type SamlConfig struct {
// For providers like jumpcloud, this should be set to true.
// Note: This is the reverse of WantAuthnRequestsSigned. If WantAuthnRequestsSigned is false, then InsecureSkipAuthNRequestsSigned should be true.
InsecureSkipAuthNRequestsSigned bool `json:"insecureSkipAuthNRequestsSigned"`
// Mapping of SAML assertion attributes
AttributeMapping AttributeMapping `json:"attributeMapping"`
}
func (config *SamlConfig) UnmarshalJSON(data []byte) error {
@@ -42,6 +45,12 @@ func (config *SamlConfig) UnmarshalJSON(data []byte) error {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "samlCert is required")
}
if temp.AttributeMapping == (AttributeMapping{}) {
if err := json.Unmarshal([]byte("{}"), &temp.AttributeMapping); err != nil {
return err
}
}
*config = SamlConfig(temp)
return nil
}

View File

@@ -553,11 +553,6 @@ func (f Function) Copy() Function {
return c
}
// Validate validates the Function by calling Validate on its Name
func (f Function) Validate() error {
return f.Name.Validate()
}
type LimitBy struct {
// keys to limit by
Keys []string `json:"keys"`

View File

@@ -73,43 +73,6 @@ func (f *QueryBuilderFormula) UnmarshalJSON(data []byte) error {
return nil
}
// Validate checks if the QueryBuilderFormula fields are valid
func (f QueryBuilderFormula) Validate() error {
// Validate name is not blank
if strings.TrimSpace(f.Name) == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"formula name cannot be blank",
)
}
// Validate expression is not blank
if strings.TrimSpace(f.Expression) == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"formula expression cannot be blank",
)
}
// Validate functions if present
for i, fn := range f.Functions {
if err := fn.Validate(); err != nil {
fnId := fmt.Sprintf("function #%d", i+1)
if f.Name != "" {
fnId = fmt.Sprintf("function #%d in formula '%s'", i+1, f.Name)
}
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid %s: %s",
fnId,
err.Error(),
)
}
}
return nil
}
// small container to store the query name and index or alias reference
// for a variable in the formula expression
// read below for more details on aggregation references

View File

@@ -5,7 +5,6 @@ import (
"slices"
"strconv"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -34,37 +33,6 @@ var (
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)
// Validate checks if the FunctionName is valid and one of the known types
func (fn FunctionName) Validate() error {
switch fn {
case FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero:
return nil
default:
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid function name: %s",
fn.StringValue(),
)
}
}
// ApplyFunction applies the given function to the result data
func ApplyFunction(fn Function, result *TimeSeries) *TimeSeries {
// Extract the function name and arguments

View File

@@ -276,6 +276,31 @@ func (r *QueryRangeRequest) NumAggregationForQuery(name string) int64 {
return int64(numAgg)
}
// HasOrderSpecified returns true if any query in the composite query carries
// an explicit order-by clause.
func (r *QueryRangeRequest) HasOrderSpecified() bool {
	for _, query := range r.CompositeQuery.Queries {
		// Specs without an Order field (or unknown spec types) contribute 0.
		var orderLen int
		switch spec := query.Spec.(type) {
		case QueryBuilderQuery[TraceAggregation]:
			orderLen = len(spec.Order)
		case QueryBuilderQuery[LogAggregation]:
			orderLen = len(spec.Order)
		case QueryBuilderQuery[MetricAggregation]:
			orderLen = len(spec.Order)
		case QueryBuilderFormula:
			orderLen = len(spec.Order)
		}
		if orderLen > 0 {
			return true
		}
	}
	return false
}
func (r *QueryRangeRequest) FuncsForQuery(name string) []Function {
funcs := []Function{}
for _, query := range r.CompositeQuery.Queries {

View File

@@ -10,8 +10,8 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// GetQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func GetQueryIdentifier(envelope QueryEnvelope, index int) string {
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
switch envelope.Type {
case QueryTypeBuilder, QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
@@ -567,7 +567,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
switch spec := envelope.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
@@ -583,7 +583,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
}
case QueryBuilderQuery[LogAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
@@ -599,7 +599,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
}
case QueryBuilderQuery[MetricAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
@@ -614,7 +614,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
queryNames[spec.Name] = true
}
default:
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown spec type for %s",
@@ -625,7 +625,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// Formula validation is handled separately
spec, ok := envelope.Spec.(QueryBuilderFormula)
if !ok {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -633,7 +633,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Expression == "" {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"expression is required for %s",
@@ -644,7 +644,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// Join validation is handled separately
_, ok := envelope.Spec.(QueryBuilderJoin)
if !ok {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -654,7 +654,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
case QueryTypeTraceOperator:
spec, ok := envelope.Spec.(QueryBuilderTraceOperator)
if !ok {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -662,7 +662,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Expression == "" {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"expression is required for %s",
@@ -673,7 +673,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// PromQL validation is handled separately
spec, ok := envelope.Spec.(PromQuery)
if !ok {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -681,7 +681,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Query == "" {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
@@ -692,7 +692,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
// ClickHouse SQL validation is handled separately
spec, ok := envelope.Spec.(ClickHouseQuery)
if !ok {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
@@ -700,7 +700,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
if spec.Query == "" {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
@@ -708,7 +708,7 @@ func (r *QueryRangeRequest) validateCompositeQuery() error {
)
}
default:
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query type '%s' for %s",
@@ -735,7 +735,7 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
// Validate each query
for i, envelope := range c.Queries {
if err := validateQueryEnvelope(envelope, requestType); err != nil {
queryId := GetQueryIdentifier(envelope, i)
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
}

View File

@@ -405,7 +405,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"spec": [{
"name": "existing_threshold",
"target": 50.0,
"targetUnit": "MBs",
"targetUnit": "MB",
"ruleUnit": "bytes",
"matchType": "1",
"op": "1"

View File

@@ -252,15 +252,6 @@ func (b BasicRuleThreshold) Validate() error {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid match type: %s", string(b.MatchType)))
}
// Only validate unit if specified
if b.TargetUnit != "" {
unit := converter.Unit(b.TargetUnit)
err := unit.Validate()
if err != nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid unit"))
}
}
return errors.Join(errs...)
}

View File

@@ -5,6 +5,7 @@ import (
"strings"
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -152,3 +153,99 @@ type FieldValueSelector struct {
Value string `json:"value"`
Limit int `json:"limit"`
}
// GettableFieldKeys is the response body of the field-keys endpoint.
type GettableFieldKeys struct {
	// Keys maps a field name to the matching telemetry field keys.
	Keys map[string][]*TelemetryFieldKey `json:"keys"`
	// Complete reports whether the full key set was returned (not truncated).
	Complete bool `json:"complete"`
}

// PostableFieldKeysParams are the query parameters of the field-keys endpoint.
type PostableFieldKeysParams struct {
	Signal         Signal         `query:"signal"`
	Source         Source         `query:"source"`
	Limit          int            `query:"limit"`
	StartUnixMilli int64          `query:"startUnixMilli"`
	EndUnixMilli   int64          `query:"endUnixMilli"`
	FieldContext   FieldContext   `query:"fieldContext"`
	FieldDataType  FieldDataType  `query:"fieldDataType"`
	MetricContext  *MetricContext `query:"metricContext"`
	Name           string         `query:"name"`
	SearchText     string         `query:"searchText"`
}

// GettableFieldValues is the response body of the field-values endpoint.
type GettableFieldValues struct {
	// Values holds the matching values for the requested field.
	Values *TelemetryFieldValues `json:"values"`
	// Complete reports whether the full value set was returned (not truncated).
	Complete bool `json:"complete"`
}

// PostableFieldValueParams are the query parameters of the field-values
// endpoint; it extends the key params with an optional existing query.
type PostableFieldValueParams struct {
	PostableFieldKeysParams
	ExistingQuery string `query:"existingQuery"`
}
// NewFieldKeySelectorFromPostableFieldKeysParams converts the raw query
// params of the field-keys endpoint into a FieldKeySelector, applying a
// default limit (1000) and rounding the start timestamp down to a 6-hour
// boundary.
func NewFieldKeySelectorFromPostableFieldKeysParams(params PostableFieldKeysParams) (*FieldKeySelector, error) {
	var req FieldKeySelector

	// Carry the signal into the selector so context inference below — and
	// callers such as NewFieldValueSelectorFromPostableFieldValueParams that
	// read keySelector.Signal — see the requested signal. The previous code
	// consulted an uninitialized local Signal here, which made the
	// signal-specific context validation a no-op.
	signal := params.Signal
	req.Signal = signal

	if params.Limit != 0 {
		req.Limit = params.Limit
	} else {
		// Default page size when the caller does not specify one.
		req.Limit = 1000
	}

	if params.StartUnixMilli != 0 {
		req.StartUnixMilli = params.StartUnixMilli
		// Round down to the nearest 6 hours (21600000 milliseconds)
		req.StartUnixMilli -= req.StartUnixMilli % 21600000
	}
	if params.EndUnixMilli != 0 {
		req.EndUnixMilli = params.EndUnixMilli
	}

	// NOTE(review): params.Source, FieldContext, FieldDataType, MetricContext
	// and Name are not propagated here — confirm whether that is intentional.
	if params.SearchText != "" && params.FieldContext == FieldContextUnspecified {
		parsedFieldKey := GetFieldKeyFromKeyText(params.SearchText)
		if parsedFieldKey.FieldContext != FieldContextUnspecified {
			// Only apply inferred context if it is valid for the current signal
			if isContextValidForSignal(parsedFieldKey.FieldContext, signal) {
				req.Name = parsedFieldKey.Name
				req.FieldContext = parsedFieldKey.FieldContext
			}
		}
	}

	return &req, nil
}
// NewFieldValueSelectorFromPostableFieldValueParams converts the query-string
// parameters of the field-values API into a FieldValueSelector.
//
// Defaults: Limit falls back to 50 when unset. SearchText becomes the value
// filter; an unspecified field context may be inferred from params.Name,
// subject to validity for the selected signal.
func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValueParams) (*FieldValueSelector, error) {
	var fieldValueSelector FieldValueSelector

	// Reuse the key-params conversion for the shared selector fields.
	keySelector, err := NewFieldKeySelectorFromPostableFieldKeysParams(params.PostableFieldKeysParams)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request").WithAdditional(err.Error())
	}

	if params.Name != "" && keySelector.FieldContext == FieldContextUnspecified {
		parsedFieldKey := GetFieldKeyFromKeyText(params.Name)
		if parsedFieldKey.FieldContext != FieldContextUnspecified {
			// Only apply inferred context if it is valid for the current signal
			if isContextValidForSignal(parsedFieldKey.FieldContext, keySelector.Signal) {
				fieldValueSelector.Name = parsedFieldKey.Name
				keySelector.FieldContext = parsedFieldKey.FieldContext
			}
		}
	}

	// NOTE(review): when params.Name is set but no context is inferred,
	// fieldValueSelector.Name is still empty and this line clears
	// keySelector.Name — confirm params.Name should not be the fallback here.
	keySelector.Name = fieldValueSelector.Name

	// NOTE(review): keySelector is mutated above but never attached to the
	// returned selector in this function — verify the key fields reach the
	// caller by some other path, otherwise these writes are lost.
	fieldValueSelector.ExistingQuery = params.ExistingQuery
	fieldValueSelector.Value = params.SearchText

	if params.Limit != 0 {
		fieldValueSelector.Limit = params.Limit
	} else {
		// Default page size for value suggestions.
		fieldValueSelector.Limit = 50
	}

	return &fieldValueSelector, nil
}

View File

@@ -154,3 +154,21 @@ func (f FieldContext) TagType() string {
}
return ""
}
// isContextValidForSignal reports whether a field context may be combined
// with the given signal. Resource, attribute, and scope contexts apply to
// every signal; the remaining contexts are signal-specific. An unknown or
// unspecified signal accepts any context.
func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
	switch ctx {
	case FieldContextResource, FieldContextAttribute, FieldContextScope:
		// Shared across all signals.
		return true
	}

	switch signal.StringValue() {
	case SignalLogs.StringValue():
		return ctx == FieldContextLog || ctx == FieldContextBody
	case SignalTraces.StringValue():
		return ctx == FieldContextSpan || ctx == FieldContextEvent || ctx == FieldContextTrace
	case SignalMetrics.StringValue():
		return ctx == FieldContextMetric
	default:
		// Don't reject contexts for signals we don't recognize.
		return true
	}
}

View File

@@ -1,5 +1,5 @@
from typing import Any, Callable, Dict
from urllib.parse import urljoin
from typing import Any, Callable, Dict, List
from urllib.parse import urljoin, urlparse
from xml.etree import ElementTree
import pytest
@@ -114,6 +114,43 @@ def create_saml_client(
"attribute.name": "Role",
},
},
{
"name": "groups",
"protocol": "saml",
"protocolMapper": "saml-group-membership-mapper",
"consentRequired": False,
"config": {
"full.path": "false",
"attribute.nameformat": "Basic",
"single": "true", # ! this was changed to true as we need the groups in the single attribute section
"friendly.name": "groups",
"attribute.name": "groups",
},
},
{
"name": "role attribute",
"protocol": "saml",
"protocolMapper": "saml-user-attribute-mapper",
"consentRequired": False,
"config": {
"attribute.nameformat": "Basic",
"user.attribute": "signoz_role",
"friendly.name": "signoz_role",
"attribute.name": "signoz_role",
},
},
{
"name": "displayName",
"protocol": "saml",
"protocolMapper": "saml-user-property-mapper",
"consentRequired": False,
"config": {
"attribute.nameformat": "Basic",
"user.attribute": "firstName",
"friendly.name": "displayName",
"attribute.name": "displayName",
},
},
],
"defaultClientScopes": ["saml_organization", "role_list"],
"optionalClientScopes": [],
@@ -163,6 +200,8 @@ def create_oidc_client(
realm_name="master",
)
_ensure_groups_client_scope(client)
client.create_client(
skip_exists=True,
payload={
@@ -208,6 +247,7 @@ def create_oidc_client(
"profile",
"basic",
"email",
"groups",
],
"optionalClientScopes": [
"address",
@@ -282,7 +322,9 @@ def get_oidc_settings(idp: types.TestContainerIDP) -> dict:
@pytest.fixture(name="create_user_idp", scope="function")
def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], None]:
def create_user_idp(
idp: types.TestContainerIDP,
) -> Callable[[str, str, bool, str, str], None]:
client = KeycloakAdmin(
server_url=idp.container.host_configs["6060"].base(),
username=IDP_ROOT_USERNAME,
@@ -292,17 +334,26 @@ def create_user_idp(idp: types.TestContainerIDP) -> Callable[[str, str, bool], N
created_users = []
def _create_user_idp(email: str, password: str, verified: bool = True) -> None:
user_id = client.create_user(
exist_ok=False,
payload={
"username": email,
"email": email,
"enabled": True,
"emailVerified": verified,
},
)
def _create_user_idp(
email: str,
password: str,
verified: bool = True,
first_name: str = "",
last_name: str = "",
) -> None:
payload = {
"username": email,
"email": email,
"enabled": True,
"emailVerified": verified,
}
if first_name:
payload["firstName"] = first_name
if last_name:
payload["lastName"] = last_name
user_id = client.create_user(exist_ok=False, payload=payload)
client.set_user_password(user_id, password, temporary=False)
created_users.append(user_id)
@@ -333,3 +384,344 @@ def idp_login(driver: webdriver.Chrome) -> Callable[[str, str], None]:
wait.until(EC.invisibility_of_element((By.ID, "kc-login")))
return _idp_login
@pytest.fixture(name="create_group_idp", scope="function")
def create_group_idp(idp: types.TestContainerIDP) -> Callable[[str], str]:
    """Yield a factory that creates Keycloak groups and deletes them on teardown."""
    admin = KeycloakAdmin(
        server_url=idp.container.host_configs["6060"].base(),
        username=IDP_ROOT_USERNAME,
        password=IDP_ROOT_PASSWORD,
        realm_name="master",
    )
    group_ids: List[str] = []

    def _create_group_idp(group_name: str) -> str:
        gid = admin.create_group({"name": group_name}, skip_exists=True)
        group_ids.append(gid)
        return gid

    yield _create_group_idp

    # Best-effort cleanup; the group may already have been removed.
    for gid in group_ids:
        try:
            admin.delete_group(gid)
        except Exception:  # pylint: disable=broad-exception-caught
            pass
@pytest.fixture(name="create_user_idp_with_groups", scope="function")
def create_user_idp_with_groups(
    idp: types.TestContainerIDP,
    create_group_idp: Callable[[str], str],  # pylint: disable=redefined-outer-name
) -> Callable[[str, str, bool, List[str]], None]:
    """Yield a factory that creates a Keycloak user and places it in the given groups."""
    admin = KeycloakAdmin(
        server_url=idp.container.host_configs["6060"].base(),
        username=IDP_ROOT_USERNAME,
        password=IDP_ROOT_PASSWORD,
        realm_name="master",
    )
    user_ids: List[str] = []

    def _create_user_idp_with_groups(
        email: str, password: str, verified: bool, groups: List[str]
    ) -> None:
        # Ensure the groups exist first (create_group_idp owns their cleanup).
        group_ids = [create_group_idp(group_name) for group_name in groups]

        uid = admin.create_user(
            exist_ok=False,
            payload={
                "username": email,
                "email": email,
                "enabled": True,
                "emailVerified": verified,
            },
        )
        admin.set_user_password(uid, password, temporary=False)
        user_ids.append(uid)

        # Attach the user to every requested group.
        for gid in group_ids:
            admin.group_user_add(uid, gid)

    yield _create_user_idp_with_groups

    # Best-effort teardown of the users created during the test.
    for uid in user_ids:
        try:
            admin.delete_user(uid)
        except Exception:  # pylint: disable=broad-exception-caught
            pass
@pytest.fixture(name="add_user_to_group", scope="function")
def add_user_to_group(
    idp: types.TestContainerIDP,
    create_group_idp: Callable[[str], str],  # pylint: disable=redefined-outer-name
) -> Callable[[str, str], None]:
    """Return a helper that puts an already-existing user into a (possibly new) group."""
    admin = KeycloakAdmin(
        server_url=idp.container.host_configs["6060"].base(),
        username=IDP_ROOT_USERNAME,
        password=IDP_ROOT_PASSWORD,
        realm_name="master",
    )

    def _add_user_to_group(email: str, group_name: str) -> None:
        uid = admin.get_user_id(email)
        # Group creation (and cleanup) is delegated to the create_group_idp fixture.
        gid = create_group_idp(group_name)
        admin.group_user_add(uid, gid)

    return _add_user_to_group
@pytest.fixture(name="create_user_idp_with_role", scope="function")
def create_user_idp_with_role(
    idp: types.TestContainerIDP,
    create_group_idp: Callable[[str], str],  # pylint: disable=redefined-outer-name
) -> Callable[[str, str, bool, str, List[str]], None]:
    """Yield a factory creating a user with a signoz_role attribute and optional groups."""
    admin = KeycloakAdmin(
        server_url=idp.container.host_configs["6060"].base(),
        username=IDP_ROOT_USERNAME,
        password=IDP_ROOT_PASSWORD,
        realm_name="master",
    )
    user_ids: List[str] = []

    def _create_user_idp_with_role(
        email: str, password: str, verified: bool, role: str, groups: List[str]
    ) -> None:
        # Ensure the groups exist first (create_group_idp owns their cleanup).
        group_ids = [create_group_idp(group_name) for group_name in groups]

        # The custom role rides on the user as the signoz_role attribute.
        uid = admin.create_user(
            exist_ok=False,
            payload={
                "username": email,
                "email": email,
                "enabled": True,
                "emailVerified": verified,
                "attributes": {
                    "signoz_role": role,
                },
            },
        )
        admin.set_user_password(uid, password, temporary=False)
        user_ids.append(uid)

        for gid in group_ids:
            admin.group_user_add(uid, gid)

    yield _create_user_idp_with_role

    # Best-effort teardown of the users created during the test.
    for uid in user_ids:
        try:
            admin.delete_user(uid)
        except Exception:  # pylint: disable=broad-exception-caught
            pass
@pytest.fixture(name="setup_user_profile", scope="package")
def setup_user_profile(idp: types.TestContainerIDP) -> Callable[[], None]:
    """Return a callable that declares the signoz_role attribute on the realm user profile."""

    def _setup_user_profile() -> None:
        admin = KeycloakAdmin(
            server_url=idp.container.host_configs["6060"].base(),
            username=IDP_ROOT_USERNAME,
            password=IDP_ROOT_PASSWORD,
            realm_name="master",
        )
        # Start from the realm's current user-profile configuration.
        profile = admin.get_realm_users_profile()
        attrs = profile.get("attributes", [])

        # Idempotent: only declare the attribute when it is missing.
        already_declared = any(a.get("name") == "signoz_role" for a in attrs)
        if not already_declared:
            attrs.append(
                {
                    "name": "signoz_role",
                    "displayName": "SigNoz Role",
                    "validations": {},
                    "annotations": {},
                    # Deliberately no "required" stanza — the attribute is optional.
                    "permissions": {"view": ["admin", "user"], "edit": ["admin"]},
                    "multivalued": False,
                }
            )
            profile["attributes"] = attrs
            # Push the amended profile back to the realm.
            admin.update_realm_users_profile(payload=profile)

    return _setup_user_profile
def _ensure_groups_client_scope(client: KeycloakAdmin) -> None:
    """Create the 'groups' client scope (group + signoz_role mappers) if it is missing."""
    existing_scopes = client.get_client_scopes()
    if any(scope.get("name") == "groups" for scope in existing_scopes):
        return  # Already present — nothing to do.

    client.create_client_scope(
        payload={
            "name": "groups",
            "description": "Group membership",
            "protocol": "openid-connect",
            "attributes": {
                "include.in.token.scope": "true",
                "display.on.consent.screen": "true",
            },
            "protocolMappers": [
                # Emits the user's group names as the "groups" claim.
                {
                    "name": "groups",
                    "protocol": "openid-connect",
                    "protocolMapper": "oidc-group-membership-mapper",
                    "consentRequired": False,
                    "config": {
                        "full.path": "false",
                        "id.token.claim": "true",
                        "access.token.claim": "true",
                        "claim.name": "groups",
                        "userinfo.token.claim": "true",
                    },
                },
                # Emits the signoz_role user attribute as its own claim.
                {
                    "name": "signoz_role",
                    "protocol": "openid-connect",
                    "protocolMapper": "oidc-usermodel-attribute-mapper",
                    "consentRequired": False,
                    "config": {
                        "user.attribute": "signoz_role",
                        "id.token.claim": "true",
                        "access.token.claim": "true",
                        "claim.name": "signoz_role",
                        "userinfo.token.claim": "true",
                        "jsonType.label": "String",
                    },
                },
            ],
        },
        skip_exists=True,
    )
def get_oidc_domain(signoz: types.SigNoz, admin_token: str) -> dict:
    """Return the auth domain named 'oidc.integration.test', or None if absent."""
    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/domains"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    for domain in resp.json()["data"]:
        if domain["name"] == "oidc.integration.test":
            return domain
    return None
def get_user_by_email(signoz: types.SigNoz, admin_token: str, email: str) -> dict:
    """Return the SigNoz user with the given email, or None if not found."""
    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/user"),
        timeout=2,
        headers={"Authorization": f"Bearer {admin_token}"},
    )
    for user in resp.json()["data"]:
        if user["email"] == email:
            return user
    return None
def perform_oidc_login(
    signoz: types.SigNoz,  # pylint: disable=unused-argument
    idp: types.TestContainerIDP,
    driver: webdriver.Chrome,
    get_session_context: Callable[[str], str],
    idp_login: Callable[[str, str], None],  # pylint: disable=redefined-outer-name
    email: str,
    password: str,
) -> None:
    """Drive the browser through the IDP side of an OIDC login for *email*."""
    ctx = get_session_context(email)
    callback_url = ctx["orgs"][0]["authNSupport"]["callback"][0]["url"]
    # Rebuild the callback's path and query against the IDP host config so the
    # browser hits the address reachable from the test runner.
    parts = urlparse(callback_url)
    driver.get(f"{idp.container.host_configs['6060'].get(parts.path)}?{parts.query}")
    idp_login(email, password)
def get_saml_domain(signoz: types.SigNoz, admin_token: str) -> dict:
    """Return the auth domain named 'saml.integration.test', or None if absent."""
    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/domains"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    for domain in resp.json()["data"]:
        if domain["name"] == "saml.integration.test":
            return domain
    return None
def perform_saml_login(
    signoz: types.SigNoz,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    get_session_context: Callable[[str], str],
    idp_login: Callable[[str, str], None],  # pylint: disable=redefined-outer-name
    email: str,
    password: str,
) -> None:
    """Drive the browser through the IDP side of a SAML login for *email*."""
    ctx = get_session_context(email)
    # SAML callback URLs are usable as-is; no host rewriting is needed here.
    driver.get(ctx["orgs"][0]["authNSupport"]["callback"][0]["url"])
    idp_login(email, password)
def delete_keycloak_client(idp: types.TestContainerIDP, client_id: str) -> None:
    """Delete a Keycloak client by its public clientId, tolerating absence."""
    admin = KeycloakAdmin(
        server_url=idp.container.host_configs["6060"].base(),
        username=IDP_ROOT_USERNAME,
        password=IDP_ROOT_PASSWORD,
        realm_name="master",
    )
    try:
        # Resolve the public clientId to the internal UUID the admin API expects.
        internal_id = admin.get_client_id(client_id=client_id)
        if internal_id:
            admin.delete_client(internal_id)
    except Exception:  # pylint: disable=broad-exception-caught
        pass  # Client doesn't exist or already deleted, that's fine

View File

@@ -329,3 +329,130 @@ def find_named_result(
),
None,
)
def build_scalar_query(
    name: str,
    signal: str,
    aggregations: List[Dict],
    *,
    group_by: Optional[List[Dict]] = None,
    order: Optional[List[Dict]] = None,
    limit: Optional[int] = None,
    filter_expression: Optional[str] = None,
    having_expression: Optional[str] = None,
    step_interval: int = DEFAULT_STEP_INTERVAL,
    disabled: bool = False,
) -> Dict:
    """Assemble a builder_query envelope for a scalar query request.

    Optional clauses are attached only when provided so the payload stays
    minimal; `limit` is included even when 0 (only None means "omit").
    """
    spec: Dict[str, Any] = dict(
        name=name,
        signal=signal,
        stepInterval=step_interval,
        disabled=disabled,
        aggregations=aggregations,
    )
    if group_by:
        spec["groupBy"] = group_by
    if order:
        spec["order"] = order
    if limit is not None:
        spec["limit"] = limit
    if filter_expression:
        spec["filter"] = {"expression": filter_expression}
    if having_expression:
        spec["having"] = {"expression": having_expression}
    return {"type": "builder_query", "spec": spec}
def build_group_by_field(
    name: str,
    field_data_type: str = "string",
    field_context: str = "resource",
) -> Dict:
    """Build a group-by key descriptor for a query-builder request."""
    return dict(
        name=name,
        fieldDataType=field_data_type,
        fieldContext=field_context,
    )
def build_order_by(name: str, direction: str = "desc") -> Dict:
    """Build an order-by clause for the given key name and direction."""
    clause: Dict = {"key": {"name": name}}
    clause["direction"] = direction
    return clause
def build_logs_aggregation(expression: str, alias: Optional[str] = None) -> Dict:
    """Build a logs aggregation spec; the alias key is attached only when truthy."""
    if alias:
        return {"expression": expression, "alias": alias}
    return {"expression": expression}
def build_metrics_aggregation(
    metric_name: str,
    time_aggregation: str,
    space_aggregation: str,
    temporality: str = "cumulative",
) -> Dict:
    """Build a metrics aggregation spec for the query builder."""
    return dict(
        metricName=metric_name,
        temporality=temporality,
        timeAggregation=time_aggregation,
        spaceAggregation=space_aggregation,
    )
def get_scalar_table_data(response_json: Dict) -> List[List[Any]]:
    """Extract the row data of the first scalar result, or [] when absent."""
    results = response_json.get("data", {}).get("data", {}).get("results", [])
    return results[0].get("data", []) if results else []
def get_scalar_columns(response_json: Dict) -> List[Dict]:
    """Extract the column descriptors of the first scalar result, or [] when absent."""
    results = response_json.get("data", {}).get("data", {}).get("results", [])
    return results[0].get("columns", []) if results else []
def assert_scalar_result_order(
    data: List[List[Any]],
    expected_order: List[tuple],
    context: str = "",
) -> None:
    """Assert that *data* rows match *expected_order* cell by cell, in order."""
    assert len(data) == len(expected_order), (
        f"{context}: Expected {len(expected_order)} rows, got {len(data)}. "
        f"Data: {data}"
    )
    # Walk rows and cells in lockstep, failing with full context on the first
    # mismatch so the diff is easy to read in test output.
    for row_idx, (row, expected) in enumerate(zip(data, expected_order)):
        for col_idx, want in enumerate(expected):
            got = row[col_idx]
            assert got == want, (
                f"{context}: Row {row_idx}, column {col_idx} mismatch. "
                f"Expected {want}, got {got}. "
                f"Full row: {row}, expected: {expected}"
            )
def assert_scalar_column_order(
    data: List[List[Any]],
    column_index: int,
    expected_values: List[Any],
    context: str = "",
) -> None:
    """Assert that one column of *data* equals *expected_values*, in order."""
    # Check lengths first so a short row can't raise IndexError below.
    assert len(data) == len(
        expected_values
    ), f"{context}: Expected {len(expected_values)} rows, got {len(data)}"
    actual_values = [entry[column_index] for entry in data]
    assert actual_values == expected_values, (
        f"{context}: Column {column_index} order mismatch. "
        f"Expected {expected_values}, got {actual_values}"
    )

View File

@@ -65,6 +65,7 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
"SIGNOZ_GATEWAY_URL": gateway.container_configs["8080"].base(),
"SIGNOZ_TOKENIZER_JWT_SECRET": "secret",
}
| sqlstore.env
| clickhouse.env

View File

@@ -78,11 +78,15 @@ def test_create_and_get_domain(
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
assert len(data) == 2
assert data[0]["name"] == "domain-google.integration.test"
assert data[0]["ssoType"] == "google_auth"
assert data[1]["name"] == "domain-saml.integration.test"
assert data[1]["ssoType"] == "saml"
for domain in data:
assert domain["name"] in [
"domain-google.integration.test",
"domain-saml.integration.test",
]
assert domain["ssoType"] in ["google_auth", "saml"]
def test_create_invalid(
@@ -165,3 +169,91 @@ def test_create_invalid(
)
assert response.status_code == HTTPStatus.BAD_REQUEST
def test_create_invalid_role_mapping(
    signoz: SigNoz,
    create_user_admin: Operation,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    """Test that invalid role mappings are rejected."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Create domain with invalid defaultRole
    # "SUPERADMIN" is not a recognized role, so the API must return 400.
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/domains"),
        json={
            "name": "invalid-role-test.integration.test",
            "config": {
                "ssoEnabled": True,
                "ssoType": "saml",
                "samlConfig": {
                    "samlEntity": "saml-entity",
                    "samlIdp": "saml-idp",
                    "samlCert": "saml-cert",
                },
                "roleMapping": {
                    "defaultRole": "SUPERADMIN",  # Invalid role
                },
            },
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.BAD_REQUEST

    # Create domain with invalid role in groupMappings
    # A valid defaultRole does not excuse an invalid role inside groupMappings.
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/domains"),
        json={
            "name": "invalid-group-role.integration.test",
            "config": {
                "ssoEnabled": True,
                "ssoType": "saml",
                "samlConfig": {
                    "samlEntity": "saml-entity",
                    "samlIdp": "saml-idp",
                    "samlCert": "saml-cert",
                },
                "roleMapping": {
                    "defaultRole": "VIEWER",
                    "groupMappings": {
                        "admins": "SUPERUSER",  # Invalid role
                    },
                },
            },
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.BAD_REQUEST

    # Valid role mapping should succeed
    # Positive control: well-formed defaultRole and groupMappings are accepted.
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/domains"),
        json={
            "name": "valid-role-mapping.integration.test",
            "config": {
                "ssoEnabled": True,
                "ssoType": "saml",
                "samlConfig": {
                    "samlEntity": "saml-entity",
                    "samlIdp": "saml-idp",
                    "samlCert": "saml-cert",
                },
                "roleMapping": {
                    "defaultRole": "VIEWER",
                    "groupMappings": {
                        "signoz-admins": "ADMIN",
                        "signoz-editors": "EDITOR",
                    },
                },
            },
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.CREATED

View File

@@ -1,3 +1,4 @@
import uuid
from http import HTTPStatus
from typing import Any, Callable, Dict, List
@@ -10,6 +11,11 @@ from fixtures.auth import (
USER_ADMIN_PASSWORD,
add_license,
)
from fixtures.idputils import (
get_saml_domain,
get_user_by_email,
perform_saml_login,
)
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
@@ -102,7 +108,7 @@ def test_saml_authn(
signoz: SigNoz,
idp: TestContainerIDP, # pylint: disable=unused-argument
driver: webdriver.Chrome,
create_user_idp: Callable[[str, str], None],
create_user_idp: Callable[[str, str, bool, str, str], None],
idp_login: Callable[[str, str], None],
get_token: Callable[[str, str], str],
get_session_context: Callable[[str], str],
@@ -150,7 +156,7 @@ def test_idp_initiated_saml_authn(
signoz: SigNoz,
idp: TestContainerIDP, # pylint: disable=unused-argument
driver: webdriver.Chrome,
create_user_idp: Callable[[str, str], None],
create_user_idp: Callable[[str, str, bool, str, str], None],
idp_login: Callable[[str, str], None],
get_token: Callable[[str, str], str],
get_session_context: Callable[[str], str],
@@ -195,3 +201,372 @@ def test_idp_initiated_saml_authn(
assert found_user is not None
assert found_user["role"] == "VIEWER"
def test_saml_update_domain_with_group_mappings(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
    get_saml_settings: Callable[[], dict],
) -> None:
    """Update the existing SAML domain so later tests run with group-based
    role mappings (useRoleAttribute disabled) and attribute mapping in place."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    domain = get_saml_domain(signoz, admin_token)
    settings = get_saml_settings()

    # update the existing saml domain to have role mappings also
    response = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
        json={
            "config": {
                "ssoEnabled": True,
                "ssoType": "saml",
                "samlConfig": {
                    "samlEntity": settings["entityID"],
                    "samlIdp": settings["singleSignOnServiceLocation"],
                    "samlCert": settings["certificate"],
                    # Maps SAML assertion attributes onto SigNoz user fields.
                    "attributeMapping": {
                        "name": "givenName",
                        "groups": "groups",
                        "role": "signoz_role",
                    },
                },
                "roleMapping": {
                    "defaultRole": "VIEWER",
                    "groupMappings": {
                        "signoz-admins": "ADMIN",
                        "signoz-editors": "EDITOR",
                        "signoz-viewers": "VIEWER",
                    },
                    # Group mappings (not the role attribute) drive the role here.
                    "useRoleAttribute": False,
                },
            },
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
def test_saml_role_mapping_single_group_admin(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """A user whose only group is 'signoz-admins' must be provisioned as ADMIN."""
    user_email = "admin-group-user@saml.integration.test"
    create_user_idp_with_groups(user_email, "password", True, ["signoz-admins"])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "ADMIN"
def test_saml_role_mapping_single_group_editor(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """A user whose only group is 'signoz-editors' must be provisioned as EDITOR."""
    user_email = "editor-group-user@saml.integration.test"
    create_user_idp_with_groups(user_email, "password", True, ["signoz-editors"])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "EDITOR"
def test_saml_role_mapping_multiple_groups_highest_wins(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """A user in both 'signoz-viewers' and 'signoz-editors' gets the highest
    mapped role — EDITOR."""
    # Random suffix keeps reruns from colliding with an existing IDP user.
    user_email = f"multi-group-user-{uuid.uuid4().hex[:8]}@saml.integration.test"
    create_user_idp_with_groups(
        user_email, "password", True, ["signoz-viewers", "signoz-editors"]
    )

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "EDITOR"
def test_saml_role_mapping_explicit_viewer_group(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """An explicit VIEWER group mapping must be honored (regression guard for
    the bug where VIEWER mappings were ignored)."""
    user_email = "viewer-group-user@saml.integration.test"
    create_user_idp_with_groups(user_email, "password", True, ["signoz-viewers"])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "VIEWER"
def test_saml_role_mapping_unmapped_group_uses_default(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """A user whose groups match no mapping falls back to the default role (VIEWER)."""
    user_email = "unmapped-group-user@saml.integration.test"
    create_user_idp_with_groups(user_email, "password", True, ["some-other-group"])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "VIEWER"
def test_saml_update_domain_with_use_role_claim(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
    get_saml_settings: Callable[[], dict],
) -> None:
    """
    Updates SAML domain to enable useRoleAttribute (direct role attribute).
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    domain = get_saml_domain(signoz, admin_token)
    settings = get_saml_settings()

    response = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v1/domains/{domain['id']}"),
        json={
            "config": {
                "ssoEnabled": True,
                "ssoType": "saml",
                "samlConfig": {
                    "samlEntity": settings["entityID"],
                    "samlIdp": settings["singleSignOnServiceLocation"],
                    "samlCert": settings["certificate"],
                    # Maps SAML assertion attributes onto SigNoz user fields.
                    "attributeMapping": {
                        "name": "displayName",
                        "groups": "groups",
                        "role": "signoz_role",
                    },
                },
                "roleMapping": {
                    "defaultRole": "VIEWER",
                    "groupMappings": {
                        "signoz-admins": "ADMIN",
                        "signoz-editors": "EDITOR",
                    },
                    # With useRoleAttribute on, the signoz_role attribute
                    # takes precedence over groupMappings for role selection.
                    "useRoleAttribute": True,
                },
            },
        },
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
def test_saml_role_mapping_role_claim_takes_precedence(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """With useRoleAttribute enabled, a user's 'ADMIN' role attribute must win
    over the EDITOR role implied by 'signoz-editors' group membership."""
    # Declare the signoz_role attribute on the realm before creating the user.
    setup_user_profile()

    user_email = "role-claim-precedence@saml.integration.test"
    create_user_idp_with_role(user_email, "password", True, "ADMIN", ["signoz-editors"])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "ADMIN"
def test_saml_role_mapping_invalid_role_claim_fallback(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """An unrecognized role attribute ('SUPERADMIN') must be ignored, letting
    the 'signoz-editors' group mapping assign EDITOR instead."""
    # Declare the signoz_role attribute on the realm before creating the user.
    setup_user_profile()

    user_email = "invalid-role-user@saml.integration.test"
    create_user_idp_with_role(user_email, "password", True, "SUPERADMIN", ["signoz-editors"])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "EDITOR"
def test_saml_role_mapping_case_insensitive(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """A lowercase 'admin' role attribute must still map to the ADMIN role —
    role matching is case-insensitive."""
    # Declare the signoz_role attribute on the realm before creating the user.
    setup_user_profile()

    user_email = "lowercase-role-user@saml.integration.test"
    create_user_idp_with_role(user_email, "password", True, "admin", [])

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "ADMIN"
def test_saml_name_mapping(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """The user's display name is taken from the SAML displayName attribute."""
    user_email = "named-user@saml.integration.test"
    create_user_idp(user_email, "password", True, "Jane", "Smith")

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    # Only the first name is mapped into displayName, so "Smith" is dropped.
    assert provisioned["displayName"] == "Jane"
    assert provisioned["role"] == "VIEWER"
def test_saml_empty_name_fallback(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """Provisioning still succeeds when the IDP user has no displayName."""
    user_email = "no-name@saml.integration.test"
    create_user_idp(user_email, "password", True)

    perform_saml_login(
        signoz, driver, get_session_context, idp_login, user_email, "password"
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    provisioned = get_user_by_email(signoz, token, user_email)
    assert provisioned is not None
    assert provisioned["role"] == "VIEWER"

View File

@@ -11,6 +11,11 @@ from fixtures.auth import (
USER_ADMIN_PASSWORD,
add_license,
)
from fixtures.idputils import (
get_oidc_domain,
get_user_by_email,
perform_oidc_login,
)
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
@@ -75,7 +80,7 @@ def test_oidc_authn(
signoz: SigNoz,
idp: TestContainerIDP, # pylint: disable=unused-argument
driver: webdriver.Chrome,
create_user_idp: Callable[[str, str, bool], None],
create_user_idp: Callable[[str, str, bool, str, str], None],
idp_login: Callable[[str, str], None],
get_token: Callable[[str, str], str],
get_session_context: Callable[[str], str],
@@ -127,3 +132,403 @@ def test_oidc_authn(
assert found_user is not None
assert found_user["role"] == "VIEWER"
def test_oidc_update_domain_with_group_mappings(
    signoz: SigNoz,
    idp: TestContainerIDP,
    get_token: Callable[[str, str], str],
    get_oidc_settings: Callable[[str], dict],
) -> None:
    """
    Reconfigure the OIDC domain: claim mapping plus group-based role mappings
    (useRoleAttribute disabled, so groups drive the role).
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    domain = get_oidc_domain(signoz, admin_token)
    host_cfg = signoz.self.host_configs["8080"]
    client_id = f"oidc.integration.test.{host_cfg.address}:{host_cfg.port}"
    settings = get_oidc_settings(client_id)
    # issuer points at the in-docker IDP address; issuerAlias keeps the
    # externally advertised issuer so token validation still matches.
    issuer_url = idp.container.container_configs["6060"].get(
        urlparse(settings["issuer"]).path
    )
    payload = {
        "config": {
            "ssoEnabled": True,
            "ssoType": "oidc",
            "oidcConfig": {
                "clientId": settings["client_id"],
                "clientSecret": settings["client_secret"],
                "issuer": f"{issuer_url}",
                "issuerAlias": settings["issuer"],
                "getUserInfo": True,
                "claimMapping": {
                    "email": "email",
                    "name": "name",
                    "groups": "groups",
                    "role": "signoz_role",
                },
            },
            "roleMapping": {
                "defaultRole": "VIEWER",
                "groupMappings": {
                    "signoz-admins": "ADMIN",
                    "signoz-editors": "EDITOR",
                    "signoz-viewers": "VIEWER",
                },
                "useRoleAttribute": False,
            },
        },
    }
    response = requests.put(
        host_cfg.get(f"/api/v1/domains/{domain['id']}"),
        json=payload,
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
def test_oidc_role_mapping_single_group_admin(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """Membership in 'signoz-admins' alone maps an OIDC user to ADMIN."""
    user_email = "admin-group-user@oidc.integration.test"
    password = "password123"
    create_user_idp_with_groups(user_email, password, True, ["signoz-admins"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    # Confirm the provisioned role via the admin API.
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    assert user["role"] == "ADMIN"
def test_oidc_role_mapping_single_group_editor(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """Membership in 'signoz-editors' alone maps an OIDC user to EDITOR."""
    user_email = "editor-group-user@oidc.integration.test"
    password = "password123"
    create_user_idp_with_groups(user_email, password, True, ["signoz-editors"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    assert user["role"] == "EDITOR"
def test_oidc_role_mapping_multiple_groups_highest_wins(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """
    A user in several mapped groups receives the highest mapped role:
    'signoz-viewers' + 'signoz-admins' must resolve to ADMIN.
    """
    user_email = "multi-group-user@oidc.integration.test"
    password = "password123"
    groups = ["signoz-viewers", "signoz-admins"]
    create_user_idp_with_groups(user_email, password, True, groups)
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    # ADMIN outranks VIEWER, so it must win.
    assert user["role"] == "ADMIN"
def test_oidc_role_mapping_explicit_viewer_group(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """
    An explicit group-to-VIEWER mapping is honored.
    Regression coverage for the bug where VIEWER mappings were ignored.
    """
    user_email = "viewer-group-user@oidc.integration.test"
    password = "password123"
    create_user_idp_with_groups(user_email, password, True, ["signoz-viewers"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    assert user["role"] == "VIEWER"
def test_oidc_role_mapping_unmapped_group_uses_default(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """A user whose only group has no mapping falls back to the default role."""
    user_email = "unmapped-group-user@oidc.integration.test"
    password = "password123"
    # 'some-other-group' is deliberately absent from groupMappings.
    create_user_idp_with_groups(user_email, password, True, ["some-other-group"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    # defaultRole in the domain config is VIEWER.
    assert user["role"] == "VIEWER"
def test_oidc_update_domain_with_use_role_claim(
    signoz: SigNoz,
    idp: TestContainerIDP,
    get_token: Callable[[str, str], str],
    get_oidc_settings: Callable[[str], dict],
) -> None:
    """
    Reconfigure the OIDC domain with useRoleAttribute enabled so the
    'signoz_role' claim takes precedence over group mappings.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    domain = get_oidc_domain(signoz, admin_token)
    host_cfg = signoz.self.host_configs["8080"]
    client_id = f"oidc.integration.test.{host_cfg.address}:{host_cfg.port}"
    settings = get_oidc_settings(client_id)
    # issuer points at the in-docker IDP address; issuerAlias keeps the
    # externally advertised issuer so token validation still matches.
    issuer_url = idp.container.container_configs["6060"].get(
        urlparse(settings["issuer"]).path
    )
    payload = {
        "config": {
            "ssoEnabled": True,
            "ssoType": "oidc",
            "oidcConfig": {
                "clientId": settings["client_id"],
                "clientSecret": settings["client_secret"],
                "issuer": f"{issuer_url}",
                "issuerAlias": settings["issuer"],
                "getUserInfo": True,
                "claimMapping": {
                    "email": "email",
                    "name": "name",
                    "groups": "groups",
                    "role": "signoz_role",
                },
            },
            "roleMapping": {
                "defaultRole": "VIEWER",
                "groupMappings": {
                    "signoz-admins": "ADMIN",
                    "signoz-editors": "EDITOR",
                },
                "useRoleAttribute": True,
            },
        },
    }
    response = requests.put(
        host_cfg.get(f"/api/v1/domains/{domain['id']}"),
        json=payload,
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
def test_oidc_role_mapping_role_claim_takes_precedence(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """
    With useRoleAttribute enabled the role claim beats group mappings:
    a 'signoz-editors' member carrying role claim 'ADMIN' becomes ADMIN.
    """
    setup_user_profile()
    user_email = "role-claim-precedence@oidc.integration.test"
    password = "password123"
    create_user_idp_with_role(user_email, password, True, "ADMIN", ["signoz-editors"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    # The claim value wins over the EDITOR group mapping.
    assert user["role"] == "ADMIN"
def test_oidc_role_mapping_invalid_role_claim_fallback(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """
    An unrecognized role claim is ignored and group mappings apply instead:
    role 'SUPERADMIN' + membership in 'signoz-editors' yields EDITOR.
    """
    setup_user_profile()
    user_email = "invalid-role-user@oidc.integration.test"
    password = "password123"
    # 'SUPERADMIN' is not a valid SigNoz role.
    create_user_idp_with_role(user_email, password, True, "SUPERADMIN", ["signoz-editors"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    assert user["role"] == "EDITOR"
def test_oidc_role_mapping_case_insensitive(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_role: Callable[[str, str, bool, str, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
    setup_user_profile: Callable[[], None],
) -> None:
    """A lowercase role claim ('editor') still matches the EDITOR role."""
    setup_user_profile()
    user_email = "lowercase-role-user@oidc.integration.test"
    password = "password123"
    # No groups — only the (lowercase) role claim is set.
    create_user_idp_with_role(user_email, password, True, "editor", [])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, user_email, password
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user = get_user_by_email(signoz, token, user_email)
    assert user is not None
    assert user["role"] == "EDITOR"
def test_oidc_name_mapping(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """Test that user's display name is mapped from IDP name claim.

    Creates an IDP user with an explicit first/last name, logs in via OIDC,
    then lists users through the admin API and asserts the displayName and
    default role of the provisioned user.
    """
    email = "named-user@oidc.integration.test"
    # Create user with explicit first/last name
    create_user_idp(email, "password123", True, first_name="John", last_name="Doe")
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, email, "password123"
    )
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/user"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    users = response.json()["data"]
    found_user = next((u for u in users if u["email"] == email), None)
    assert found_user is not None
    # Keycloak concatenates firstName + lastName into "name" claim
    assert found_user["displayName"] == "John Doe"
    assert found_user["role"] == "VIEWER"  # Default role
def test_oidc_empty_name_uses_fallback(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """Test that user without name in IDP still gets created (may have empty displayName).

    Provisions an IDP user with no first/last name, logs in via OIDC, then
    verifies through the admin API that the user exists with the default role.
    """
    email = "no-name@oidc.integration.test"
    # Create user without first/last name
    create_user_idp(email, "password123", True)
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, email, "password123"
    )
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v1/user"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    users = response.json()["data"]
    found_user = next((u for u in users if u["email"] == email), None)
    # User should still be created even with empty name
    assert found_user is not None
    assert found_user["role"] == "VIEWER"
    # Note: displayName may be empty - this is a known limitation

Some files were not shown because too many files have changed in this diff Show More