Compare commits

..

17 Commits

Author | SHA1 | Message | Date
Naman Verma | 4bd5f804ec | fix: open api test fix | 2026-02-20 13:35:39 +05:30
Naman Verma | a349b6935d | Merge branch 'main' into nv/6703 | 2026-02-20 13:33:04 +05:30
Naman Verma | 5780fac789 | Merge branch 'main' into nv/6703 | 2026-02-20 12:48:21 +05:30
Naman Verma | 3450a98fdf | fix: open api spec generation | 2026-02-20 12:47:11 +05:30
Naman Verma | 4fa5c42a36 | Merge branch 'main' into nv/6703 | 2026-02-20 11:26:51 +05:30
Naman Verma | 3555a73ac0 | fix: typo fix in test comment | 2026-02-20 11:25:03 +05:30
Naman Verma | ed2d8d06cb | test: integration tests for count based aggregation | 2026-02-20 11:23:52 +05:30
Naman Verma | 203318a206 | fix: fix query for greater than comparison | 2026-02-20 11:22:28 +05:30
Naman Verma | 67fbf5a548 | chore: use comparison param directly in metric aggregation | 2026-02-20 08:44:26 +05:30
Naman Verma | 195dd36078 | Merge branch 'main' into nv/6703 | 2026-02-20 08:18:55 +05:30
Naman Verma | 2c12f0bd03 | Merge branch 'main' into nv/6703 | 2026-02-19 18:26:38 +05:30
Naman Verma | fcbb1843d9 | chore: check for le only based on metric type | 2026-02-19 16:03:46 +05:30
Naman Verma | 00dff5c930 | chore: rename limit to threshold in ComparisonSpaceAggregationParam | 2026-02-19 16:02:11 +05:30
Naman Verma | c7e5258329 | Merge branch 'main' into nv/6703 | 2026-02-19 15:40:24 +05:30
Naman Verma | 84a7be22f7 | chore: generate new openapi spec | 2026-02-19 11:24:00 +05:30
Naman Verma | c5923f942f | chore: fix typo in struct name | 2026-02-19 11:23:38 +05:30
Naman Verma | f91eee7e50 | feat: add support for count based aggregation in histogram metrics | 2026-02-19 08:45:44 +05:30
53 changed files with 2741 additions and 649 deletions

View File

@@ -749,6 +749,14 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesComparisonSpaceAggregationParam:
properties:
operator:
type: string
threshold:
format: double
type: number
type: object
MetrictypesSpaceAggregation:
enum:
- sum
@@ -761,6 +769,7 @@ components:
- p90
- p95
- p99
- histogram_count
type: string
MetrictypesTemporality:
enum:
@@ -1045,6 +1054,8 @@ components:
type: object
Querybuildertypesv5MetricAggregation:
properties:
comparisonSpaceAggregationParam:
$ref: '#/components/schemas/MetrictypesComparisonSpaceAggregationParam'
metricName:
type: string
reduceTo:

View File

@@ -2,9 +2,11 @@ package api
import (
"net/http"
"net/http/httputil"
"time"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
@@ -29,6 +31,7 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
@@ -74,6 +77,10 @@ func (ah *APIHandler) UM() *usage.Manager {
return ah.opts.UsageManager
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// note: add ee override methods first
@@ -96,6 +103,9 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
ah.APIHandler.RegisterRoutes(router, am)
}

View File

@@ -0,0 +1,58 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
validPath := false
for _, allowedPrefix := range gateway.AllowedPrefix {
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
validPath = true
break
}
}
if !validPath {
rw.WriteHeader(http.StatusNotFound)
return
}
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
render.Error(rw, err)
return
}
//Create headers
var licenseKey string
if license != nil {
licenseKey = license.Key
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
req.Header.Set("X-Consumer-Groups", "ns:default")
ah.Gateway().ServeHTTP(rw, req)
}
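Editor's note: the handler above only proxies requests whose path begins with the gateway route prefix followed by one of the allow-listed gateway paths; anything else gets a 404 before the license headers are attached. A minimal TypeScript sketch of that check, using the prefixes shown in this diff (the helper itself is illustrative and not part of the change; the frontend gateway Axios instances added later in this compare compose paths that pass it):

// Sketch only: mirrors the allowlist check in ServeGatewayHTTP, with the
// route prefix and allowed prefixes copied from the Go diff above.
const ROUTE_PREFIX = '/api/gateway';
const ALLOWED_PREFIXES = [
	'/v1/workspaces/me',
	'/v2/profiles/me',
	'/v2/deployments/me',
];

function isGatewayPathAllowed(requestPath: string): boolean {
	// A request is forwarded to the gateway proxy only when its path starts
	// with the route prefix plus one of the allow-listed paths.
	return ALLOWED_PREFIXES.some((prefix) =>
		requestPath.startsWith(`${ROUTE_PREFIX}${prefix}`),
	);
}

// e.g. POST /api/gateway/v1/workspaces/me/keys (built by GatewayApiV1Instance)
console.log(isGatewayPathAllowed('/api/gateway/v1/workspaces/me/keys')); // true
console.log(isGatewayPathAllowed('/api/gateway/v1/other')); // false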

View File

@@ -19,6 +19,7 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
@@ -71,6 +72,11 @@ type Server struct {
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
if err != nil {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
@@ -164,6 +170,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
}

View File

@@ -0,0 +1,9 @@
package gateway
import (
"net/http/httputil"
)
func NewNoopProxy() (*httputil.ReverseProxy, error) {
return &httputil.ReverseProxy{}, nil
}

View File

@@ -0,0 +1,66 @@
package gateway
import (
"net/http"
"net/http/httputil"
"net/url"
"path"
"strings"
)
var (
RoutePrefix string = "/api/gateway"
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
)
type proxy struct {
url *url.URL
stripPath string
}
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
url, err := url.Parse(u)
if err != nil {
return nil, err
}
proxy := &proxy{url: url, stripPath: stripPath}
return &httputil.ReverseProxy{
Rewrite: proxy.rewrite,
ModifyResponse: proxy.modifyResponse,
ErrorHandler: proxy.errorHandler,
}, nil
}
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
pr.SetURL(p.url)
pr.SetXForwarded()
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}
func (p *proxy) modifyResponse(res *http.Response) error {
return nil
}
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
rw.WriteHeader(http.StatusBadGateway)
}
func cleanPath(p string) string {
if p == "" {
return "/"
}
if p[0] != '/' {
p = "/" + p
}
np := path.Clean(p)
if p[len(p)-1] == '/' && np != "/" {
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
np = p
} else {
np += "/"
}
}
return np
}

View File

@@ -0,0 +1,61 @@
package gateway
import (
"context"
"net/http"
"net/http/httputil"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProxyRewrite(t *testing.T) {
testCases := []struct {
name string
url *url.URL
stripPath string
in *url.URL
expected *url.URL
}{
{
name: "SamePathAdded",
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
stripPath: "/strip",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
},
{
name: "NoStripPathInput",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
{
name: "NoStripPathPresentInReq",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "/not-found",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
}
for _, tc := range testCases {
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
require.NoError(t, err)
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
require.NoError(t, err)
proxyReq := &httputil.ProxyRequest{
In: inReq,
Out: inReq.Clone(context.Background()),
}
proxy.Rewrite(proxyReq)
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
}
}

View File

@@ -0,0 +1,29 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CreateIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const createIngestionKey = async (
props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createIngestionKey;

View File

@@ -0,0 +1,26 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/keys/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteIngestionKey;

View File

@@ -0,0 +1,21 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const getAllIngestionKeys = (
props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { search, per_page, page } = props;
const BASE_URL = '/workspaces/me/keys';
const URL_QUERY_PARAMS =
search && search.length > 0
? `/search?name=${search}&page=1&per_page=100`
: `?page=${page}&per_page=${per_page}`;
return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

View File

@@ -0,0 +1,65 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
AddLimitProps,
LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const createLimitForIngestionKey = async (
props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post(
`/workspaces/me/keys/${props.keyID}/limits`,
{
...props,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default createLimitForIngestionKey;

View File

@@ -0,0 +1,26 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteLimitsForIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/limits/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteLimitsForIngestionKey;

View File

@@ -0,0 +1,65 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
LimitSuccessProps,
UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const updateLimitForIngestionKey = async (
props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/limits/${props.limitID}`,
{
config: props.config,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default updateLimitForIngestionKey;

View File

@@ -0,0 +1,32 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IngestionKeysPayloadProps,
UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const updateIngestionKey = async (
props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/keys/${props.id}`,
{
...props.data,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateIngestionKey;

View File

@@ -4,6 +4,8 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';
export default apiV1;

View File

@@ -927,6 +927,18 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
unit: string;
}
export interface MetrictypesComparisonSpaceAggregationParamDTO {
/**
* @type string
*/
operator?: string;
/**
* @type number
* @format double
*/
threshold?: number;
}
export enum MetrictypesSpaceAggregationDTO {
sum = 'sum',
avg = 'avg',
@@ -938,6 +950,7 @@ export enum MetrictypesSpaceAggregationDTO {
p90 = 'p90',
p95 = 'p95',
p99 = 'p99',
histogram_count = 'histogram_count',
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
@@ -1288,6 +1301,7 @@ export interface Querybuildertypesv5LogAggregationDTO {
}
export interface Querybuildertypesv5MetricAggregationDTO {
comparisonSpaceAggregationParam?: MetrictypesComparisonSpaceAggregationParamDTO;
/**
* @type string
*/
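Editor's note: together with the OpenAPI change earlier in this compare, the generated types above let a metric aggregation carry an operator and threshold for the new histogram_count space aggregation. A minimal TypeScript sketch of an object shaped like these DTOs; the interfaces are re-declared locally for illustration because the generated module's path and its remaining fields are not shown in full here, and the example metric name and operator value are assumptions:

// Local re-declarations mirroring the generated DTO fragments shown above.
interface MetrictypesComparisonSpaceAggregationParamDTO {
	operator?: string;
	threshold?: number; // double, per the OpenAPI schema
}

interface Querybuildertypesv5MetricAggregationDTO {
	comparisonSpaceAggregationParam?: MetrictypesComparisonSpaceAggregationParamDTO;
	metricName?: string;
	// ...other aggregation fields are outside the hunks shown in this compare
}

// Count-based aggregation over a histogram metric: the query's space
// aggregation would use the new 'histogram_count' enum value (that field is
// not visible in these hunks), while the comparison param supplies the
// operator and threshold. Metric name and '>' operator are illustrative.
const aggregation: Querybuildertypesv5MetricAggregationDTO = {
	metricName: 'http_server_duration_bucket',
	comparisonSpaceAggregationParam: {
		operator: '>',
		threshold: 0.5,
	},
};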

View File

@@ -15,7 +15,15 @@ import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { eventEmitter } from 'utils/getEventEmitter';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import apiV1, {
apiAlertManager,
apiV2,
apiV3,
apiV4,
apiV5,
gatewayApiV1,
gatewayApiV2,
} from './apiV1';
import { Logout } from './utils';
const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -203,6 +211,24 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});
GatewayApiV1Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V2
export const GatewayApiV2Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});
// generated API Instance
export const GeneratedAPIInstance = axios.create({
baseURL: ENVIRONMENT.baseURL,
@@ -214,6 +240,14 @@ GeneratedAPIInstance.interceptors.response.use(
interceptorRejected,
);
GatewayApiV2Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//
AxiosAlertManagerInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,

View File

@@ -202,7 +202,7 @@ function AllEndPoints({
const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
op: 'AND',
});
setParams({
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
selectedView: VIEWS.ENDPOINT_STATS,
endPointDetailsLocalFilters: {
items: initialItems,

View File

@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
const httpUrlKey = {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
};
@@ -93,7 +93,7 @@ function EndPointDetails({
return currentFilters; // No change needed, prevents loop
}
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
const otherFilters = currentFilters?.items?.filter(
(item) => item.key?.key !== httpUrlKey.key,
);
@@ -125,7 +125,7 @@ function EndPointDetails({
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
// Filter out http_url filter before saving to params
// Filter out http.url filter before saving to params
const filteredNewFilters = {
op: 'AND',
items:
@@ -299,6 +299,7 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
filters={filters}
timeRange={timeRange}
onDragSelect={onDragSelect}

View File

@@ -56,15 +56,15 @@ function TopErrors({
{
items: endPointName
? [
// Remove any existing http_url filters from initialFilters to avoid duplicates
// Remove any existing http.url filters from initialFilters to avoid duplicates
...(initialFilters?.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
) || []),
{
id: '92b8a1c1',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',

View File

@@ -9,7 +9,6 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../constants';
import DomainMetrics from './DomainMetrics';
// Mock the API call
@@ -127,9 +126,11 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'count()',
);
// Verify exact domain filter expression structure
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryA.filter.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryA.filter.expression).toContain(
'url.full EXISTS OR http.url EXISTS',
);
// Verify Query B - p99 latency
@@ -141,13 +142,17 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'p99(duration_nano)',
);
// Verify exact domain filter expression structure
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryB.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
// Verify Query C - error count (disabled)
const queryC = queryData.find((q: any) => q.queryName === 'C');
expect(queryC).toBeDefined();
expect(queryC.disabled).toBe(true);
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryC.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryC.aggregations?.[0]).toBeDefined();
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count()',
@@ -164,7 +169,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'max(timestamp)',
);
// Verify exact domain filter expression structure
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryD.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
// Verify Formula F1 - error rate calculation
const formulas = payload.query.builder.queryFormulas;

View File

@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryA.filter) {
expect(queryA.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryB.filter) {
expect(queryB.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(queryC.aggregateOperator).toBe('count');
if (queryC.filter) {
expect(queryC.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryD.filter) {
expect(queryD.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
}
if (queryE.filter) {
expect(queryE.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(query.filter.expression).toContain('staging');
// Also verify domain filter is still present
expect(query.filter.expression).toContain(
"http_host = 'api.internal.com'",
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
);
// Verify client kind filter is present
expect(query.filter.expression).toContain("kind_string = 'Client'");

View File

@@ -34,6 +34,7 @@ function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
filters,
timeRange,
onDragSelect,
@@ -47,6 +48,7 @@ function StatusCodeBarCharts({
unknown
>;
domainName: string;
endPointName: string;
filters: IBuilderQuery['filters'];
timeRange: {
startTime: number;
@@ -142,11 +144,11 @@ function StatusCodeBarCharts({
const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, {
getStatusCodeBarChartWidgetData(domainName, endPointName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, filters],
[domainName, endPointName, filters],
);
const graphClickHandler = useCallback(
@@ -164,7 +166,6 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);
handleGraphClick({
xValue,
yValue,

View File

@@ -12,7 +12,7 @@ export const VIEW_TYPES = {
// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
HTTP_URL: 'http_url',
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',

View File

@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
const endpointFilter = result?.items?.find(
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
item.value === endPointName,
);
expect(endpointFilter).toBeDefined();
@@ -344,12 +344,13 @@ describe('API Monitoring Utils', () => {
describe('getFormattedEndPointDropDownData', () => {
it('should format endpoint dropdown data correctly', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const mockData = [
{
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/users',
'url.full': 'http://example.com/api/users',
A: 150, // count or other metric
},
},
@@ -357,6 +358,7 @@ describe('API Monitoring Utils', () => {
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/orders',
'url.full': 'http://example.com/api/orders',
A: 75,
},
},
@@ -404,7 +406,7 @@ describe('API Monitoring Utils', () => {
it('should handle items without URL path', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
type MockDataType = {
data: {
[key: string]: string | number;
@@ -710,11 +712,13 @@ describe('API Monitoring Utils', () => {
it('should generate widget configuration for status code bar chart', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const filters = { items: [], op: 'AND' };
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);
@@ -737,11 +741,21 @@ describe('API Monitoring Utils', () => {
if (domainFilter) {
expect(domainFilter.value).toBe(domainName);
}
// Should have endpoint filter if provided
const endpointFilter = queryData.filters?.items?.find(
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
);
expect(endpointFilter).toBeDefined();
if (endpointFilter) {
expect(endpointFilter.value).toBe(endPointName);
}
});
it('should include custom filters in the widget configuration', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const customFilter = {
id: 'custom-filter',
key: {
@@ -757,6 +771,7 @@ describe('API Monitoring Utils', () => {
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);

View File

@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
}
>
Click Row

View File

@@ -6,10 +6,10 @@
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: http_host = '${domainName}'
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,8 +18,6 @@ import {
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('AllEndpointsWidget - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const emptyFilters: IBuilderQuery['filters'] = {
@@ -94,28 +92,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
// Queries A, B, C have identical base filter
expect(queryA.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryB.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryC.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
// Query D has additional has_error filter
expect(queryD.filter?.expression).toBe(
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
);
});
});
describe('2. GroupBy Structure', () => {
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
it('default groupBy includes both http.url and url.full with type attribute', () => {
const widget = getAllEndpointsWidgetData(
emptyGroupBy,
mockDomainName,
@@ -126,13 +124,23 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have the same default groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(1);
expect(query.groupBy).toHaveLength(2);
// http.url
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
type: 'attribute',
});
// url.full
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
});
});
@@ -162,18 +170,19 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have defaults + custom groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
// First two should be defaults (http_url)
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
// First two should be defaults (http.url, url.full)
expect(query.groupBy[0].key).toBe('http.url');
expect(query.groupBy[1].key).toBe('url.full');
// Last two should be custom (matching subset of properties)
expect(query.groupBy[1]).toMatchObject({
expect(query.groupBy[2]).toMatchObject({
dataType: DataTypes.String,
key: 'service.name',
type: 'resource',
});
expect(query.groupBy[2]).toMatchObject({
expect(query.groupBy[3]).toMatchObject({
dataType: DataTypes.String,
key: 'deployment.environment',
type: 'resource',

View File

@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),

View File

@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
it('formats data using the utility function', () => {
const mockRows = [
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
];
const dataProps = {

View File

@@ -6,18 +6,15 @@
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: http_host = '${domainName}'
* - Domain handling: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Existence check: http_url EXISTS
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Aggregation: count() expression
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('EndpointDropdown - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
@@ -46,9 +43,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters');
// Base filter 1: Domain http_host = '${domainName}'
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -56,7 +53,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Base filter 3: Existence check
expect(queryA.filter?.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
'(http.url EXISTS OR url.full EXISTS)',
);
// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -67,11 +64,16 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
});
expect(queryA).not.toHaveProperty('aggregateAttribute');
// GroupBy: http_url
expect(queryA.groupBy).toHaveLength(1);
// GroupBy: Both http.url and url.full
expect(queryA.groupBy).toHaveLength(2);
expect(queryA.groupBy).toContainEqual({
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
key: 'http.url',
dataType: 'string',
type: 'attribute',
});
expect(queryA.groupBy).toContainEqual({
key: 'url.full',
dataType: 'string',
type: 'attribute',
});
});
@@ -118,7 +120,53 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Exact filter expression with custom filters merged
expect(expression).toBe(
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
);
});
});
describe('3. HTTP URL Filter Special Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const dropdownQuery = payload[2];
const expression =
dropdownQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: Exact filter expression with http.url converted to OR logic
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
);
});
});

View File

@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getRateOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getLatencyOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData.filter).toBeDefined();
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}' service.name = 'user-service'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
);
});
});

View File

@@ -142,6 +142,7 @@ describe('StatusCodeBarCharts', () => {
endTime: 1609545600000,
};
const mockDomainName = 'test-domain';
const mockEndPointName = '/api/test';
const onDragSelectMock = jest.fn();
const refetchFn = jest.fn();
@@ -231,6 +232,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -266,6 +268,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -308,6 +311,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -352,6 +356,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -399,6 +404,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -413,6 +419,7 @@ describe('StatusCodeBarCharts', () => {
// but we've confirmed the function is mocked and ready to be tested
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: [],
op: 'AND',
@@ -460,6 +467,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockCustomFilters as IBuilderQuery['filters']}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -469,6 +477,7 @@ describe('StatusCodeBarCharts', () => {
// Assert widget creation was called with the correct parameters
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({ id: 'custom-filter' }),

View File

@@ -10,7 +10,7 @@
*
* V5 Changes:
* - Filter format change: filters.items[] → filter.expression
* - Domain filter: (http_host)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - stepInterval: 60 → null
* - Grouped by response_status_code
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toBe(latencyExpression);
// Verify base filters
expect(callsExpression).toContain('http_host');
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
// Verify custom filters are merged
@@ -187,4 +187,51 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toContain('production');
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/metrics',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const callsChartQuery = payload[4];
const latencyChartQuery = payload[5];
const callsExpression =
callsChartQuery.query.builder.queryData[0].filter?.expression;
const latencyExpression =
latencyChartQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(callsExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
expect(latencyExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
// Base filters still present
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
});
});
});

View File

@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Kind filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// Base filters present
expect(expression).toContain('http_host');
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
@@ -165,4 +165,62 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(expression).toContain(
"(http.url = '/api/users' OR url.full = '/api/users')",
);
// Other filters still present
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
});
});
});

View File

@@ -84,7 +84,7 @@ describe('TopErrors', () => {
{
columns: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -124,7 +124,7 @@ describe('TopErrors', () => {
table: {
rows: [
{
http_url: '/api/test',
'http.url': '/api/test',
A: 100,
},
],
@@ -206,7 +206,7 @@ describe('TopErrors', () => {
expect(navigateMock).toHaveBeenCalledWith({
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: 'http.url' }),
op: '=',
value: '/api/test',
}),
@@ -335,7 +335,7 @@ describe('TopErrors', () => {
// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
);
});
});

View File

@@ -15,6 +15,7 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
@@ -56,12 +57,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);
export const getDomainNameFilterExpression = (domainName: string): string =>
`http_host = '${domainName}'`;
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
export const clientKindExpression = `kind_string = 'Client'`;
/**
* Converts filters to expression
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
@@ -74,6 +75,34 @@ export const convertFiltersWithUrlHandling = (
return baseExpression;
}
// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
const httpUrlFilter = filters.items?.find(
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
);
// If http.url filter exists, create modified filters with (http.url OR url.full)
if (httpUrlFilter && httpUrlFilter.value) {
// Remove ALL http.url filters from items (guards against duplicates)
const otherFilters = filters.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
);
// Convert to expression first with other filters
const {
filter: intermediateFilter,
} = convertFiltersToExpressionWithExistingQuery(
{ ...filters, items: otherFilters || [] },
baseExpression,
);
// Add the OR condition for http.url and url.full
const urlValue = httpUrlFilter.value;
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
return intermediateFilter.expression.trim()
? `${intermediateFilter.expression} AND ${urlCondition}`
: urlCondition;
}
const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
baseExpression,
@@ -342,7 +371,7 @@ export const formatDataForTable = (
});
};
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
export const getDomainMetricsQueryPayload = (
domainName: string,
@@ -559,7 +588,14 @@ const defaultGroupBy = [
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'attribute',
},
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
},
// {
@@ -831,8 +867,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`has_error = true`,
];
if (showStatusCodeErrors) {
@@ -874,7 +910,12 @@ export const getTopErrorsQueryPayload = (
filter: { expression: filterExpression },
groupBy: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -1093,11 +1134,11 @@ export const formatEndPointsDataForTable = (
if (!isGroupedByAttribute) {
formattedData = data?.map((endpoint) => {
const { port } = extractPortAndEndpoint(
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
);
return {
key: v4(),
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
port,
callCount:
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
@@ -1221,7 +1262,9 @@ export const formatTopErrorsDataForTable = (
return {
key: v4(),
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
endpointName: getDisplayValue(
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
),
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
statusMessage: getDisplayValue(rowObj.status_message),
count: getDisplayValue(rowObj.__result_0),
@@ -1238,10 +1281,10 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'ea16470b',
key: {
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
dataType: DataTypes.String,
type: 'tag',
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
id: 'http.url--string--tag--false',
},
op: '=',
value: endPointName,
@@ -1738,7 +1781,7 @@ export const getEndPointDetailsQueryPayload = (
filters || { items: [], op: 'AND' },
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
expression: 'A',
@@ -1750,7 +1793,12 @@ export const getEndPointDetailsQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'attribute',
},
{
key: 'url.full',
dataType: DataTypes.String,
type: 'attribute',
},
@@ -2177,7 +2225,7 @@ export const getEndPointZeroStateQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'tag',
},
@@ -2371,7 +2419,8 @@ export const statusCodeWidgetInfo = [
interface EndPointDropDownResponseRow {
data: {
[SPAN_ATTRIBUTES.HTTP_URL]: string;
[SPAN_ATTRIBUTES.URL_PATH]: string;
'url.full': string;
A: number;
};
}
@@ -2390,8 +2439,8 @@ export const getFormattedEndPointDropDownData = (
}
return data.map((row) => ({
key: v4(),
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
}));
};
@@ -2720,6 +2769,7 @@ export const groupStatusCodes = (
export const getStatusCodeBarChartWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => ({
query: {
@@ -2748,6 +2798,20 @@ export const getStatusCodeBarChartWidgetData = (
op: '=',
value: domainName,
},
...(endPointName
? [
{
id: '8b1be6f0',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',
value: endPointName,
},
]
: []),
...(filters?.items || []),
],
op: 'AND',
@@ -2869,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2901,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2933,7 +2997,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2965,7 +3029,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2996,12 +3060,24 @@ export const getAllEndpointsWidgetData = (
);
widget.renderColumnCell = {
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
[SPAN_ATTRIBUTES.URL_PATH]: (
url: string | number,
record?: RowData,
): ReactNode => {
// First try to use the url from the column value
let urlValue = url;
// If url is empty/null and we have the record, fall back to url.full
if (isEmptyFilterValue(url) && record) {
const { 'url.full': urlFull } = record;
urlValue = urlFull;
}
if (!urlValue || urlValue === 'n/a') {
return <span>-</span>;
}
const { endpoint } = extractPortAndEndpoint(String(url));
const { endpoint } = extractPortAndEndpoint(String(urlValue));
return <span>{getDisplayValue(endpoint)}</span>;
},
A: (numOfCalls: any): ReactNode => (
@@ -3056,8 +3132,8 @@ export const getAllEndpointsWidgetData = (
};
widget.customColTitles = {
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
'net.peer.port': 'Port',
};
widget.title = (
@@ -3082,10 +3158,12 @@ export const getAllEndpointsWidgetData = (
</div>
);
widget.hiddenColumns = ['url.full'];
return widget;
};
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
export const getGroupByFiltersFromGroupByValues = (
rowData: any,
@@ -3143,7 +3221,7 @@ export const getRateOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`http_host = '${domainName}'`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
),
},
functions: [],
@@ -3194,7 +3272,7 @@ export const getLatencyOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`http_host = '${domainName}'`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
),
},
functions: [],

View File

@@ -88,10 +88,7 @@ function InviteTeamMembers({
setTeamMembersToInvite((prev) => (prev || []).filter((m) => m.id !== id));
};
const isMemberTouched = (member: TeamMember): boolean =>
member.email.trim() !== '' ||
Boolean(member.role && member.role.trim() !== '');
// Validation function to check all users
const validateAllUsers = (): boolean => {
let isValid = true;
let hasEmailErrors = false;
@@ -99,9 +96,7 @@ function InviteTeamMembers({
const updatedEmailValidity: Record<string, boolean> = {};
const touchedMembers = teamMembersToInvite?.filter(isMemberTouched) ?? [];
touchedMembers?.forEach((member) => {
teamMembersToInvite?.forEach((member) => {
const emailValid = /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(member.email);
const roleValid = Boolean(member.role && member.role.trim() !== '');
@@ -155,12 +150,12 @@ function InviteTeamMembers({
const handleNext = (): void => {
if (validateAllUsers()) {
setTeamMembers(teamMembersToInvite?.filter(isMemberTouched) ?? []);
setTeamMembers(teamMembersToInvite || []);
setHasInvalidEmails(false);
setHasInvalidRoles(false);
setInviteError(null);
sendInvites({
invites: teamMembersToInvite?.filter(isMemberTouched) ?? [],
invites: teamMembersToInvite || [],
});
}
};
@@ -235,12 +230,12 @@ function InviteTeamMembers({
const getValidationErrorMessage = (): string => {
if (hasInvalidEmails && hasInvalidRoles) {
return 'Please enter valid emails and select roles for team members';
return 'Please enter valid emails and select roles for all team members';
}
if (hasInvalidEmails) {
return 'Please enter valid emails for team members';
return 'Please enter valid emails for all team members';
}
return 'Please select roles for team members';
return 'Please select roles for all team members';
};
const handleDoLater = (): void => {

View File

@@ -1,485 +0,0 @@
import { rest, server } from 'mocks-server/server';
import {
fireEvent,
render,
screen,
userEvent,
waitFor,
} from 'tests/test-utils';
import InviteTeamMembers from '../InviteTeamMembers';
jest.mock('api/common/logEvent', () => ({
__esModule: true,
default: jest.fn(),
}));
const mockNotificationSuccess = jest.fn() as jest.MockedFunction<
(args: { message: string }) => void
>;
const mockNotificationError = jest.fn() as jest.MockedFunction<
(args: { message: string }) => void
>;
jest.mock('hooks/useNotifications', () => ({
useNotifications: (): any => ({
notifications: {
success: mockNotificationSuccess,
error: mockNotificationError,
},
}),
}));
const INVITE_USERS_ENDPOINT = '*/api/v1/invite/bulk';
interface TeamMember {
email: string;
role: string;
name: string;
frontendBaseUrl: string;
id: string;
}
interface InviteRequestBody {
invites: { email: string; role: string }[];
}
interface RenderProps {
isLoading?: boolean;
teamMembers?: TeamMember[] | null;
}
const mockOnNext = jest.fn() as jest.MockedFunction<() => void>;
const mockSetTeamMembers = jest.fn() as jest.MockedFunction<
(members: TeamMember[]) => void
>;
function renderComponent({
isLoading = false,
teamMembers = null,
}: RenderProps = {}): ReturnType<typeof render> {
return render(
<InviteTeamMembers
isLoading={isLoading}
teamMembers={teamMembers}
setTeamMembers={mockSetTeamMembers}
onNext={mockOnNext}
/>,
);
}
async function selectRole(
user: ReturnType<typeof userEvent.setup>,
selectIndex: number,
optionLabel: string,
): Promise<void> {
const placeholders = screen.getAllByText(/select roles/i);
await user.click(placeholders[selectIndex]);
const optionContent = await screen.findByText(optionLabel);
fireEvent.click(optionContent);
}
describe('InviteTeamMembers', () => {
beforeEach(() => {
jest.clearAllMocks();
server.use(
rest.post(INVITE_USERS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json({ status: 'success' })),
),
);
});
afterEach(() => {
jest.useRealTimers();
server.resetHandlers();
});
describe('Initial rendering', () => {
it('renders the page header, column labels, default rows, and action buttons', () => {
renderComponent();
expect(
screen.getByRole('heading', { name: /invite your team/i }),
).toBeInTheDocument();
expect(
screen.getByText(/signoz is a lot more useful with collaborators/i),
).toBeInTheDocument();
expect(
screen.getAllByPlaceholderText(/e\.g\. john@signoz\.io/i),
).toHaveLength(3);
expect(screen.getByText('Email address')).toBeInTheDocument();
expect(screen.getByText('Roles')).toBeInTheDocument();
expect(
screen.getByRole('button', { name: /complete/i }),
).toBeInTheDocument();
expect(
screen.getByRole('button', { name: /i'll do this later/i }),
).toBeInTheDocument();
});
it('disables both action buttons while isLoading is true', () => {
renderComponent({ isLoading: true });
expect(screen.getByRole('button', { name: /complete/i })).toBeDisabled();
expect(
screen.getByRole('button', { name: /i'll do this later/i }),
).toBeDisabled();
});
});
describe('Row management', () => {
it('adds a new empty row when "Add another" is clicked', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
expect(
screen.getAllByPlaceholderText(/e\.g\. john@signoz\.io/i),
).toHaveLength(3);
await user.click(screen.getByRole('button', { name: /add another/i }));
expect(
screen.getAllByPlaceholderText(/e\.g\. john@signoz\.io/i),
).toHaveLength(4);
});
it('removes the correct row when its trash icon is clicked', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
const emailInputs = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(emailInputs[0], 'first@example.com');
await screen.findByDisplayValue('first@example.com');
await user.click(
screen.getAllByRole('button', { name: /remove team member/i })[0],
);
await waitFor(() => {
expect(
screen.queryByDisplayValue('first@example.com'),
).not.toBeInTheDocument();
expect(
screen.getAllByPlaceholderText(/e\.g\. john@signoz\.io/i),
).toHaveLength(2);
});
});
it('hides remove buttons when only one row remains', async () => {
renderComponent();
const user = userEvent.setup({ pointerEventsCheck: 0 });
let removeButtons = screen.getAllByRole('button', {
name: /remove team member/i,
});
while (removeButtons.length > 0) {
await user.click(removeButtons[0]);
removeButtons = screen.queryAllByRole('button', {
name: /remove team member/i,
});
}
expect(
screen.queryByRole('button', { name: /remove team member/i }),
).not.toBeInTheDocument();
});
});
describe('Inline email validation', () => {
it('shows an inline error after typing an invalid email and clears it when a valid email is entered', async () => {
jest.useFakeTimers();
const user = userEvent.setup({
advanceTimers: (ms) => jest.advanceTimersByTime(ms),
});
renderComponent();
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, 'not-an-email');
jest.advanceTimersByTime(600);
await waitFor(() => {
expect(screen.getByText(/invalid email address/i)).toBeInTheDocument();
});
await user.clear(firstInput);
await user.type(firstInput, 'good@example.com');
jest.advanceTimersByTime(600);
await waitFor(() => {
expect(
screen.queryByText(/invalid email address/i),
).not.toBeInTheDocument();
});
});
it('does not show an inline error when the field is cleared back to empty', async () => {
jest.useFakeTimers();
const user = userEvent.setup({
advanceTimers: (ms) => jest.advanceTimersByTime(ms),
});
renderComponent();
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, 'a');
await user.clear(firstInput);
jest.advanceTimersByTime(600);
await waitFor(() => {
expect(
screen.queryByText(/invalid email address/i),
).not.toBeInTheDocument();
});
});
});
describe('Validation callout on Complete', () => {
it('shows the correct callout message for each combination of email/role validity', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
const removeButtons = screen.getAllByRole('button', {
name: /remove team member/i,
});
await user.click(removeButtons[0]);
await user.click(
screen.getAllByRole('button', { name: /remove team member/i })[0],
);
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, 'bad-email');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.getByText(
/please enter valid emails and select roles for team members/i,
),
).toBeInTheDocument();
expect(
screen.queryByText(/please enter valid emails for team members/i),
).not.toBeInTheDocument();
expect(
screen.queryByText(/please select roles for team members/i),
).not.toBeInTheDocument();
});
await selectRole(user, 0, 'Viewer');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.getByText(/please enter valid emails for team members/i),
).toBeInTheDocument();
expect(
screen.queryByText(/please select roles for team members/i),
).not.toBeInTheDocument();
expect(
screen.queryByText(/please enter valid emails and select roles/i),
).not.toBeInTheDocument();
});
await user.clear(firstInput);
await user.type(firstInput, 'valid@example.com');
await user.click(screen.getByRole('button', { name: /add another/i }));
const allInputs = screen.getAllByPlaceholderText(/e\.g\. john@signoz\.io/i);
await user.type(allInputs[1], 'norole@example.com');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.getByText(/please select roles for team members/i),
).toBeInTheDocument();
expect(
screen.queryByText(/please enter valid emails for team members/i),
).not.toBeInTheDocument();
expect(
screen.queryByText(/please enter valid emails and select roles/i),
).not.toBeInTheDocument();
});
});
it('treats whitespace as untouched, clears the callout on fix-and-resubmit, and clears role error on role select', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
const removeButtons = screen.getAllByRole('button', {
name: /remove team member/i,
});
await user.click(removeButtons[0]);
await user.click(
screen.getAllByRole('button', { name: /remove team member/i })[0],
);
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, ' ');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.queryByText(/please enter valid emails/i),
).not.toBeInTheDocument();
expect(screen.queryByText(/please select roles/i)).not.toBeInTheDocument();
});
await user.clear(firstInput);
await user.type(firstInput, 'bad-email');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.getByText(
/please enter valid emails and select roles for team members/i,
),
).toBeInTheDocument();
expect(
screen.queryByText(/please enter valid emails for team members/i),
).not.toBeInTheDocument();
expect(
screen.queryByText(/please select roles for team members/i),
).not.toBeInTheDocument();
});
await user.clear(firstInput);
await user.type(firstInput, 'good@example.com');
await selectRole(user, 0, 'Admin');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.queryByText(/please enter valid emails and select roles/i),
).not.toBeInTheDocument();
expect(
screen.queryByText(/please enter valid emails for team members/i),
).not.toBeInTheDocument();
expect(
screen.queryByText(/please select roles for team members/i),
).not.toBeInTheDocument();
});
await waitFor(() => expect(mockOnNext).toHaveBeenCalledTimes(1), {
timeout: 1200,
});
});
it('does not show a validation callout when all rows are untouched (empty)', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(
screen.queryByText(/please enter valid emails/i),
).not.toBeInTheDocument();
expect(screen.queryByText(/please select roles/i)).not.toBeInTheDocument();
});
await waitFor(
() => {
expect(mockOnNext).toHaveBeenCalled();
},
{ timeout: 1200 },
);
});
});
describe('API integration', () => {
it('only sends touched (non-empty) rows — empty rows are excluded from the invite payload', async () => {
let capturedBody: InviteRequestBody | null = null;
server.use(
rest.post(INVITE_USERS_ENDPOINT, async (req, res, ctx) => {
capturedBody = await req.json<InviteRequestBody>();
return res(ctx.status(200), ctx.json({ status: 'success' }));
}),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, 'only@example.com');
await selectRole(user, 0, 'Admin');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(capturedBody).not.toBeNull();
expect(capturedBody?.invites).toHaveLength(1);
expect(capturedBody?.invites[0]).toMatchObject({
email: 'only@example.com',
role: 'ADMIN',
});
});
await waitFor(() => expect(mockOnNext).toHaveBeenCalled(), {
timeout: 1200,
});
});
it('calls the invite API, shows a success notification, and calls onNext after the 1 s delay', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, 'alice@example.com');
await selectRole(user, 0, 'Admin');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(mockNotificationSuccess).toHaveBeenCalledWith(
expect.objectContaining({ message: 'Invites sent successfully!' }),
);
});
await waitFor(
() => {
expect(mockOnNext).toHaveBeenCalledTimes(1);
},
{ timeout: 1200 },
);
});
it('renders an API error container when the invite request fails', async () => {
server.use(
rest.post(INVITE_USERS_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(500),
ctx.json({
errors: [{ code: 'INTERNAL_ERROR', msg: 'Something went wrong' }],
}),
),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderComponent();
const [firstInput] = screen.getAllByPlaceholderText(
/e\.g\. john@signoz\.io/i,
);
await user.type(firstInput, 'fail@example.com');
await selectRole(user, 0, 'Viewer');
await user.click(screen.getByRole('button', { name: /complete/i }));
await waitFor(() => {
expect(document.querySelector('.auth-error-container')).toBeInTheDocument();
});
await user.type(firstInput, 'x');
await waitFor(() => {
expect(
document.querySelector('.auth-error-container'),
).not.toBeInTheDocument();
});
});
});
});

View File

@@ -3,7 +3,6 @@ import { MemoryRouter, Route } from 'react-router-dom';
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { AppProvider } from 'providers/App/App';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import { Span } from 'types/api/trace/getTraceV2';
@@ -109,7 +108,7 @@ const createMockSpan = (): Span => ({
statusMessage: '',
tagMap: {
'http.method': 'GET',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/users?page=1',
'http.url': '/api/users?page=1',
'http.status_code': '200',
'service.name': 'frontend-service',
'span.kind': 'server',

View File

@@ -5,7 +5,6 @@ import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
@@ -879,9 +878,7 @@ describe('SpanDetailsDrawer', () => {
// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
expect(screen.getAllByText(SPAN_ATTRIBUTES.HTTP_URL).length).toBeGreaterThan(
0,
);
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
});
@@ -1129,7 +1126,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees all attributes initially
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.getByText(SPAN_ATTRIBUTES.HTTP_URL)).toBeInTheDocument();
expect(screen.getByText('http.url')).toBeInTheDocument();
expect(screen.getByText('http.status_code')).toBeInTheDocument();
// User types "method" in search
@@ -1139,7 +1136,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees only matching attributes
await waitFor(() => {
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.queryByText(SPAN_ATTRIBUTES.HTTP_URL)).not.toBeInTheDocument();
expect(screen.queryByText('http.url')).not.toBeInTheDocument();
expect(screen.queryByText('http.status_code')).not.toBeInTheDocument();
});
});

View File

@@ -1,4 +1,3 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { ILog } from 'types/api/logs/log';
import { Span } from 'types/api/trace/getTraceV2';
@@ -23,7 +22,7 @@ export const mockSpan: Span = {
event: [],
tagMap: {
'http.method': 'GET',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/test',
'http.url': '/api/test',
'http.status_code': '200',
},
hasError: false,

View File

@@ -0,0 +1,15 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getAllIngestionKeys } from 'api/IngestionKeys/getAllIngestionKeys';
import { AxiosError, AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const useGetAllIngestionsKeys = (
props: GetIngestionKeyProps,
): UseQueryResult<AxiosResponse<AllIngestionKeyProps>, AxiosError> =>
useQuery<AxiosResponse<AllIngestionKeyProps>, AxiosError>({
queryKey: [`IngestionKeys-${props.page}-${props.search}`],
queryFn: () => getAllIngestionKeys(props),
});

View File

@@ -1,5 +1,3 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
/* eslint-disable sonarjs/no-duplicate-string */
export const traceDetailResponse = [
{
@@ -37,7 +35,7 @@ export const traceDetailResponse = [
'component',
'host.name',
'http.method',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'ip',
'http.status_code',
'opencensus.exporterversion',
@@ -86,7 +84,7 @@ export const traceDetailResponse = [
'signoz.collector.id',
'component',
'http.method',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'ip',
],
[
@@ -743,7 +741,7 @@ export const traceDetailResponse = [
'component',
'http.method',
'http.status_code',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'net/http.reused',
'net/http.was_idle',
'service.name',
@@ -835,7 +833,7 @@ export const traceDetailResponse = [
'opencensus.exporterversion',
'signoz.collector.id',
'host.name',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'net/http.reused',
'net/http.was_idle',
],
@@ -918,7 +916,7 @@ export const traceDetailResponse = [
'net/http.was_idle',
'component',
'host.name',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'ip',
'service.name',
'signoz.collector.id',

View File

@@ -2,7 +2,6 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { getAttributesValues } from 'api/queryBuilder/getAttributesValues';
import { DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY } from 'constants/queryBuilder';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import {
BaseAutocompleteData,
DataTypes,
@@ -32,7 +31,7 @@ export const AllTraceFilterKeyValue: Record<string, string> = {
httpRoute: 'HTTP Route',
'http.route': 'HTTP Route',
httpUrl: 'HTTP URL',
[SPAN_ATTRIBUTES.HTTP_URL]: 'HTTP URL',
'http.url': 'HTTP URL',
traceID: 'Trace ID',
trace_id: 'Trace ID',
} as const;

View File

@@ -80,11 +80,16 @@ func (q *builderQuery[T]) Fingerprint() string {
case qbtypes.LogAggregation:
aggParts = append(aggParts, a.Expression)
case qbtypes.MetricAggregation:
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
var spaceAggParamStr string
if a.ComparisonSpaceAggregationParam != nil {
spaceAggParamStr = a.ComparisonSpaceAggregationParam.StringValue()
}
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
a.MetricName,
a.Temporality.StringValue(),
a.TimeAggregation.StringValue(),
a.SpaceAggregation.StringValue(),
spaceAggParamStr,
))
}
}
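With the extra slot, two metric queries that differ only in their comparison operator or threshold now fingerprint differently. A minimal sketch of the composed aggregation part, using hypothetical values; the last slot is what ComparisonSpaceAggregationParam.StringValue() renders, or an empty string when no comparison param is set:

package main

import "fmt"

func main() {
    // Hypothetical values for illustration only.
    metricName := "test_bucket"
    temporality := "cumulative"
    timeAggregation := "rate"
    spaceAggregation := "histogram_count"
    // What ComparisonSpaceAggregationParam.StringValue() renders for operator "<=" and threshold 1000.
    spaceAggParam := `{"operator": "<=", "threshold": "1000.000000"}`

    // Mirrors the fmt.Sprintf format used in Fingerprint above.
    fmt.Printf("%s:%s:%s:%s:%s\n",
        metricName, temporality, timeAggregation, spaceAggregation, spaceAggParam)
    // Output: test_bucket:cumulative:rate:histogram_count:{"operator": "<=", "threshold": "1000.000000"}
}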

View File

@@ -1913,7 +1913,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
// No V2 configuration found, return defaults
response.DefaultTTLDays = 15
response.TTLConditions = []model.CustomRetentionRule{}
response.Status = constants.StatusSuccess
response.Status = constants.StatusFailed
response.ColdStorageTTLDays = -1
return response, nil
}

View File

@@ -123,7 +123,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
origTimeAgg := query.Aggregations[0].TimeAggregation
origGroupBy := slices.Clone(query.GroupBy)
if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
if (query.Aggregations[0].SpaceAggregation.IsPercentile() ||
query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount) &&
query.Aggregations[0].Type != metrictypes.ExpHistogramType {
// add le to the group by if it doesn't exist
leExists := false
@@ -154,7 +155,11 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// make the time aggregation rate (percentiles) or increase (histogram count), and the space aggregation sum
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
if query.Aggregations[0].SpaceAggregation.IsPercentile() {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
} else {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
}
query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
}
@@ -524,7 +529,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
)
}
sb := sqlbuilder.NewSelectBuilder()
@@ -577,6 +582,29 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
sb.Select("ts")
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return nil, err
}
sb.SelectMore(aggQuery)
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)

View File

@@ -1,6 +1,7 @@
package telemetrymetrics
import (
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
@@ -308,3 +309,20 @@ func AggregationColumnForSamplesTable(
}
return aggregationColumn, nil
}
func AggregationQueryForHistogramCount(param *metrictypes.ComparisonSpaceAggregationParam) (string, error) {
if param == nil {
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
}
histogramCountThreshold := param.Threshold
switch param.Operater {
case "<=":
return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
case ">":
return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
default:
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
}
}
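The two branches encode a linear interpolation over cumulative bucket counts: for `<=`, take the count at the largest bucket bound at or below the threshold plus a proportional share of the next bucket; for `>`, subtract that from the total (the count at the largest, +Inf, bound). A standalone sketch of the same arithmetic on plain slices, with hypothetical bucket values; the production path runs entirely inside ClickHouse via the argMaxIf/argMinIf expressions above:

package main

import (
    "fmt"
    "math"
)

// histogramCount mirrors the arithmetic that AggregationQueryForHistogramCount encodes in SQL,
// but on in-memory slices instead of ClickHouse aggregate functions.
// les holds the bucket upper bounds in ascending order, ending with +Inf;
// counts holds the matching cumulative bucket counts.
func histogramCount(les, counts []float64, threshold float64, operator string) float64 {
    var lowerLe, lowerCount float64                               // largest le <= threshold and its cumulative count
    upperLe, upperCount := les[len(les)-1], counts[len(counts)-1] // smallest le > threshold
    for i, le := range les {
        if le <= threshold {
            lowerLe, lowerCount = le, counts[i]
        } else {
            upperLe, upperCount = le, counts[i]
            break
        }
    }
    // Linear interpolation of the cumulative count at the threshold.
    below := lowerCount + (upperCount-lowerCount)*(threshold-lowerLe)/(upperLe-lowerLe)
    if operator == ">" {
        // The ">" branch subtracts the interpolated share from the total (the +Inf bucket count).
        return counts[len(counts)-1] - below
    }
    return below
}

func main() {
    // Hypothetical buckets: le = 100, 1000, 8000, +Inf with cumulative counts 10, 50, 90, 100.
    les := []float64{100, 1000, 8000, math.Inf(1)}
    counts := []float64{10, 50, 90, 100}
    fmt.Println(histogramCount(les, counts, 1000, "<="))  // 50
    fmt.Println(histogramCount(les, counts, 4500, "<="))  // 70
    fmt.Println(histogramCount(les, counts, 4500, ">"))   // 30
    fmt.Println(histogramCount(les, counts, 80000, "<=")) // 90: beyond the largest finite bucket the answer stops changing
}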

View File

@@ -2,6 +2,7 @@ package metrictypes
import (
"database/sql/driver"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -189,17 +190,18 @@ type SpaceAggregation struct {
}
var (
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
)
func (SpaceAggregation) Enum() []any {
@@ -214,6 +216,7 @@ func (SpaceAggregation) Enum() []any {
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
SpaceAggregationHistogramCount,
}
}
@@ -256,3 +259,12 @@ type MetricTableHints struct {
type MetricValueFilter struct {
Value float64
}
type ComparisonSpaceAggregationParam struct {
Operater string `json:"operator"`
Threshold float64 `json:"threshold"`
}
func (param ComparisonSpaceAggregationParam) StringValue() string {
return fmt.Sprintf("{\"operator\": \"%s\", \"threshold\": \"%f\"}", param.Operater, param.Threshold)
}

View File

@@ -446,6 +446,8 @@ type MetricAggregation struct {
TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
// space aggregation to apply to the query
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
// param for space aggregation if needed
ComparisonSpaceAggregationParam *metrictypes.ComparisonSpaceAggregationParam `json:"comparisonSpaceAggregationParam"`
// table hints to use for the query
TableHints *metrictypes.MetricTableHints `json:"-"`
// value filter to apply to the query
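For reference, a standalone sketch of the JSON this new field accepts, using local mirror structs rather than the package's own types, and hypothetical values; the json tags match the ones declared above and in ComparisonSpaceAggregationParam:

package main

import (
    "encoding/json"
    "fmt"
)

// Local mirrors of the relevant fields, for illustration only;
// the real types live in the SigNoz metrictypes and query builder packages.
type comparisonParam struct {
    Operator  string  `json:"operator"`
    Threshold float64 `json:"threshold"`
}

type metricAggregation struct {
    TimeAggregation                 string           `json:"timeAggregation"`
    SpaceAggregation                string           `json:"spaceAggregation"`
    ComparisonSpaceAggregationParam *comparisonParam `json:"comparisonSpaceAggregationParam"`
}

func main() {
    // Hypothetical request fragment: count of observations at or below 1000 for a histogram metric.
    payload := []byte(`{
        "timeAggregation": "rate",
        "spaceAggregation": "histogram_count",
        "comparisonSpaceAggregationParam": {"operator": "<=", "threshold": 1000}
    }`)

    var agg metricAggregation
    if err := json.Unmarshal(payload, &agg); err != nil {
        panic(err)
    }
    fmt.Printf("%s %+v\n", agg.SpaceAggregation, *agg.ComparisonSpaceAggregationParam)
    // Output: histogram_count {Operator:<= Threshold:1000}
}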

View File

@@ -52,6 +52,7 @@ def build_builder_query(
time_aggregation: str,
space_aggregation: str,
*,
comparisonSpaceAggregationParam: Optional[Dict] = None,
temporality: Optional[str] = None,
step_interval: int = DEFAULT_STEP_INTERVAL,
group_by: Optional[List[str]] = None,
@@ -74,7 +75,8 @@ def build_builder_query(
}
if temporality:
spec["aggregations"][0]["temporality"] = temporality
if comparisonSpaceAggregationParam:
spec["aggregations"][0]["comparisonSpaceAggregationParam"] = comparisonSpaceAggregationParam
if group_by:
spec["groupBy"] = [
{

View File

@@ -0,0 +1,259 @@
"""
Look at the histogram_data_1h.jsonl file for the relevant data
"""
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
import pytest
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
build_builder_query,
get_all_series,
get_series_values,
make_query_request,
)
from fixtures.utils import get_testdata_file_path
FILE = get_testdata_file_path("histogram_data_1h.jsonl")
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 11, 69),
(100, "<=", 1.1, 6.9),
(7500, "<=", 16.75, 74.75),
(8000, "<=", 17, 75),
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 7, 7),
(100, ">", 16.9, 69.1),
(7500, ">", 1.25, 1.25),
(8000, ">", 1, 1),
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_one_endpoint(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = "test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='endpoint = "/health"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 59
assert result_values[0]["value"] == first_value
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 22, 138),
(100, "<=", 2.2, 13.8),
(7500, "<=", 33.5, 149.5),
(8000, "<=", 34, 150),
(80000, "<=", 34, 150), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 14, 14),
(100, ">", 33.8, 138.2),
(7500, ">", 2.5, 2.5),
(8000, ">", 2, 2),
(80000, ">", 2, 2), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_one_service(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = "test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='service = "api"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 59
assert result_values[0]["value"] == first_value
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 11, 69),
(100, "<=", 1.1, 6.9),
(7500, "<=", 16.75, 74.75),
(8000, "<=", 17, 75),
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 7, 7),
(100, ">", 16.9, 69.1),
(7500, ">", 1.25, 1.25),
(8000, ">", 1, 1),
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_delta_service(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = "test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='service = "web"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 60 ## in delta, the value at 10:01 will also be reported
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 33, 207),
(100, "<=", 3.3, 20.7),
(7500, "<=", 50.25, 224.25),
(8000, "<=", 51, 225),
(80000, "<=", 51, 225),
(1000, ">", 21, 21),
(100, ">", 50.7, 207.3),
(7500, ">", 3.75, 3.75),
(8000, ">", 3, 3),
(80000, ">", 3, 3),
],
)
def test_histogram_count_for_all_services(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = "test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
## no service filter; this also tests multi-temporality handling
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 60
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
assert result_values[-1]["value"] == last_value

File diff suppressed because it is too large