Compare commits

4 Commits

Author SHA1 Message Date
Abhi kumar
41f5f6e421 Merge branch 'main' into fix/chart-manager 2026-02-20 19:10:54 +05:30
Abhi Kumar
da2e050a23 fix: formatting chart manager aggregation values with the yaxis unit 2026-02-20 19:06:47 +05:30
Pandey
5a69f16410 refactor: remove legacy /api/gateway reverse proxy (#10374)
## Summary

- Remove the legacy `/api/gateway` reverse proxy from `ee/query-service/integrations/gateway/`; it has been superseded by the new provider-pattern-based `pkg/gateway/` package (serving `/api/v2/gateway/ingestion_keys`)
- Delete dead frontend code: manual IngestionKeys API clients and hooks that targeted the old gateway routes
- Clean up `GatewayApiV1`/`GatewayApiV2` axios instances and route constants from the frontend API layer

## What's retained

- `--gateway-url` flag and `GatewayUrl` field in `APIHandlerOptions` (still used by `cloudIntegrations.go`)
- `pkg/gateway/` package (the new gateway provider)
- `frontend/src/api/generated/services/gateway/` (generated client for the new endpoints)
2026-02-20 16:12:44 +05:30
Yunus M
07afef5c5e fix: use derived values for url and host attributes (#10218)
* fix: use derived values for url and host attributes

* fix: update test cases to use derived attributes

* fix: remove unnecessary handling of http.url and update the test cases

* fix: update SpanDetailsDrawer.test.tsx to use http_url

* fix: replace http_url with constant

* fix: remove redundant url handling in column

* fix: remove endpoint in StatusCodeBarCharts as it's fetched from filters

* fix: test title to use correct attribute
2026-02-20 14:20:34 +05:30
61 changed files with 953 additions and 2779 deletions

View File

@@ -749,14 +749,6 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesComparisonSpaceAggregationParam:
properties:
operator:
type: string
threshold:
format: double
type: number
type: object
MetrictypesSpaceAggregation:
enum:
- sum
@@ -769,7 +761,6 @@ components:
- p90
- p95
- p99
- histogram_count
type: string
MetrictypesTemporality:
enum:
@@ -1054,8 +1045,6 @@ components:
type: object
Querybuildertypesv5MetricAggregation:
properties:
comparisonSpaceAggregationParam:
$ref: '#/components/schemas/MetrictypesComparisonSpaceAggregationParam'
metricName:
type: string
reduceTo:

View File

@@ -2,11 +2,9 @@ package api
import (
"net/http"
"net/http/httputil"
"time"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
@@ -31,7 +29,6 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
@@ -77,10 +74,6 @@ func (ah *APIHandler) UM() *usage.Manager {
return ah.opts.UsageManager
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// note: add ee override methods first
@@ -103,9 +96,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
ah.APIHandler.RegisterRoutes(router, am)
}

View File

@@ -1,58 +0,0 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
validPath := false
for _, allowedPrefix := range gateway.AllowedPrefix {
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
validPath = true
break
}
}
if !validPath {
rw.WriteHeader(http.StatusNotFound)
return
}
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
render.Error(rw, err)
return
}
//Create headers
var licenseKey string
if license != nil {
licenseKey = license.Key
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
req.Header.Set("X-Consumer-Groups", "ns:default")
ah.Gateway().ServeHTTP(rw, req)
}

View File

@@ -19,7 +19,6 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
@@ -72,11 +71,6 @@ type Server struct {
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
if err != nil {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
@@ -170,7 +164,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
}

View File

@@ -1,9 +0,0 @@
package gateway
import (
"net/http/httputil"
)
func NewNoopProxy() (*httputil.ReverseProxy, error) {
return &httputil.ReverseProxy{}, nil
}

View File

@@ -1,66 +0,0 @@
package gateway
import (
"net/http"
"net/http/httputil"
"net/url"
"path"
"strings"
)
var (
RoutePrefix string = "/api/gateway"
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
)
type proxy struct {
url *url.URL
stripPath string
}
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
url, err := url.Parse(u)
if err != nil {
return nil, err
}
proxy := &proxy{url: url, stripPath: stripPath}
return &httputil.ReverseProxy{
Rewrite: proxy.rewrite,
ModifyResponse: proxy.modifyResponse,
ErrorHandler: proxy.errorHandler,
}, nil
}
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
pr.SetURL(p.url)
pr.SetXForwarded()
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}
func (p *proxy) modifyResponse(res *http.Response) error {
return nil
}
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
rw.WriteHeader(http.StatusBadGateway)
}
func cleanPath(p string) string {
if p == "" {
return "/"
}
if p[0] != '/' {
p = "/" + p
}
np := path.Clean(p)
if p[len(p)-1] == '/' && np != "/" {
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
np = p
} else {
np += "/"
}
}
return np
}

View File

@@ -1,61 +0,0 @@
package gateway
import (
"context"
"net/http"
"net/http/httputil"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProxyRewrite(t *testing.T) {
testCases := []struct {
name string
url *url.URL
stripPath string
in *url.URL
expected *url.URL
}{
{
name: "SamePathAdded",
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
stripPath: "/strip",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
},
{
name: "NoStripPathInput",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
{
name: "NoStripPathPresentInReq",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "/not-found",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
}
for _, tc := range testCases {
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
require.NoError(t, err)
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
require.NoError(t, err)
proxyReq := &httputil.ProxyRequest{
In: inReq,
Out: inReq.Clone(context.Background()),
}
proxy.Rewrite(proxyReq)
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
}
}

View File

@@ -1,29 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CreateIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const createIngestionKey = async (
props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createIngestionKey;

View File

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/keys/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteIngestionKey;

View File

@@ -1,21 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const getAllIngestionKeys = (
props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { search, per_page, page } = props;
const BASE_URL = '/workspaces/me/keys';
const URL_QUERY_PARAMS =
search && search.length > 0
? `/search?name=${search}&page=1&per_page=100`
: `?page=${page}&per_page=${per_page}`;
return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

View File

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
AddLimitProps,
LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const createLimitForIngestionKey = async (
props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post(
`/workspaces/me/keys/${props.keyID}/limits`,
{
...props,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default createLimitForIngestionKey;

View File

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteLimitsForIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/limits/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteLimitsForIngestionKey;

View File

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
LimitSuccessProps,
UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const updateLimitForIngestionKey = async (
props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/limits/${props.limitID}`,
{
config: props.config,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default updateLimitForIngestionKey;

View File

@@ -1,32 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IngestionKeysPayloadProps,
UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const updateIngestionKey = async (
props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/keys/${props.id}`,
{
...props.data,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateIngestionKey;

View File

@@ -4,8 +4,6 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';
export default apiV1;

View File

@@ -927,18 +927,6 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
unit: string;
}
export interface MetrictypesComparisonSpaceAggregationParamDTO {
/**
* @type string
*/
operator?: string;
/**
* @type number
* @format double
*/
threshold?: number;
}
export enum MetrictypesSpaceAggregationDTO {
sum = 'sum',
avg = 'avg',
@@ -950,7 +938,6 @@ export enum MetrictypesSpaceAggregationDTO {
p90 = 'p90',
p95 = 'p95',
p99 = 'p99',
histogram_count = 'histogram_count',
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
@@ -1301,7 +1288,6 @@ export interface Querybuildertypesv5LogAggregationDTO {
}
export interface Querybuildertypesv5MetricAggregationDTO {
comparisonSpaceAggregationParam?: MetrictypesComparisonSpaceAggregationParamDTO;
/**
* @type string
*/

View File

@@ -15,15 +15,7 @@ import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { eventEmitter } from 'utils/getEventEmitter';
import apiV1, {
apiAlertManager,
apiV2,
apiV3,
apiV4,
apiV5,
gatewayApiV1,
gatewayApiV2,
} from './apiV1';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import { Logout } from './utils';
const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -211,24 +203,6 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});
GatewayApiV1Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V2
export const GatewayApiV2Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});
// generated API Instance
export const GeneratedAPIInstance = axios.create({
baseURL: ENVIRONMENT.baseURL,
@@ -240,14 +214,6 @@ GeneratedAPIInstance.interceptors.response.use(
interceptorRejected,
);
GatewayApiV2Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//
AxiosAlertManagerInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
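
Review note: with the manual `GatewayApiV1Instance`/`GatewayApiV2Instance` clients gone, ingestion-key calls are served by the new `pkg/gateway/` provider at `/api/v2/gateway/ingestion_keys` (see the PR summary above). A minimal sketch of a call against that path, assuming a plain axios request and an illustrative response shape; the generated client under `frontend/src/api/generated/services/gateway/` is the real entry point:

```ts
import axios from 'axios';

// Illustrative shape only; the real schema comes from the generated client.
interface IngestionKey {
  id: string;
  name: string;
}

// Minimal sketch: list ingestion keys via the new v2 gateway endpoint.
// Auth headers are assumed to be attached by the shared axios interceptors.
async function listIngestionKeys(baseURL: string): Promise<IngestionKey[]> {
  const response = await axios.get<{ data: IngestionKey[] }>(
    `${baseURL}/api/v2/gateway/ingestion_keys`,
  );
  return response.data.data;
}
```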

View File

@@ -202,7 +202,7 @@ function AllEndPoints({
const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
op: 'AND',
});
setParams({
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
selectedView: VIEWS.ENDPOINT_STATS,
endPointDetailsLocalFilters: {
items: initialItems,

View File

@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
const httpUrlKey = {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'tag',
};
@@ -93,7 +93,7 @@ function EndPointDetails({
return currentFilters; // No change needed, prevents loop
}
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
const otherFilters = currentFilters?.items?.filter(
(item) => item.key?.key !== httpUrlKey.key,
);
@@ -125,7 +125,7 @@ function EndPointDetails({
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
// Filter out http.url filter before saving to params
// Filter out http_url filter before saving to params
const filteredNewFilters = {
op: 'AND',
items:
@@ -299,7 +299,6 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
filters={filters}
timeRange={timeRange}
onDragSelect={onDragSelect}

View File

@@ -56,15 +56,15 @@ function TopErrors({
{
items: endPointName
? [
// Remove any existing http.url filters from initialFilters to avoid duplicates
// Remove any existing http_url filters from initialFilters to avoid duplicates
...(initialFilters?.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
) || []),
{
id: '92b8a1c1',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'tag',
},
op: '=',

View File

@@ -9,6 +9,7 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../constants';
import DomainMetrics from './DomainMetrics';
// Mock the API call
@@ -126,11 +127,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'count()',
);
// Verify exact domain filter expression structure
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryA.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryA.filter.expression).toContain(
'url.full EXISTS OR http.url EXISTS',
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
// Verify Query B - p99 latency
@@ -142,17 +141,13 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'p99(duration_nano)',
);
// Verify exact domain filter expression structure
expect(queryB.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
// Verify Query C - error count (disabled)
const queryC = queryData.find((q: any) => q.queryName === 'C');
expect(queryC).toBeDefined();
expect(queryC.disabled).toBe(true);
expect(queryC.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryC.aggregations?.[0]).toBeDefined();
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count()',
@@ -169,9 +164,7 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'max(timestamp)',
);
// Verify exact domain filter expression structure
expect(queryD.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
// Verify Formula F1 - error rate calculation
const formulas = payload.query.builder.queryFormulas;

View File

@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryA.filter) {
expect(queryA.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryB.filter) {
expect(queryB.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(queryC.aggregateOperator).toBe('count');
if (queryC.filter) {
expect(queryC.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryD.filter) {
expect(queryD.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
}
if (queryE.filter) {
expect(queryE.filter.expression).toContain(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
`http_host = 'api.example.com'`,
);
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(query.filter.expression).toContain('staging');
// Also verify domain filter is still present
expect(query.filter.expression).toContain(
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
"http_host = 'api.internal.com'",
);
// Verify client kind filter is present
expect(query.filter.expression).toContain("kind_string = 'Client'");

View File

@@ -34,7 +34,6 @@ function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
filters,
timeRange,
onDragSelect,
@@ -48,7 +47,6 @@ function StatusCodeBarCharts({
unknown
>;
domainName: string;
endPointName: string;
filters: IBuilderQuery['filters'];
timeRange: {
startTime: number;
@@ -144,11 +142,11 @@ function StatusCodeBarCharts({
const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, endPointName, {
getStatusCodeBarChartWidgetData(domainName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, endPointName, filters],
[domainName, filters],
);
const graphClickHandler = useCallback(
@@ -166,6 +164,7 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);
handleGraphClick({
xValue,
yValue,

View File

@@ -12,7 +12,7 @@ export const VIEW_TYPES = {
// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
HTTP_URL: 'http_url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
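
Review note: the switch from `URL_PATH: 'http.url'` to `HTTP_URL: 'http_url'` (alongside the existing `SERVER_NAME: 'http_host'`) is what lets the filter builders drop their OR-based fallbacks on raw OTel attributes. A minimal before/after sketch of the resulting expressions, mirroring the helpers in the utils diff further down:

```ts
const SPAN_ATTRIBUTES = {
  HTTP_URL: 'http_url',
  SERVER_NAME: 'http_host',
} as const;

// Old shape (removed): raw attributes required OR fallbacks.
const legacyDomainFilter = (domain: string): string =>
  `(net.peer.name = '${domain}' OR server.address = '${domain}')`;
const legacyUrlExists = `(http.url EXISTS OR url.full EXISTS)`;

// New shape: one derived column per concept.
const getDomainNameFilterExpression = (domain: string): string =>
  `${SPAN_ATTRIBUTES.SERVER_NAME} = '${domain}'`;
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;

// getDomainNameFilterExpression('api.example.com')
//   -> "http_host = 'api.example.com'"
```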

View File

@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
const endpointFilter = result?.items?.find(
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
item.value === endPointName,
);
expect(endpointFilter).toBeDefined();
@@ -344,13 +344,12 @@ describe('API Monitoring Utils', () => {
describe('getFormattedEndPointDropDownData', () => {
it('should format endpoint dropdown data correctly', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const mockData = [
{
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/users',
'url.full': 'http://example.com/api/users',
A: 150, // count or other metric
},
},
@@ -358,7 +357,6 @@ describe('API Monitoring Utils', () => {
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/orders',
'url.full': 'http://example.com/api/orders',
A: 75,
},
},
@@ -406,7 +404,7 @@ describe('API Monitoring Utils', () => {
it('should handle items without URL path', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
type MockDataType = {
data: {
[key: string]: string | number;
@@ -712,13 +710,11 @@ describe('API Monitoring Utils', () => {
it('should generate widget configuration for status code bar chart', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const filters = { items: [], op: 'AND' };
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);
@@ -741,21 +737,11 @@ describe('API Monitoring Utils', () => {
if (domainFilter) {
expect(domainFilter.value).toBe(domainName);
}
// Should have endpoint filter if provided
const endpointFilter = queryData.filters?.items?.find(
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
);
expect(endpointFilter).toBeDefined();
if (endpointFilter) {
expect(endpointFilter.value).toBe(endPointName);
}
});
it('should include custom filters in the widget configuration', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const customFilter = {
id: 'custom-filter',
key: {
@@ -771,7 +757,6 @@ describe('API Monitoring Utils', () => {
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);

View File

@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
}
>
Click Row

View File

@@ -6,10 +6,10 @@
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: (net.peer.name OR server.address)
* - Domain filter: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: Both http.url AND url.full with type 'attribute'
* - GroupBy: http_url with type 'attribute'
*/
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,6 +18,8 @@ import {
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('AllEndpointsWidget - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const emptyFilters: IBuilderQuery['filters'] = {
@@ -92,28 +94,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
// Queries A, B, C have identical base filter
expect(queryA.filter?.expression).toBe(
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
expect(queryB.filter?.expression).toBe(
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
expect(queryC.filter?.expression).toBe(
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
// Query D has additional has_error filter
expect(queryD.filter?.expression).toBe(
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
});
});
describe('2. GroupBy Structure', () => {
it('default groupBy includes both http.url and url.full with type attribute', () => {
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
const widget = getAllEndpointsWidgetData(
emptyGroupBy,
mockDomainName,
@@ -124,23 +126,13 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have the same default groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(2);
expect(query.groupBy).toHaveLength(1);
// http.url
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'http.url',
type: 'attribute',
});
// url.full
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'attribute',
});
});
@@ -170,19 +162,18 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have defaults + custom groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
// First two should be defaults (http.url, url.full)
expect(query.groupBy[0].key).toBe('http.url');
expect(query.groupBy[1].key).toBe('url.full');
// First should be the default (http_url)
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
// Last two should be custom (matching subset of properties)
expect(query.groupBy[2]).toMatchObject({
expect(query.groupBy[1]).toMatchObject({
dataType: DataTypes.String,
key: 'service.name',
type: 'resource',
});
expect(query.groupBy[3]).toMatchObject({
expect(query.groupBy[2]).toMatchObject({
dataType: DataTypes.String,
key: 'deployment.environment',
type: 'resource',

View File

@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
value: '/api/test',
}),
]),

View File

@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
it('formats data using the utility function', () => {
const mockRows = [
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
];
const dataProps = {

View File

@@ -6,15 +6,18 @@
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: (net.peer.name OR server.address)
* - Domain handling: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Existence check: http_url EXISTS
* - Aggregation: count() expression
* - GroupBy: Both http.url AND url.full with type 'attribute'
* - GroupBy: http_url with type 'attribute'
*/
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('EndpointDropdown - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
@@ -43,9 +46,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain http_host = '${domainName}'
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -53,7 +56,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Base filter 3: Existence check
expect(queryA.filter?.expression).toContain(
'(http.url EXISTS OR url.full EXISTS)',
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
);
// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -64,16 +67,11 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
});
expect(queryA).not.toHaveProperty('aggregateAttribute');
// GroupBy: Both http.url and url.full
expect(queryA.groupBy).toHaveLength(2);
// GroupBy: http_url
expect(queryA.groupBy).toHaveLength(1);
expect(queryA.groupBy).toContainEqual({
key: 'http.url',
dataType: 'string',
type: 'attribute',
});
expect(queryA.groupBy).toContainEqual({
key: 'url.full',
dataType: 'string',
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'attribute',
});
});
@@ -120,53 +118,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Exact filter expression with custom filters merged
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
);
});
});
describe('3. HTTP URL Filter Special Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const dropdownQuery = payload[2];
const expression =
dropdownQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: Exact filter expression with http.url converted to OR logic
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
);
});
});

View File

@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
it('uses new domain filter format: (http_host)', () => {
const widget = getRateOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
it('uses new domain filter format: (http_host)', () => {
const widget = getLatencyOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData.filter).toBeDefined();
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
`http_host = '${mockDomainName}' service.name = 'user-service'`,
);
});
});

View File

@@ -142,7 +142,6 @@ describe('StatusCodeBarCharts', () => {
endTime: 1609545600000,
};
const mockDomainName = 'test-domain';
const mockEndPointName = '/api/test';
const onDragSelectMock = jest.fn();
const refetchFn = jest.fn();
@@ -232,7 +231,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -268,7 +266,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -311,7 +308,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -356,7 +352,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -404,7 +399,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -419,7 +413,6 @@ describe('StatusCodeBarCharts', () => {
// but we've confirmed the function is mocked and ready to be tested
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: [],
op: 'AND',
@@ -467,7 +460,6 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockCustomFilters as IBuilderQuery['filters']}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -477,7 +469,6 @@ describe('StatusCodeBarCharts', () => {
// Assert widget creation was called with the correct parameters
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({ id: 'custom-filter' }),

View File

@@ -10,7 +10,7 @@
*
* V5 Changes:
* - Filter format change: filters.items[] → filter.expression
* - Domain filter: (net.peer.name OR server.address)
* - Domain filter: (http_host)
* - Kind filter: kind_string = 'Client'
* - stepInterval: 60 → null
* - Grouped by response_status_code
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain (http_host)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain (http_host)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toBe(latencyExpression);
// Verify base filters
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain('http_host');
expect(callsExpression).toContain("kind_string = 'Client'");
// Verify custom filters are merged
@@ -187,51 +187,4 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toContain('production');
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/metrics',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const callsChartQuery = payload[4];
const latencyChartQuery = payload[5];
const callsExpression =
callsChartQuery.query.builder.queryData[0].filter?.expression;
const latencyExpression =
latencyChartQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(callsExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
expect(latencyExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
// Base filters still present
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
});
});
});

View File

@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Kind filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (net.peer.name OR server.address)
// Base filter 1: Domain (http_host)
expect(queryA.filter?.expression).toContain(
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
`http_host = '${mockDomainName}'`,
);
// Base filter 2: Kind
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain('http_host');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
@@ -165,62 +165,4 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(expression).toContain(
"(http.url = '/api/users' OR url.full = '/api/users')",
);
// Other filters still present
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
});
});
});

View File

@@ -84,7 +84,7 @@ describe('TopErrors', () => {
{
columns: [
{
name: 'http.url',
name: SPAN_ATTRIBUTES.HTTP_URL,
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -124,7 +124,7 @@ describe('TopErrors', () => {
table: {
rows: [
{
'http.url': '/api/test',
http_url: '/api/test',
A: 100,
},
],
@@ -206,7 +206,7 @@ describe('TopErrors', () => {
expect(navigateMock).toHaveBeenCalledWith({
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: 'http.url' }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
op: '=',
value: '/api/test',
}),
@@ -335,7 +335,7 @@ describe('TopErrors', () => {
// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
);
});
});

View File

@@ -15,7 +15,6 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
@@ -57,12 +56,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);
export const getDomainNameFilterExpression = (domainName: string): string =>
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
`http_host = '${domainName}'`;
export const clientKindExpression = `kind_string = 'Client'`;
/**
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* Converts filters to expression
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
@@ -75,34 +74,6 @@ export const convertFiltersWithUrlHandling = (
return baseExpression;
}
// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
const httpUrlFilter = filters.items?.find(
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
);
// If http.url filter exists, create modified filters with (http.url OR url.full)
if (httpUrlFilter && httpUrlFilter.value) {
// Remove ALL http.url filters from items (guards against duplicates)
const otherFilters = filters.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
);
// Convert to expression first with other filters
const {
filter: intermediateFilter,
} = convertFiltersToExpressionWithExistingQuery(
{ ...filters, items: otherFilters || [] },
baseExpression,
);
// Add the OR condition for http.url and url.full
const urlValue = httpUrlFilter.value;
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
return intermediateFilter.expression.trim()
? `${intermediateFilter.expression} AND ${urlCondition}`
: urlCondition;
}
const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
baseExpression,
@@ -371,7 +342,7 @@ export const formatDataForTable = (
});
};
const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
export const getDomainMetricsQueryPayload = (
domainName: string,
@@ -588,14 +559,7 @@ const defaultGroupBy = [
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'attribute',
},
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
key: SPAN_ATTRIBUTES.HTTP_URL,
type: 'attribute',
},
// {
@@ -867,8 +831,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`has_error = true`,
];
if (showStatusCodeErrors) {
@@ -910,12 +874,7 @@ export const getTopErrorsQueryPayload = (
filter: { expression: filterExpression },
groupBy: [
{
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
name: SPAN_ATTRIBUTES.HTTP_URL,
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -1134,11 +1093,11 @@ export const formatEndPointsDataForTable = (
if (!isGroupedByAttribute) {
formattedData = data?.map((endpoint) => {
const { port } = extractPortAndEndpoint(
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
);
return {
key: v4(),
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
port,
callCount:
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
@@ -1262,9 +1221,7 @@ export const formatTopErrorsDataForTable = (
return {
key: v4(),
endpointName: getDisplayValue(
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
),
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
statusMessage: getDisplayValue(rowObj.status_message),
count: getDisplayValue(rowObj.__result_0),
@@ -1281,10 +1238,10 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'ea16470b',
key: {
key: 'http.url',
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'tag',
id: 'http.url--string--tag--false',
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
},
op: '=',
value: endPointName,
@@ -1781,7 +1738,7 @@ export const getEndPointDetailsQueryPayload = (
filters || { items: [], op: 'AND' },
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
),
},
expression: 'A',
@@ -1793,12 +1750,7 @@ export const getEndPointDetailsQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'attribute',
},
{
key: 'url.full',
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'attribute',
},
@@ -2225,7 +2177,7 @@ export const getEndPointZeroStateQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.URL_PATH,
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
type: 'tag',
},
@@ -2419,8 +2371,7 @@ export const statusCodeWidgetInfo = [
interface EndPointDropDownResponseRow {
data: {
[SPAN_ATTRIBUTES.URL_PATH]: string;
'url.full': string;
[SPAN_ATTRIBUTES.HTTP_URL]: string;
A: number;
};
}
@@ -2439,8 +2390,8 @@ export const getFormattedEndPointDropDownData = (
}
return data.map((row) => ({
key: v4(),
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
}));
};
@@ -2769,7 +2720,6 @@ export const groupStatusCodes = (
export const getStatusCodeBarChartWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => ({
query: {
@@ -2798,20 +2748,6 @@ export const getStatusCodeBarChartWidgetData = (
op: '=',
value: domainName,
},
...(endPointName
? [
{
id: '8b1be6f0',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',
value: endPointName,
},
]
: []),
...(filters?.items || []),
],
op: 'AND',
@@ -2933,7 +2869,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND http_url EXISTS`,
),
},
functions: [],
@@ -2965,7 +2901,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND http_url EXISTS`,
),
},
functions: [],
@@ -2997,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND http_url EXISTS`,
),
},
functions: [],
@@ -3029,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
),
},
functions: [],
@@ -3060,24 +2996,12 @@ export const getAllEndpointsWidgetData = (
);
widget.renderColumnCell = {
[SPAN_ATTRIBUTES.URL_PATH]: (
url: string | number,
record?: RowData,
): ReactNode => {
// First try to use the url from the column value
let urlValue = url;
// If url is empty/null and we have the record, fallback to url.full
if (isEmptyFilterValue(url) && record) {
const { 'url.full': urlFull } = record;
urlValue = urlFull;
}
if (!urlValue || urlValue === 'n/a') {
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
return <span>-</span>;
}
const { endpoint } = extractPortAndEndpoint(String(urlValue));
const { endpoint } = extractPortAndEndpoint(String(url));
return <span>{getDisplayValue(endpoint)}</span>;
},
A: (numOfCalls: any): ReactNode => (
@@ -3132,8 +3056,8 @@ export const getAllEndpointsWidgetData = (
};
widget.customColTitles = {
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
'net.peer.port': 'Port',
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
};
widget.title = (
@@ -3158,12 +3082,10 @@ export const getAllEndpointsWidgetData = (
</div>
);
widget.hiddenColumns = ['url.full'];
return widget;
};
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
export const getGroupByFiltersFromGroupByValues = (
rowData: any,
@@ -3221,7 +3143,7 @@ export const getRateOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`http_host = '${domainName}'`,
),
},
functions: [],
@@ -3272,7 +3194,7 @@ export const getLatencyOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`http_host = '${domainName}'`,
),
},
functions: [],

View File

@@ -1,3 +1,22 @@
.chart-manager-series-label {
width: 100%;
min-width: 0;
max-width: 100%;
cursor: pointer;
border: none;
background-color: transparent;
color: inherit;
text-align: left;
padding: 0;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
&:disabled {
cursor: not-allowed;
}
}
.chart-manager-container {
width: 100%;
max-height: calc(40% - 40px);

View File

@@ -1,24 +1,28 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { Button, Input } from 'antd';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import { ResizeTable } from 'components/ResizeTable';
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
import { useNotifications } from 'hooks/useNotifications';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { getChartManagerColumns } from './columns';
import { ExtendedChartDataset, getDefaultTableDataSet } from './utils';
import './ChartManager.styles.scss';
interface ChartManagerProps {
config: UPlotConfigBuilder;
alignedData: uPlot.AlignedData;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;
onCancel?: () => void;
}
const X_AXIS_INDEX = 0;
/**
* ChartManager provides a tabular view to manage the visibility of
* individual series on a uPlot chart.
@@ -28,16 +32,12 @@ interface ChartManagerProps {
* - filter series by label
* - toggle individual series on/off
* - persist the visibility configuration to local storage.
*
* @param config - `UPlotConfigBuilder` instance used to derive chart options.
* @param alignedData - uPlot aligned data used to build the initial table dataset.
* @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
* @param onCancel - Optional callback invoked when the user cancels the dialog.
*/
export default function ChartManager({
config,
alignedData,
yAxisUnit,
decimalPrecision = PrecisionOptionsEnum.TWO,
onCancel,
}: ChartManagerProps): JSX.Element {
const { notifications } = useNotifications();
@@ -53,8 +53,13 @@ export default function ChartManager({
const { isDashboardLocked } = useDashboard();
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
getDefaultTableDataSet(
config.getConfig() as uPlot.Options,
alignedData,
decimalPrecision,
),
);
const [filterValue, setFilterValue] = useState('');
const graphVisibilityState = useMemo(
() =>
@@ -67,46 +72,62 @@ export default function ChartManager({
useEffect(() => {
setTableDataSet(
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
);
}, [alignedData, config]);
const filterHandler = useCallback(
(event: React.ChangeEvent<HTMLInputElement>): void => {
const value = event.target.value.toString().toLowerCase();
const updatedDataSet = tableDataSet.map((item) => {
if (item.label?.toLocaleLowerCase().includes(value)) {
return { ...item, show: true };
}
return { ...item, show: false };
});
setTableDataSet(updatedDataSet);
},
[tableDataSet],
);
const dataSource = useMemo(
() =>
tableDataSet.filter(
(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
getDefaultTableDataSet(
config.getConfig() as uPlot.Options,
alignedData,
decimalPrecision,
),
[tableDataSet],
);
setFilterValue('');
}, [alignedData, config, decimalPrecision]);
const handleFilterChange = useCallback(
(e: React.ChangeEvent<HTMLInputElement>): void => {
setFilterValue(e.target.value.toLowerCase());
},
[],
);
const handleToggleSeriesOnOff = useCallback(
(index: number): void => {
onToggleSeriesOnOff(index);
},
[onToggleSeriesOnOff],
);
const dataSource = useMemo(() => {
const filter = filterValue.trim();
return tableDataSet.filter((item, index) => {
if (index === X_AXIS_INDEX) {
return false;
}
if (!filter) {
return true;
}
return item.label?.toLowerCase().includes(filter) ?? false;
});
}, [tableDataSet, filterValue]);
const columns = useMemo(
() =>
getGraphManagerTableColumns({
getChartManagerColumns({
tableDataSet,
checkBoxOnChangeHandler: (_e, index) => {
onToggleSeriesOnOff(index);
},
graphVisibilityState,
labelClickedHandler: onToggleSeriesVisibility,
onToggleSeriesOnOff: handleToggleSeriesOnOff,
onToggleSeriesVisibility,
yAxisUnit,
isGraphDisabled: isDashboardLocked,
decimalPrecision,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
[
tableDataSet,
graphVisibilityState,
handleToggleSeriesOnOff,
onToggleSeriesVisibility,
yAxisUnit,
isDashboardLocked,
decimalPrecision,
],
);
const handleSave = useCallback((): void => {
@@ -114,20 +135,23 @@ export default function ChartManager({
notifications.success({
message: 'The updated graphs & legends are saved',
});
if (onCancel) {
onCancel();
}
onCancel?.();
}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);
return (
<div className="chart-manager-container">
<div className="chart-manager-header">
<Input onChange={filterHandler} placeholder="Filter Series" />
<Input
placeholder="Filter Series"
value={filterValue}
onChange={handleFilterChange}
data-testid="filter-input"
/>
<div className="chart-manager-actions-container">
<Button type="default" onClick={onCancel}>
<Button type="default" onClick={onCancel} data-testid="cancel-button">
Cancel
</Button>
<Button type="primary" onClick={handleSave}>
<Button type="primary" onClick={handleSave} data-testid="save-button">
Save
</Button>
</div>
@@ -136,10 +160,10 @@ export default function ChartManager({
<ResizeTable
columns={columns}
dataSource={dataSource}
virtual
rowKey="index"
scroll={{ y: 200 }}
pagination={false}
virtual
/>
</div>
</div>
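
For reference, a minimal standalone sketch of the derived-filter pattern ChartManager now uses: the filter text is held in state and the visible rows are computed from the full dataset instead of mutating it. The `SeriesRow` shape, the `visibleRows` name, and the sample data are illustrative only, not part of the change.

```ts
// Illustrative sketch of the dataSource derivation in ChartManager above.
interface SeriesRow {
  index: number;
  label?: string;
}

const X_AXIS_INDEX = 0;

function visibleRows(rows: SeriesRow[], filterValue: string): SeriesRow[] {
  const filter = filterValue.trim().toLowerCase();
  return rows.filter((row, index) => {
    if (index === X_AXIS_INDEX) {
      return false; // the x-axis row is never listed
    }
    if (!filter) {
      return true; // an empty filter shows every series
    }
    return row.label?.toLowerCase().includes(filter) ?? false;
  });
}

// visibleRows(
//   [{ index: 0, label: 'Time' }, { index: 1, label: 'Series 1' }, { index: 2, label: 'Series 2' }],
//   'series 1',
// ) -> [{ index: 1, label: 'Series 1' }]
```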

View File

@@ -0,0 +1,31 @@
import { Tooltip } from 'antd';
import './ChartManager.styles.scss';
interface SeriesLabelProps {
label: string;
labelIndex: number;
onClick: (idx: number) => void;
disabled?: boolean;
}
export function SeriesLabel({
label,
labelIndex,
onClick,
disabled,
}: SeriesLabelProps): JSX.Element {
return (
<Tooltip placement="topLeft" title={label}>
<button
className="chart-manager-series-label"
disabled={disabled}
type="button"
data-testid={`series-label-button-${labelIndex}`}
onClick={(): void => onClick(labelIndex)}
>
{label}
</button>
</Tooltip>
);
}

View File

@@ -0,0 +1,172 @@
import userEvent from '@testing-library/user-event';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { render, screen } from 'tests/test-utils';
import ChartManager from '../ChartManager';
const mockSyncSeriesVisibilityToLocalStorage = jest.fn();
const mockNotificationsSuccess = jest.fn();
jest.mock('lib/uPlotV2/context/PlotContext', () => ({
usePlotContext: (): {
onToggleSeriesOnOff: jest.Mock;
onToggleSeriesVisibility: jest.Mock;
syncSeriesVisibilityToLocalStorage: jest.Mock;
} => ({
onToggleSeriesOnOff: jest.fn(),
onToggleSeriesVisibility: jest.fn(),
syncSeriesVisibilityToLocalStorage: mockSyncSeriesVisibilityToLocalStorage,
}),
}));
jest.mock('lib/uPlotV2/hooks/useLegendsSync', () => ({
__esModule: true,
default: (): {
legendItemsMap: { [key: number]: { show: boolean; label: string } };
} => ({
legendItemsMap: {
0: { show: true, label: 'Time' },
1: { show: true, label: 'Series 1' },
2: { show: true, label: 'Series 2' },
},
}),
}));
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): { isDashboardLocked: boolean } => ({
isDashboardLocked: false,
}),
}));
jest.mock('hooks/useNotifications', () => ({
useNotifications: (): { notifications: { success: jest.Mock } } => ({
notifications: {
success: mockNotificationsSuccess,
},
}),
}));
jest.mock('components/ResizeTable', () => {
const MockTable = ({
dataSource,
columns,
}: {
dataSource: { index: number; label?: string }[];
columns: { key: string; title: string }[];
}): JSX.Element => (
<div data-testid="resize-table">
{columns.map((col) => (
<span key={col.key}>{col.title}</span>
))}
{dataSource.map((row) => (
<div key={row.index} data-testid={`row-${row.index}`}>
{row.label}
</div>
))}
</div>
);
return { ResizeTable: MockTable };
});
const createMockConfig = (): { getConfig: () => uPlot.Options } => ({
getConfig: (): uPlot.Options => ({
width: 100,
height: 100,
series: [
{ label: 'Time', value: 'time' },
{ label: 'Series 1', scale: 'y' },
{ label: 'Series 2', scale: 'y' },
],
}),
});
const createAlignedData = (): uPlot.AlignedData => [
[1000, 2000, 3000],
[10, 20, 30],
[1, 2, 3],
];
describe('ChartManager', () => {
const mockOnCancel = jest.fn();
beforeEach(() => {
jest.clearAllMocks();
});
it('renders filter input and action buttons', () => {
render(
<ChartManager
config={createMockConfig() as any}
alignedData={createAlignedData()}
onCancel={mockOnCancel}
/>,
);
expect(screen.getByPlaceholderText('Filter Series')).toBeInTheDocument();
expect(screen.getByRole('button', { name: /cancel/i })).toBeInTheDocument();
expect(screen.getByRole('button', { name: /save/i })).toBeInTheDocument();
});
it('renders ResizeTable with data', () => {
render(
<ChartManager
config={createMockConfig() as UPlotConfigBuilder}
alignedData={createAlignedData()}
/>,
);
expect(screen.getByTestId('resize-table')).toBeInTheDocument();
});
it('calls onCancel when Cancel button is clicked', async () => {
render(
<ChartManager
config={createMockConfig() as UPlotConfigBuilder}
alignedData={createAlignedData()}
onCancel={mockOnCancel}
/>,
);
await userEvent.click(screen.getByTestId('cancel-button'));
expect(mockOnCancel).toHaveBeenCalledTimes(1);
});
it('filters table data when typing in filter input', async () => {
render(
<ChartManager
config={createMockConfig() as UPlotConfigBuilder}
alignedData={createAlignedData()}
/>,
);
// Before filter: both Series 1 and Series 2 rows are visible
expect(screen.getByTestId('row-1')).toBeInTheDocument();
expect(screen.getByTestId('row-2')).toBeInTheDocument();
const filterInput = screen.getByTestId('filter-input');
await userEvent.type(filterInput, 'Series 1');
// After filter: only Series 1 row is visible, Series 2 row is filtered out
expect(screen.getByTestId('row-1')).toBeInTheDocument();
expect(screen.queryByTestId('row-2')).not.toBeInTheDocument();
});
it('calls syncSeriesVisibilityToLocalStorage, notifications.success, and onCancel when Save is clicked', async () => {
render(
<ChartManager
config={createMockConfig() as UPlotConfigBuilder}
alignedData={createAlignedData()}
onCancel={mockOnCancel}
/>,
);
await userEvent.click(screen.getByTestId('save-button'));
expect(mockSyncSeriesVisibilityToLocalStorage).toHaveBeenCalledTimes(1);
expect(mockNotificationsSuccess).toHaveBeenCalledWith({
message: 'The updated graphs & legends are saved',
});
expect(mockOnCancel).toHaveBeenCalledTimes(1);
});
});

View File

@@ -0,0 +1,39 @@
import userEvent from '@testing-library/user-event';
import { render, screen } from 'tests/test-utils';
import { SeriesLabel } from '../SeriesLabel';
describe('SeriesLabel', () => {
it('renders the label text', () => {
render(
<SeriesLabel label="Test Series Label" labelIndex={1} onClick={jest.fn()} />,
);
expect(screen.getByTestId('series-label-button-1')).toHaveTextContent(
'Test Series Label',
);
});
it('calls onClick with labelIndex when clicked', async () => {
const onClick = jest.fn();
render(<SeriesLabel label="Series A" labelIndex={2} onClick={onClick} />);
await userEvent.click(screen.getByTestId('series-label-button-2'));
expect(onClick).toHaveBeenCalledWith(2);
expect(onClick).toHaveBeenCalledTimes(1);
});
it('renders disabled button when disabled prop is true', () => {
render(
<SeriesLabel label="Disabled" labelIndex={0} onClick={jest.fn()} disabled />,
);
const button = screen.getByTestId('series-label-button-0');
expect(button).toBeDisabled();
});
it('has chart-manager-series-label class', () => {
render(<SeriesLabel label="Label" labelIndex={0} onClick={jest.fn()} />);
const button = screen.getByTestId('series-label-button-0');
expect(button).toHaveClass('chart-manager-series-label');
});
});

View File

@@ -0,0 +1,167 @@
import { render } from '@testing-library/react';
import { Y_AXIS_UNIT_NAMES } from 'components/YAxisUnitSelector/constants';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import { getChartManagerColumns } from '../columns';
import { ExtendedChartDataset } from '../utils';
const createMockDataset = (
index: number,
overrides: Partial<ExtendedChartDataset> = {},
): ExtendedChartDataset =>
({
index,
label: `Series ${index}`,
show: true,
sum: 100,
avg: 50,
min: 10,
max: 90,
stroke: '#ff0000',
...overrides,
} as ExtendedChartDataset);
describe('getChartManagerColumns', () => {
const tableDataSet: ExtendedChartDataset[] = [
createMockDataset(0, { label: 'Time' }),
createMockDataset(1),
createMockDataset(2),
];
const graphVisibilityState = [true, true, false];
const onToggleSeriesOnOff = jest.fn();
const onToggleSeriesVisibility = jest.fn();
beforeEach(() => {
jest.clearAllMocks();
});
it('returns columns with expected structure', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
});
expect(columns).toHaveLength(6);
expect(columns[0].key).toBe('index');
expect(columns[1].key).toBe('label');
expect(columns[2].key).toBe('avg');
expect(columns[3].key).toBe('sum');
expect(columns[4].key).toBe('max');
expect(columns[5].key).toBe('min');
});
it('includes Label column with title', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
});
const labelCol = columns.find((c) => c.key === 'label');
expect(labelCol?.title).toBe('Label');
});
it('formats column titles with yAxisUnit', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
yAxisUnit: 'ms',
});
const avgCol = columns.find((c) => c.key === 'avg');
expect(avgCol?.title).toBe(
`Avg (in ${Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MILLISECONDS]})`,
);
});
it('numeric column render returns formatted string with yAxisUnit', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
yAxisUnit: 'ms',
});
const avgCol = columns.find((c) => c.key === 'avg');
const renderFn = avgCol?.render as
| ((val: number, record: ExtendedChartDataset, index: number) => string)
| undefined;
expect(renderFn).toBeDefined();
const output = renderFn?.(123.45, tableDataSet[1], 1);
expect(output).toBe('123.45 ms');
});
it('numeric column render formats zero when value is undefined', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
yAxisUnit: 'none',
});
const sumCol = columns.find((c) => c.key === 'sum');
const renderFn = sumCol?.render as
| ((
val: number | undefined,
record: ExtendedChartDataset,
index: number,
) => string)
| undefined;
const output = renderFn?.(undefined, tableDataSet[1], 1);
expect(output).toBe('0');
});
it('label column render displays label text and is clickable', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
});
const labelCol = columns.find((c) => c.key === 'label');
const renderFn = labelCol?.render as
| ((
label: string,
record: ExtendedChartDataset,
index: number,
) => JSX.Element)
| undefined;
expect(renderFn).toBeDefined();
const renderResult = renderFn!('Series 1', tableDataSet[1], 1);
const { getByRole } = render(renderResult);
expect(getByRole('button', { name: 'Series 1' })).toBeInTheDocument();
});
it('index column render renders checkbox with correct checked state', () => {
const columns = getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
});
const indexCol = columns.find((c) => c.key === 'index');
const renderFn = indexCol?.render as
| ((
_val: unknown,
record: ExtendedChartDataset,
index: number,
) => JSX.Element)
| undefined;
expect(renderFn).toBeDefined();
const { container } = render(renderFn!(null, tableDataSet[1], 1));
const checkbox = container.querySelector('input[type="checkbox"]');
expect(checkbox).toBeInTheDocument();
expect(checkbox).toBeChecked(); // graphVisibilityState[1] is true
});
});

View File

@@ -0,0 +1,113 @@
import { PrecisionOptionsEnum } from 'components/Graph/types';
import {
formatTableValueWithUnit,
getDefaultTableDataSet,
getTableColumnTitle,
} from '../utils';
describe('ChartManager utils', () => {
describe('getDefaultTableDataSet', () => {
const createOptions = (seriesCount: number): uPlot.Options => ({
series: Array.from({ length: seriesCount }, (_, i) =>
i === 0
? { label: 'Time', value: 'time' }
: { label: `Series ${i}`, scale: 'y' },
),
width: 100,
height: 100,
});
it('returns one row per series with computed stats', () => {
const options = createOptions(3);
const data: uPlot.AlignedData = [
[1000, 2000, 3000],
[10, 20, 30],
[1, 2, 3],
];
const result = getDefaultTableDataSet(options, data);
expect(result).toHaveLength(3);
expect(result[0]).toMatchObject({
index: 0,
label: 'Time',
show: true,
});
expect(result[1]).toMatchObject({
index: 1,
label: 'Series 1',
show: true,
sum: 60,
avg: 20,
max: 30,
min: 10,
});
expect(result[2]).toMatchObject({
index: 2,
label: 'Series 2',
show: true,
sum: 6,
avg: 2,
max: 3,
min: 1,
});
});
it('handles empty data arrays', () => {
const options = createOptions(2);
const data: uPlot.AlignedData = [[], []];
const result = getDefaultTableDataSet(options, data);
expect(result[0]).toMatchObject({
sum: 0,
avg: 0,
max: 0,
min: 0,
});
});
it('respects decimalPrecision parameter', () => {
const options = createOptions(2);
const data: uPlot.AlignedData = [[1000], [123.454]];
const resultTwo = getDefaultTableDataSet(
options,
data,
PrecisionOptionsEnum.TWO,
);
expect(resultTwo[1].avg).toBe(123.45);
const resultZero = getDefaultTableDataSet(
options,
data,
PrecisionOptionsEnum.ZERO,
);
expect(resultZero[1].avg).toBe(123);
});
});
describe('formatTableValueWithUnit', () => {
it('formats value with unit', () => {
const result = formatTableValueWithUnit(1234.56, 'ms');
expect(result).toBe('1.23 s');
});
it('falls back to none format when yAxisUnit is undefined', () => {
const result = formatTableValueWithUnit(123.45);
expect(result).toBe('123.45');
});
});
describe('getTableColumnTitle', () => {
it('returns title only when yAxisUnit is undefined', () => {
expect(getTableColumnTitle('Avg')).toBe('Avg');
});
it('returns title with unit when yAxisUnit is provided', () => {
const result = getTableColumnTitle('Avg', 'ms');
expect(result).toBe('Avg (in Milliseconds (ms))');
});
});
});

View File

@@ -0,0 +1,94 @@
import { ColumnType } from 'antd/es/table';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import CustomCheckBox from 'container/GridCardLayout/GridCard/FullView/TableRender/CustomCheckBox';
import { SeriesLabel } from './SeriesLabel';
import {
ExtendedChartDataset,
formatTableValueWithUnit,
getTableColumnTitle,
} from './utils';
export interface GetChartManagerColumnsParams {
tableDataSet: ExtendedChartDataset[];
graphVisibilityState: boolean[];
onToggleSeriesOnOff: (index: number) => void;
onToggleSeriesVisibility: (index: number) => void;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;
isGraphDisabled?: boolean;
}
export function getChartManagerColumns({
tableDataSet,
graphVisibilityState,
onToggleSeriesOnOff,
onToggleSeriesVisibility,
yAxisUnit,
decimalPrecision = PrecisionOptionsEnum.TWO,
isGraphDisabled,
}: GetChartManagerColumnsParams): ColumnType<ExtendedChartDataset>[] {
return [
{
title: '',
width: 50,
dataIndex: 'index',
key: 'index',
render: (_: unknown, record: ExtendedChartDataset): JSX.Element => (
<CustomCheckBox
data={tableDataSet}
graphVisibilityState={graphVisibilityState}
index={record.index}
disabled={isGraphDisabled}
checkBoxOnChangeHandler={(_e, idx): void => onToggleSeriesOnOff(idx)}
/>
),
},
{
title: 'Label',
width: 300,
dataIndex: 'label',
key: 'label',
render: (label: string, record: ExtendedChartDataset): JSX.Element => (
<SeriesLabel
label={label ?? ''}
labelIndex={record.index}
disabled={isGraphDisabled}
onClick={onToggleSeriesVisibility}
/>
),
},
{
title: getTableColumnTitle('Avg', yAxisUnit),
width: 90,
dataIndex: 'avg',
key: 'avg',
render: (val: number | undefined): string =>
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
},
{
title: getTableColumnTitle('Sum', yAxisUnit),
width: 90,
dataIndex: 'sum',
key: 'sum',
render: (val: number | undefined): string =>
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
},
{
title: getTableColumnTitle('Max', yAxisUnit),
width: 90,
dataIndex: 'max',
key: 'max',
render: (val: number | undefined): string =>
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
},
{
title: getTableColumnTitle('Min', yAxisUnit),
width: 90,
dataIndex: 'min',
key: 'min',
render: (val: number | undefined): string =>
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
},
];
}
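
For reference, a minimal wiring sketch for `getChartManagerColumns`. The relative import path and the no-op handlers are assumptions for a standalone example; the logged keys match the column-order test earlier in this diff.

```ts
import { getChartManagerColumns } from './columns';

// No-op handlers stand in for the PlotContext callbacks that ChartManager passes in.
const columns = getChartManagerColumns({
  tableDataSet: [],
  graphVisibilityState: [],
  onToggleSeriesOnOff: () => {},
  onToggleSeriesVisibility: () => {},
  yAxisUnit: 'ms',
});

console.log(columns.map((col) => col.key));
// -> ['index', 'label', 'avg', 'sum', 'max', 'min']
```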

View File

@@ -0,0 +1,93 @@
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import { Y_AXIS_UNIT_NAMES } from 'components/YAxisUnitSelector/constants';
import uPlot from 'uplot';
/** Extended series with computed stats for table display */
export type ExtendedChartDataset = uPlot.Series & {
show: boolean;
sum: number;
avg: number;
min: number;
max: number;
index: number;
};
function roundToDecimalPrecision(
value: number,
decimalPrecision: PrecisionOption = PrecisionOptionsEnum.TWO,
): number {
if (
typeof value !== 'number' ||
Number.isNaN(value) ||
value === Infinity ||
value === -Infinity
) {
return 0;
}
if (decimalPrecision === PrecisionOptionsEnum.FULL) {
return value;
}
// keep the sign, the integer part, and up to `decimalPrecision` decimal digits
// (preserving any leading zeros after the decimal point)
const regex = new RegExp(`^-?\\d*\\.?0*\\d{0,${decimalPrecision}}`);
const matched = value ? value.toFixed(decimalPrecision).match(regex) : null;
return matched ? parseFloat(matched[0]) : 0;
}
/** Build table dataset from uPlot options and aligned data */
export function getDefaultTableDataSet(
options: uPlot.Options,
data: uPlot.AlignedData,
decimalPrecision: PrecisionOption = PrecisionOptionsEnum.TWO,
): ExtendedChartDataset[] {
return options.series.map(
(series: uPlot.Series, index: number): ExtendedChartDataset => {
const arr = (data[index] as number[]) ?? [];
const sum = arr.reduce((a, b) => a + b, 0) || 0;
const count = arr.length || 1;
const hasValues = arr.length > 0;
return {
...series,
index,
show: true,
sum: roundToDecimalPrecision(sum, decimalPrecision),
avg: roundToDecimalPrecision(sum / count, decimalPrecision),
max: hasValues
? roundToDecimalPrecision(Math.max(...arr), decimalPrecision)
: 0,
min: hasValues
? roundToDecimalPrecision(Math.min(...arr), decimalPrecision)
: 0,
};
},
);
}
/** Format numeric value for table display using yAxisUnit */
export function formatTableValueWithUnit(
value: number,
yAxisUnit?: string,
decimalPrecision: PrecisionOption = PrecisionOptionsEnum.TWO,
): string {
return `${getYAxisFormattedValue(
String(value),
yAxisUnit ?? 'none',
decimalPrecision,
)}`;
}
/** Format column header with optional unit */
export function getTableColumnTitle(title: string, yAxisUnit?: string): string {
if (!yAxisUnit) {
return title;
}
const universalName =
Y_AXIS_UNIT_NAMES[yAxisUnit as keyof typeof Y_AXIS_UNIT_NAMES];
if (!universalName) {
return `${title} (in ${yAxisUnit})`;
}
return `${title} (in ${universalName})`;
}
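
For reference, a standalone sketch of the precision handling in `roundToDecimalPrecision` above, with outputs matching the utils tests earlier in this diff. The `toPrecision` name is illustrative, and the FULL-precision pass-through branch is omitted.

```ts
// Mirrors the non-FULL branch of roundToDecimalPrecision: non-finite input collapses
// to 0; otherwise the value is formatted with toFixed and the regex keeps the sign,
// the integer part, and up to `precision` decimal digits (preserving leading zeros
// after the decimal point).
function toPrecision(value: number, precision: number): number {
  if (!Number.isFinite(value)) {
    return 0;
  }
  const regex = new RegExp(`^-?\\d*\\.?0*\\d{0,${precision}}`);
  const matched = value.toFixed(precision).match(regex);
  return matched ? parseFloat(matched[0]) : 0;
}

toPrecision(123.454, 2); // 123.45
toPrecision(123.454, 0); // 123
toPrecision(Number.POSITIVE_INFINITY, 2); // 0
```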

View File

@@ -96,6 +96,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
onCancel={onToggleModelHandler}
/>
);
@@ -105,6 +106,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
chartData,
widget.yAxisUnit,
onToggleModelHandler,
widget.decimalPrecision,
]);
const onPlotDestroy = useCallback(() => {

View File

@@ -95,6 +95,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
onCancel={onToggleModelHandler}
/>
);
@@ -104,6 +105,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
chartData,
widget.yAxisUnit,
onToggleModelHandler,
widget.decimalPrecision,
]);
return (

View File

@@ -3,6 +3,7 @@ import { MemoryRouter, Route } from 'react-router-dom';
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { AppProvider } from 'providers/App/App';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import { Span } from 'types/api/trace/getTraceV2';
@@ -108,7 +109,7 @@ const createMockSpan = (): Span => ({
statusMessage: '',
tagMap: {
'http.method': 'GET',
'http.url': '/api/users?page=1',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/users?page=1',
'http.status_code': '200',
'service.name': 'frontend-service',
'span.kind': 'server',

View File

@@ -5,6 +5,7 @@ import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
@@ -878,7 +879,9 @@ describe('SpanDetailsDrawer', () => {
// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
expect(screen.getAllByText(SPAN_ATTRIBUTES.HTTP_URL).length).toBeGreaterThan(
0,
);
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
});
@@ -1126,7 +1129,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees all attributes initially
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.getByText('http.url')).toBeInTheDocument();
expect(screen.getByText(SPAN_ATTRIBUTES.HTTP_URL)).toBeInTheDocument();
expect(screen.getByText('http.status_code')).toBeInTheDocument();
// User types "method" in search
@@ -1136,7 +1139,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees only matching attributes
await waitFor(() => {
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.queryByText('http.url')).not.toBeInTheDocument();
expect(screen.queryByText(SPAN_ATTRIBUTES.HTTP_URL)).not.toBeInTheDocument();
expect(screen.queryByText('http.status_code')).not.toBeInTheDocument();
});
});

View File

@@ -1,3 +1,4 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { ILog } from 'types/api/logs/log';
import { Span } from 'types/api/trace/getTraceV2';
@@ -22,7 +23,7 @@ export const mockSpan: Span = {
event: [],
tagMap: {
'http.method': 'GET',
'http.url': '/api/test',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/test',
'http.status_code': '200',
},
hasError: false,

View File

@@ -1,15 +0,0 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getAllIngestionKeys } from 'api/IngestionKeys/getAllIngestionKeys';
import { AxiosError, AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const useGetAllIngestionsKeys = (
props: GetIngestionKeyProps,
): UseQueryResult<AxiosResponse<AllIngestionKeyProps>, AxiosError> =>
useQuery<AxiosResponse<AllIngestionKeyProps>, AxiosError>({
queryKey: [`IngestionKeys-${props.page}-${props.search}`],
queryFn: () => getAllIngestionKeys(props),
});

View File

@@ -1,3 +1,5 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
/* eslint-disable sonarjs/no-duplicate-string */
export const traceDetailResponse = [
{
@@ -35,7 +37,7 @@ export const traceDetailResponse = [
'component',
'host.name',
'http.method',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
'http.status_code',
'opencensus.exporterversion',
@@ -84,7 +86,7 @@ export const traceDetailResponse = [
'signoz.collector.id',
'component',
'http.method',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
],
[
@@ -741,7 +743,7 @@ export const traceDetailResponse = [
'component',
'http.method',
'http.status_code',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'net/http.reused',
'net/http.was_idle',
'service.name',
@@ -833,7 +835,7 @@ export const traceDetailResponse = [
'opencensus.exporterversion',
'signoz.collector.id',
'host.name',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'net/http.reused',
'net/http.was_idle',
],
@@ -916,7 +918,7 @@ export const traceDetailResponse = [
'net/http.was_idle',
'component',
'host.name',
'http.url',
SPAN_ATTRIBUTES.HTTP_URL,
'ip',
'service.name',
'signoz.collector.id',

View File

@@ -2,6 +2,7 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { getAttributesValues } from 'api/queryBuilder/getAttributesValues';
import { DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY } from 'constants/queryBuilder';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import {
BaseAutocompleteData,
DataTypes,
@@ -31,7 +32,7 @@ export const AllTraceFilterKeyValue: Record<string, string> = {
httpRoute: 'HTTP Route',
'http.route': 'HTTP Route',
httpUrl: 'HTTP URL',
'http.url': 'HTTP URL',
[SPAN_ATTRIBUTES.HTTP_URL]: 'HTTP URL',
traceID: 'Trace ID',
trace_id: 'Trace ID',
} as const;

View File

@@ -80,16 +80,11 @@ func (q *builderQuery[T]) Fingerprint() string {
case qbtypes.LogAggregation:
aggParts = append(aggParts, a.Expression)
case qbtypes.MetricAggregation:
var spaceAggParamStr string
if a.ComparisonSpaceAggregationParam != nil {
spaceAggParamStr = a.ComparisonSpaceAggregationParam.StringValue()
}
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
a.MetricName,
a.Temporality.StringValue(),
a.TimeAggregation.StringValue(),
a.SpaceAggregation.StringValue(),
spaceAggParamStr,
))
}
}

View File

@@ -123,8 +123,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
origTimeAgg := query.Aggregations[0].TimeAggregation
origGroupBy := slices.Clone(query.GroupBy)
if (query.Aggregations[0].SpaceAggregation.IsPercentile() ||
query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount) &&
if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
query.Aggregations[0].Type != metrictypes.ExpHistogramType {
// add le in the group by if doesn't exist
leExists := false
@@ -155,11 +154,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// make the time aggregation rate and space aggregation sum
if query.Aggregations[0].SpaceAggregation.IsPercentile() {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
} else {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
}
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
}
@@ -529,7 +524,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
sb := sqlbuilder.NewSelectBuilder()
@@ -582,29 +577,6 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
sb.Select("ts")
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return nil, err
}
sb.SelectMore(aggQuery)
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)

View File

@@ -1,7 +1,6 @@
package telemetrymetrics
import (
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
@@ -309,20 +308,3 @@ func AggregationColumnForSamplesTable(
}
return aggregationColumn, nil
}
func AggregationQueryForHistogramCount(param *metrictypes.ComparisonSpaceAggregationParam) (string, error) {
if param == nil {
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
}
histogramCountThreshold := param.Threshold
switch param.Operater {
case "<=":
return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
case ">":
return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
default:
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
}
}
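
For reference, a sketch of the arithmetic the removed `AggregationQueryForHistogramCount` SQL expressed: a linear interpolation over cumulative bucket counts around the threshold. TypeScript is used only for illustration; the `Bucket` shape and the `countAtOrBelow` name are assumptions, not from the codebase.

```ts
interface Bucket {
  le: number; // upper bound of a finite bucket
  cumulativeCount: number; // cumulative count observed at that bound
}

// Estimated number of observations <= threshold, interpolating linearly inside the
// bucket that straddles the threshold (the `<=` branch of the removed SQL).
function countAtOrBelow(buckets: Bucket[], threshold: number): number {
  const sorted = [...buckets].sort((a, b) => a.le - b.le);
  const below = sorted.filter((b) => b.le <= threshold);
  const above = sorted.filter((b) => b.le > threshold);
  if (below.length === 0) {
    return 0; // threshold sits below the smallest bound (edge case not covered by the SQL)
  }
  const lower = below[below.length - 1];
  if (above.length === 0) {
    // Beyond the largest finite bucket every threshold reports the same value,
    // matching the note in the deleted tests below.
    return lower.cumulativeCount;
  }
  const upper = above[0];
  const fraction = (threshold - lower.le) / (upper.le - lower.le);
  return (
    lower.cumulativeCount +
    (upper.cumulativeCount - lower.cumulativeCount) * fraction
  );
}

// The `>` branch of the removed SQL is the complement:
// total cumulative count minus countAtOrBelow(buckets, threshold).
```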

View File

@@ -2,7 +2,6 @@ package metrictypes
import (
"database/sql/driver"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -190,18 +189,17 @@ type SpaceAggregation struct {
}
var (
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)
func (SpaceAggregation) Enum() []any {
@@ -216,7 +214,6 @@ func (SpaceAggregation) Enum() []any {
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
SpaceAggregationHistogramCount,
}
}
@@ -259,12 +256,3 @@ type MetricTableHints struct {
type MetricValueFilter struct {
Value float64
}
type ComparisonSpaceAggregationParam struct {
Operater string `json:"operator"`
Threshold float64 `json:"threshold"`
}
func (param ComparisonSpaceAggregationParam) StringValue() string {
return fmt.Sprintf("{\"operator\": \"%s\", \"limit\": \"%f\"}", param.Operater, param.Threshold)
}

View File

@@ -446,8 +446,6 @@ type MetricAggregation struct {
TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
// space aggregation to apply to the query
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
// param for space aggregation if needed
ComparisonSpaceAggregationParam *metrictypes.ComparisonSpaceAggregationParam `json:"comparisonSpaceAggregationParam"`
// table hints to use for the query
TableHints *metrictypes.MetricTableHints `json:"-"`
// value filter to apply to the query

View File

@@ -52,7 +52,6 @@ def build_builder_query(
time_aggregation: str,
space_aggregation: str,
*,
comparisonSpaceAggregationParam: Optional[Dict] = None,
temporality: Optional[str] = None,
step_interval: int = DEFAULT_STEP_INTERVAL,
group_by: Optional[List[str]] = None,
@@ -75,8 +74,7 @@ def build_builder_query(
}
if temporality:
spec["aggregations"][0]["temporality"] = temporality
if comparisonSpaceAggregationParam:
spec["aggregations"][0]["comparisonSpaceAggregationParam"] = comparisonSpaceAggregationParam
if group_by:
spec["groupBy"] = [
{

View File

@@ -1,259 +0,0 @@
"""
Look at the multi_temporality_counters_1h.jsonl file for the relevant data
"""
import random
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
import pytest
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
build_builder_query,
get_all_series,
get_series_values,
make_query_request,
)
from fixtures.utils import get_testdata_file_path
FILE = get_testdata_file_path("histogram_data_1h.jsonl")
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 11, 69),
(100, "<=", 1.1, 6.9),
(7500, "<=", 16.75, 74.75),
(8000, "<=", 17, 75),
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 7, 7),
(100, ">", 16.9, 69.1),
(7500, ">", 1.25, 1.25),
(8000, ">", 1, 1),
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_one_endpoint(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='endpoint = "/health"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 59
assert result_values[0]["value"] == first_value
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 22, 138),
(100, "<=", 2.2, 13.8),
(7500, "<=", 33.5, 149.5),
(8000, "<=", 34, 150),
(80000, "<=", 34, 150), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 14, 14),
(100, ">", 33.8, 138.2),
(7500, ">", 2.5, 2.5),
(8000, ">", 2, 2),
(80000, ">", 2, 2), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_one_service(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='service = "api"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 59
assert result_values[0]["value"] == first_value
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 11, 69),
(100, "<=", 1.1, 6.9),
(7500, "<=", 16.75, 74.75),
(8000, "<=", 17, 75),
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 7, 7),
(100, ">", 16.9, 69.1),
(7500, ">", 1.25, 1.25),
(8000, ">", 1, 1),
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_delta_service(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='service = "web"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 60 ## in delta, the value at 10:01 will also be reported
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 33, 207),
(100, "<=", 3.3, 20.7),
(7500, "<=", 50.25, 224.25),
(8000, "<=", 51, 225),
(80000, "<=", 51, 225),
(1000, ">", 21, 21),
(100, ">", 50.7, 207.3),
(7500, ">", 3.75, 3.75),
(8000, ">", 3, 3),
(80000, ">", 3, 3),
],
)
def test_histogram_count_for_all_services(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
## no services filter, this tests for multitemporality handling as well
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 60
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
assert result_values[-1]["value"] == last_value

File diff suppressed because it is too large