Compare commits


6 Commits

Author SHA1 Message Date
amlannandy
6f0f5b51a9 chore: additional performance fix 2026-02-18 14:54:49 +07:00
Amlan Kumar Nandy
037b92feb3 Merge branch 'main' into SIG-3497 2026-02-18 12:35:11 +07:00
amlannandy
95e023dafd chore: fix CI 2026-02-18 12:24:27 +07:00
amlannandy
d20c866d2c chore: additional fixes 2026-02-18 12:09:18 +07:00
amlannandy
06f55e6eda chore: search bar replacement 2026-02-18 10:58:27 +07:00
amlannandy
f592bc084d chore: metrics explorer summary page api migration 2026-02-11 20:55:03 +07:00
104 changed files with 3530 additions and 7920 deletions

7
.gitignore vendored
View File

@@ -1,11 +1,8 @@
node_modules
# editor
.vscode
!.vscode/settings.json
.zed
.idea
deploy/docker/environment_tiny/common_test
frontend/node_modules
@@ -34,6 +31,8 @@ frontend/yarn-debug.log*
frontend/yarn-error.log*
frontend/src/constants/env.ts
.idea
**/build
**/storage
**/locust-scripts/__pycache__/
@@ -230,3 +229,5 @@ cython_debug/
pyrightconfig.json
# cursor files
frontend/.cursor/

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.112.0
image: signoz/signoz:v0.111.0
command:
- --config=/root/config/prometheus.yml
ports:

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.112.0
image: signoz/signoz:v0.111.0
command:
- --config=/root/config/prometheus.yml
ports:

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.112.0}
image: signoz/signoz:${VERSION:-v0.111.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.112.0}
image: signoz/signoz:${VERSION:-v0.111.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

File diff suppressed because it is too large

View File

@@ -10,7 +10,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -107,10 +106,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.Handle("/api/v5/query_range", handler.New(
am.ViewAccess(ah.queryRangeV5),
querierAPI.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

View File

@@ -1,74 +0,0 @@
---
description: Core testing conventions - imports, rendering, MSW, interactions, queries
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Testing Conventions
## Imports
Always import from the test harness, never directly from `@testing-library/react`:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
```
## Router
Use the built-in router in `render`:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on their values.
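When a test does depend on those values, a minimal sketch of overriding just the hook is shown below (this assumes the router module is already covered by a global mock so the hook is a `jest.fn()`; the param name is hypothetical):
```ts
import { useParams } from 'react-router-dom';

// Override only the hook the component reads; do not re-mock the whole router.
const mockUseParams = jest.mocked(useParams);
mockUseParams.mockReturnValue({ metricName: 'http_server_duration' }); // hypothetical param
```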
## MSW
Global MSW server runs automatically. Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) =>
res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large response fixtures in `mocks-server/__mockdata_`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level events not covered by `userEvent` (e.g., scroll, resize). Wrap in `act(...)` if needed.
- Always `await` interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
## Timers
No global fake timers. Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries: `getByRole` > `findByRole` > `getByLabelText` > visible text > `data-testid` (last resort).
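A quick sketch of that preference order in practice (element names are illustrative):
```ts
await screen.findByRole('button', { name: /save view/i }); // async render
screen.getByRole('textbox', { name: /metric name/i });     // accessible role + name
screen.getByLabelText(/aggregation/i);                     // labelled form control
screen.getByText(/no data/i);                              // visible text fallback
screen.getByTestId('metrics-table');                       // last resort only
```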
## Anti-patterns
- Never import from `@testing-library/react` directly
- Never use global fake timers
- Never wrap `render` in `act(...)`
- Never mock infra dependencies locally (router, react-query)
- Limit to 3-5 focused tests per file

View File

@@ -1,54 +0,0 @@
---
description: When to use global vs local mocks in tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Mock Strategy
## Use Global Mocks For
High-frequency dependencies (20+ test files):
- Core infrastructure: react-router-dom, react-query, antd
- Browser APIs: ResizeObserver, matchMedia, localStorage
- Utility libraries: date-fns, lodash
Available global mock files (from jest.config.ts):
- `uplot` -> `__mocks__/uplotMock.ts`
## Use Local Mocks For
- Business logic dependencies (API endpoints, custom hooks, domain components)
- Test-specific behavior (different data per test, error scenarios, loading states)
## Decision Tree
```
Used in 20+ test files?
YES -> Global mock
NO -> Business logic or test-specific?
YES -> Local mock
NO -> Consider global if usage grows
```
## Correct Usage
```ts
// Global mocks are already available - just import
import { useLocation } from 'react-router-dom';
// Local mocks for business logic
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData),
}));
```
## Anti-patterns
```ts
// Never re-mock globally mocked dependencies locally
jest.mock('react-router-dom', () => ({ ... }));
// Never put test-specific data in global mocks
jest.mock('../api/tracesService', () => ({ getTraces: jest.fn(() => specificTestData) }));
```

View File

@@ -1,54 +0,0 @@
---
description: TypeScript type safety requirements for Jest tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# TypeScript Type Safety in Tests
All Jest tests must be fully type-safe. Never use `any`.
## Mock Function Typing
```ts
// Use jest.mocked for module mocks
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
// Use jest.MockedFunction for standalone mocks
const mockFetch = jest.fn() as jest.MockedFunction<(id: number) => Promise<User>>;
```
## Mock Data
Define interfaces for all mock data:
```ts
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
const mockProps: ComponentProps = {
title: 'Test',
data: [mockUser],
onSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
};
```
## Hook Mocking Pattern
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
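A hedged sketch of that helper pattern (the hook, its data shape, and the helpers' import path are assumptions; the real `rqSuccess` in the repo may have a different signature):
```ts
import useGetMetricMetadata from 'hooks/useGetMetricMetadata'; // hypothetical hook
import { rqSuccess } from 'tests/test-utils';                  // import path assumed

jest.mock('hooks/useGetMetricMetadata');
const mockUseGetMetricMetadata = jest.mocked(useGetMetricMetadata);

// Assumed shape: rqSuccess(data) builds a minimal successful UseQueryResult.
mockUseGetMetricMetadata.mockReturnValue(rqSuccess({ unit: 'ms', type: 'Histogram' }));
```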
## Checklist
- All mock functions use `jest.MockedFunction<T>` or `jest.mocked()`
- All mock data has proper interfaces
- No `any` types in test files
- Component props are typed
- API response types are defined

484
frontend/.cursorrules Normal file
View File

@@ -0,0 +1,484 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.
# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.
# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.
**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid the `any` type; use proper typing instead
# Unit Testing Focus
Focus on critical functionality (business logic, utility functions, component behavior)
Mock dependencies (API calls, external modules) before imports
Test multiple data scenarios (valid inputs, invalid inputs, edge cases)
Write maintainable tests with descriptive names grouped in describe blocks
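For instance, a minimal sketch of covering valid, invalid, and edge-case inputs for a small utility (the utility and its expected outputs are hypothetical):
```ts
import { formatDuration } from 'utils/formatDuration'; // hypothetical util

describe('formatDuration', () => {
  it.each([
    [1500, '1.5s'], // valid input
    [0, '0ms'],     // edge case
    [-10, '0ms'],   // invalid input, clamped
  ])('formats %d ms as %s', (input, expected) => {
    expect(formatDuration(input)).toBe(expected);
  });
});
```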
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)
**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases
**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`
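For reference, a module mock of that kind can be as small as the sketch below (illustrative only; the actual `__mocks__/uplotMock.ts` in the repo may differ):
```ts
// Minimal stand-in for the uPlot constructor used by chart components.
export default class UplotMock {
  setData = jest.fn();
  setSize = jest.fn();
  destroy = jest.fn();
}
```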
# Repo-specific Testing Conventions
## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.
## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.
## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */ as any);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.
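The same mechanism covers error scenarios; a short sketch (endpoint path is illustrative):
```ts
server.use(
  rest.get('*/api/v1/foo', (_req, res, ctx) =>
    res(ctx.status(500), ctx.json({ error: 'internal error' }))),
);
```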
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```
## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
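Pinning time, when it is needed, stays local to the test; a minimal sketch (the component and rendered text are hypothetical):
```ts
jest.useFakeTimers();
jest.setSystemTime(new Date('2026-02-18T12:00:00Z'));
render(<LastSeenCell timestamp="2026-02-18T10:00:00Z" />); // hypothetical component
expect(screen.getByText(/2 hours ago/i)).toBeInTheDocument();
jest.useRealTimers();
```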
# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3-5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# Example Test
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
# TypeScript Type Safety Examples
## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
id: number;
name: string;
email: string;
}
interface ApiResponse<T> {
data: T;
status: number;
message: string;
}
// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;
// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
data: { id: 1, name: 'John Doe', email: 'john@example.com' },
status: 200,
message: 'Success'
});
// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```
## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
title: string;
data: User[];
onUserSelect: (user: User) => void;
isLoading?: boolean;
}
const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
// Component implementation
};
describe('TestComponent', () => {
it('should render with proper props', () => {
// Arrange - Type the props properly
const mockProps: ComponentProps = {
title: 'Test Title',
data: [{ id: 1, name: 'John', email: 'john@example.com' }],
onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
isLoading: false
};
// Act
render(<TestComponent {...mockProps} />);
// Assert
expect(screen.getByText('Test Title')).toBeInTheDocument();
});
});
```
## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
user: User | null;
loading: boolean;
error: string | null;
refetch: () => void;
}
const useUserData = (id: number): UseUserDataReturn => {
// Hook implementation
};
describe('useUserData', () => {
it('should return user data with proper typing', () => {
// Arrange
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
mockFetchUser.mockResolvedValue({
data: mockUser,
status: 200,
message: 'Success'
});
// Act
const { result } = renderHook(() => useUserData(1));
// Assert
expect(result.current.user).toEqual(mockUser);
expect(result.current.loading).toBe(false);
expect(result.current.error).toBeNull();
});
});
```
## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
pathname: '/traces',
search: '',
hash: '',
state: null,
key: 'test-key',
...overrides,
});
// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```
# TypeScript Configuration for Jest
## Required Jest Configuration
```json
// jest.config.ts
{
"preset": "ts-jest/presets/js-with-ts-esm",
"globals": {
"ts-jest": {
"useESM": true,
"isolatedModules": true,
"tsconfig": "<rootDir>/tsconfig.jest.json"
}
},
"extensionsToTreatAsEsm": [".ts", ".tsx"],
"moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```
## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
"extends": "./tsconfig.json",
"compilerOptions": {
"types": ["jest", "@testing-library/jest-dom"],
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node"
},
"include": [
"src/**/*",
"**/*.test.ts",
"**/*.test.tsx",
"__mocks__/**/*"
]
}
```
## Common Type Safety Patterns
### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;
// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```
### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
data: T;
status: number;
}
const mockFetchData = jest.fn() as jest.MockedFunction<
<T>(endpoint: string) => Promise<MockApiResponse<T>>
>;
// Usage
mockFetchData<User>('/users').mockResolvedValue({
data: { id: 1, name: 'John' },
status: 200
});
```
### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';
type TestComponentProps = ComponentProps<typeof TestComponent>;
const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
const defaultProps: TestComponentProps = {
title: 'Test',
data: [],
onSelect: jest.fn(),
...props
};
return render(<TestComponent {...defaultProps} />);
};
```
### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
message: string;
code: number;
details?: Record<string, unknown>;
}
const mockApiError: ApiError = {
message: 'API Error',
code: 500,
details: { endpoint: '/users' }
};
mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```
## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed
# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│ ├─ react-router-dom
│ ├─ react-query
│ ├─ antd components
│ └─ browser APIs
└─ NO → Is it business logic?
├─ YES → Use Local Mock
│ ├─ API endpoints
│ ├─ Custom hooks
│ └─ Domain components
└─ NO → Is it test-specific?
├─ YES → Use Local Mock
│ ├─ Error scenarios
│ ├─ Loading states
│ └─ Specific data
└─ NO → Consider Global Mock
└─ If it becomes frequently used
```
# Common Anti-Patterns to Avoid
❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```
❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => specificTestData)
}));
```
✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```
✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData)
}));
```

View File

@@ -20,20 +20,17 @@ import { useMutation, useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetMetricAlerts200,
GetMetricAlertsPathParameters,
GetMetricAlertsParams,
GetMetricAttributes200,
GetMetricAttributesParams,
GetMetricAttributesPathParameters,
GetMetricDashboards200,
GetMetricDashboardsPathParameters,
GetMetricDashboardsParams,
GetMetricHighlights200,
GetMetricHighlightsPathParameters,
GetMetricHighlightsParams,
GetMetricMetadata200,
GetMetricMetadataPathParameters,
GetMetricMetadataParams,
GetMetricsStats200,
GetMetricsTreemap200,
ListMetrics200,
ListMetricsParams,
MetricsexplorertypesMetricAttributesRequestDTO,
MetricsexplorertypesStatsRequestDTO,
MetricsexplorertypesTreemapRequestDTO,
MetricsexplorertypesUpdateMetricMetadataRequestDTO,
@@ -46,128 +43,30 @@ type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint returns a list of distinct metric names within the specified time range
* @summary List metric names
* This endpoint returns associated alerts for a specified metric
* @summary Get metric alerts
*/
export const listMetrics = (
params?: ListMetricsParams,
export const getMetricAlerts = (
params: GetMetricAlertsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ListMetrics200>({
url: `/api/v2/metrics`,
return GeneratedAPIInstance<GetMetricAlerts200>({
url: `/api/v2/metric/alerts`,
method: 'GET',
params,
signal,
});
};
export const getListMetricsQueryKey = (params?: ListMetricsParams) => {
return ['listMetrics', ...(params ? [params] : [])] as const;
};
export const getListMetricsQueryOptions = <
TData = Awaited<ReturnType<typeof listMetrics>>,
TError = RenderErrorResponseDTO
>(
params?: ListMetricsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getListMetricsQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof listMetrics>>> = ({
signal,
}) => listMetrics(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ListMetricsQueryResult = NonNullable<
Awaited<ReturnType<typeof listMetrics>>
>;
export type ListMetricsQueryError = RenderErrorResponseDTO;
/**
* @summary List metric names
*/
export function useListMetrics<
TData = Awaited<ReturnType<typeof listMetrics>>,
TError = RenderErrorResponseDTO
>(
params?: ListMetricsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getListMetricsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary List metric names
*/
export const invalidateListMetrics = async (
queryClient: QueryClient,
params?: ListMetricsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getListMetricsQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint returns associated alerts for a specified metric
* @summary Get metric alerts
*/
export const getMetricAlerts = (
{ metricName }: GetMetricAlertsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAlerts200>({
url: `/api/v2/metrics/${metricName}/alerts`,
method: 'GET',
signal,
});
};
export const getGetMetricAlertsQueryKey = ({
metricName,
}: GetMetricAlertsPathParameters) => {
return ['getMetricAlerts'] as const;
export const getGetMetricAlertsQueryKey = (params?: GetMetricAlertsParams) => {
return ['getMetricAlerts', ...(params ? [params] : [])] as const;
};
export const getGetMetricAlertsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAlertsPathParameters,
params: GetMetricAlertsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -178,19 +77,13 @@ export const getGetMetricAlertsQueryOptions = <
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricAlertsQueryKey({ metricName });
const queryKey = queryOptions?.queryKey ?? getGetMetricAlertsQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getMetricAlerts>>> = ({
signal,
}) => getMetricAlerts({ metricName }, signal);
}) => getMetricAlerts(params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
TError,
TData
@@ -210,7 +103,7 @@ export function useGetMetricAlerts<
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAlertsPathParameters,
params: GetMetricAlertsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -219,7 +112,7 @@ export function useGetMetricAlerts<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricAlertsQueryOptions({ metricName }, options);
const queryOptions = getGetMetricAlertsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -235,126 +128,11 @@ export function useGetMetricAlerts<
*/
export const invalidateGetMetricAlerts = async (
queryClient: QueryClient,
{ metricName }: GetMetricAlertsPathParameters,
params: GetMetricAlertsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricAlertsQueryKey({ metricName }) },
options,
);
return queryClient;
};
/**
* This endpoint returns attribute keys and their unique values for a specified metric
* @summary Get metric attributes
*/
export const getMetricAttributes = (
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAttributes200>({
url: `/api/v2/metrics/${metricName}/attributes`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricAttributesQueryKey = (
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
) => {
return ['getMetricAttributes', ...(params ? [params] : [])] as const;
};
export const getGetMetricAttributesQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getGetMetricAttributesQueryKey({ metricName }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricAttributes>>
> = ({ signal }) => getMetricAttributes({ metricName }, params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricAttributesQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricAttributes>>
>;
export type GetMetricAttributesQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric attributes
*/
export function useGetMetricAttributes<
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricAttributesQueryOptions(
{ metricName },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric attributes
*/
export const invalidateGetMetricAttributes = async (
queryClient: QueryClient,
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricAttributesQueryKey({ metricName }, params) },
{ queryKey: getGetMetricAlertsQueryKey(params) },
options,
);
@@ -366,27 +144,28 @@ export const invalidateGetMetricAttributes = async (
* @summary Get metric dashboards
*/
export const getMetricDashboards = (
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricDashboards200>({
url: `/api/v2/metrics/${metricName}/dashboards`,
url: `/api/v2/metric/dashboards`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricDashboardsQueryKey = ({
metricName,
}: GetMetricDashboardsPathParameters) => {
return ['getMetricDashboards'] as const;
export const getGetMetricDashboardsQueryKey = (
params?: GetMetricDashboardsParams,
) => {
return ['getMetricDashboards', ...(params ? [params] : [])] as const;
};
export const getGetMetricDashboardsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -398,18 +177,13 @@ export const getGetMetricDashboardsQueryOptions = <
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey({ metricName });
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricDashboards>>
> = ({ signal }) => getMetricDashboards({ metricName }, signal);
> = ({ signal }) => getMetricDashboards(params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
TError,
TData
@@ -429,7 +203,7 @@ export function useGetMetricDashboards<
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -438,10 +212,7 @@ export function useGetMetricDashboards<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricDashboardsQueryOptions(
{ metricName },
options,
);
const queryOptions = getGetMetricDashboardsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -457,11 +228,11 @@ export function useGetMetricDashboards<
*/
export const invalidateGetMetricDashboards = async (
queryClient: QueryClient,
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricDashboardsQueryKey({ metricName }) },
{ queryKey: getGetMetricDashboardsQueryKey(params) },
options,
);
@@ -473,27 +244,28 @@ export const invalidateGetMetricDashboards = async (
* @summary Get metric highlights
*/
export const getMetricHighlights = (
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricHighlights200>({
url: `/api/v2/metrics/${metricName}/highlights`,
url: `/api/v2/metric/highlights`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricHighlightsQueryKey = ({
metricName,
}: GetMetricHighlightsPathParameters) => {
return ['getMetricHighlights'] as const;
export const getGetMetricHighlightsQueryKey = (
params?: GetMetricHighlightsParams,
) => {
return ['getMetricHighlights', ...(params ? [params] : [])] as const;
};
export const getGetMetricHighlightsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -505,18 +277,13 @@ export const getGetMetricHighlightsQueryOptions = <
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey({ metricName });
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricHighlights>>
> = ({ signal }) => getMetricHighlights({ metricName }, signal);
> = ({ signal }) => getMetricHighlights(params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
TError,
TData
@@ -536,7 +303,7 @@ export function useGetMetricHighlights<
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -545,10 +312,7 @@ export function useGetMetricHighlights<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricHighlightsQueryOptions(
{ metricName },
options,
);
const queryOptions = getGetMetricHighlightsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -564,115 +328,11 @@ export function useGetMetricHighlights<
*/
export const invalidateGetMetricHighlights = async (
queryClient: QueryClient,
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricHighlightsQueryKey({ metricName }) },
options,
);
return queryClient;
};
/**
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
* @summary Get metric metadata
*/
export const getMetricMetadata = (
{ metricName }: GetMetricMetadataPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
url: `/api/v2/metrics/${metricName}/metadata`,
method: 'GET',
signal,
});
};
export const getGetMetricMetadataQueryKey = ({
metricName,
}: GetMetricMetadataPathParameters) => {
return ['getMetricMetadata'] as const;
};
export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricMetadataPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey({ metricName });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricMetadata>>
> = ({ signal }) => getMetricMetadata({ metricName }, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricMetadataQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricMetadata>>
>;
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric metadata
*/
export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricMetadataPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricMetadataQueryOptions({ metricName }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric metadata
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
{ metricName }: GetMetricMetadataPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricMetadataQueryKey({ metricName }) },
{ queryKey: getGetMetricHighlightsQueryKey(params) },
options,
);
@@ -778,6 +438,189 @@ export const useUpdateMetricMetadata = <
return useMutation(mutationOptions);
};
/**
* This endpoint returns attribute keys and their unique values for a specified metric
* @summary Get metric attributes
*/
export const getMetricAttributes = (
metricsexplorertypesMetricAttributesRequestDTO: MetricsexplorertypesMetricAttributesRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAttributes200>({
url: `/api/v2/metrics/attributes`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: metricsexplorertypesMetricAttributesRequestDTO,
signal,
});
};
export const getGetMetricAttributesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
> => {
const mutationKey = ['getMetricAttributes'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof getMetricAttributes>>,
{ data: MetricsexplorertypesMetricAttributesRequestDTO }
> = (props) => {
const { data } = props ?? {};
return getMetricAttributes(data);
};
return { mutationFn, ...mutationOptions };
};
export type GetMetricAttributesMutationResult = NonNullable<
Awaited<ReturnType<typeof getMetricAttributes>>
>;
export type GetMetricAttributesMutationBody = MetricsexplorertypesMetricAttributesRequestDTO;
export type GetMetricAttributesMutationError = RenderErrorResponseDTO;
/**
* @summary Get metric attributes
*/
export const useGetMetricAttributes = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
> => {
const mutationOptions = getGetMetricAttributesMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
* @summary Get metric metadata
*/
export const getMetricMetadata = (
params: GetMetricMetadataParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
url: `/api/v2/metrics/metadata`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricMetadataQueryKey = (
params?: GetMetricMetadataParams,
) => {
return ['getMetricMetadata', ...(params ? [params] : [])] as const;
};
export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricMetadata>>
> = ({ signal }) => getMetricMetadata(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricMetadataQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricMetadata>>
>;
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric metadata
*/
export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricMetadataQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric metadata
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
params: GetMetricMetadataParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricMetadataQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint provides list of metrics with their number of samples and timeseries for the given time range
* @summary Get metrics statistics

View File

@@ -1,107 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
* @summary Query range
*/
export const queryRangeV5 = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<QueryRangeV5200>({
url: `/api/v5/query_range`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getQueryRangeV5MutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['queryRangeV5'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof queryRangeV5>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return queryRangeV5(data);
};
return { mutationFn, ...mutationOptions };
};
export type QueryRangeV5MutationResult = NonNullable<
Awaited<ReturnType<typeof queryRangeV5>>
>;
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type QueryRangeV5MutationError = RenderErrorResponseDTO;
/**
* @summary Query range
*/
export const useQueryRangeV5 = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getQueryRangeV5MutationOptions(options);
return useMutation(mutationOptions);
};

File diff suppressed because it is too large

View File

@@ -11,10 +11,13 @@ export const getMetricMetadata = async (
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(`/metrics/${encodedMetricName}/metadata`, {
signal,
headers,
});
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
return {
httpStatusCode: response.status,

View File

@@ -1,9 +1,8 @@
/* eslint-disable no-nested-ternary */
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { Virtuoso } from 'react-virtuoso';
import { Card } from 'antd';
import LogDetail from 'components/LogDetail';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
@@ -11,8 +10,6 @@ import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import useScrollToLog from 'hooks/logs/useScrollToLog';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { ILog } from 'types/api/logs/log';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -31,15 +28,6 @@ interface Props {
}
function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
const virtuosoRef = useRef<VirtuosoHandle>(null);
const {
activeLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
const basePayload = getHostLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
@@ -84,40 +72,29 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
setIsPaginating(false);
}, [data, setIsPaginating]);
const handleScrollToLog = useScrollToLog({
logs,
virtuosoRef,
});
const getItemContent = useCallback(
(_: number, logToRender: ILog): JSX.Element => {
return (
<div key={logToRender.id}>
<RawLogView
isTextOverflowEllipsisDisabled
data={logToRender}
linesPerRow={5}
fontSize={FontSize.MEDIUM}
selectedFields={[
{
dataType: 'string',
type: '',
name: 'body',
},
{
dataType: 'string',
type: '',
name: 'timestamp',
},
]}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
isActiveLog={activeLog?.id === logToRender.id}
/>
</div>
);
},
[activeLog, handleSetActiveLog, handleCloseLogDetail],
(_: number, logToRender: ILog): JSX.Element => (
<RawLogView
isTextOverflowEllipsisDisabled
key={logToRender.id}
data={logToRender}
linesPerRow={5}
fontSize={FontSize.MEDIUM}
selectedFields={[
{
dataType: 'string',
type: '',
name: 'body',
},
{
dataType: 'string',
type: '',
name: 'timestamp',
},
]}
/>
),
[],
);
const renderFooter = useCallback(
@@ -141,7 +118,6 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
<Virtuoso
className="host-metrics-logs-virtuoso"
key="host-metrics-logs-virtuoso"
ref={virtuosoRef}
data={logs}
endReached={loadMoreLogs}
totalCount={logs.length}
@@ -163,24 +139,7 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
{!isLoading && !isError && logs.length === 0 && <NoLogsContainer />}
{isError && !isLoading && <LogsError />}
{!isLoading && !isError && logs.length > 0 && (
<div
className="host-metrics-logs-list-container"
data-log-detail-ignore="true"
>
{renderContent}
</div>
)}
{selectedTab && activeLog && (
<LogDetail
log={activeLog}
onClose={handleCloseLogDetail}
logs={logs}
onNavigateLog={handleSetActiveLog}
selectedTab={selectedTab}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
onScrollToLog={handleScrollToLog}
/>
<div className="host-metrics-logs-list-container">{renderContent}</div>
)}
</div>
);

View File

@@ -13,9 +13,6 @@ export type LogDetailProps = {
handleChangeSelectedView?: ChangeViewFunctionType;
isListViewPanel?: boolean;
listViewPanelSelectedFields?: IField[] | null;
logs?: ILog[];
onNavigateLog?: (log: ILog) => void;
onScrollToLog?: (logId: string) => void;
} & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
Pick<DrawerProps, 'onClose'>;

View File

@@ -15,8 +15,6 @@
}
.log-detail-drawer__title-right {
display: flex;
align-items: center;
.ant-btn {
display: flex;
align-items: center;
@@ -68,10 +66,6 @@
margin-bottom: 16px;
}
.log-detail-drawer__content {
height: 100%;
}
.log-detail-drawer__log {
width: 100%;
display: flex;
@@ -189,115 +183,9 @@
.ant-drawer-close {
padding: 0px;
}
.log-detail-drawer__footer-hint {
position: absolute;
bottom: 0;
left: 0;
right: 0;
padding: 8px 16px;
text-align: left;
color: var(--text-vanilla-200);
background: var(--bg-ink-400);
z-index: 10;
.log-detail-drawer__footer-hint-content {
display: flex;
align-items: center;
gap: 4px;
}
.log-detail-drawer__footer-hint-icon {
display: inline;
vertical-align: middle;
color: var(--text-vanilla-200);
}
.log-detail-drawer__footer-hint-text {
font-size: 13px;
margin: 0;
}
}
.log-arrows {
display: flex;
box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.1);
border-radius: 6px;
padding: 2px 6px;
align-items: center;
margin-left: 8px;
}
.log-arrow-btn {
padding: 0;
min-width: 28px;
height: 28px;
border-radius: 4px;
background: var(--bg-ink-400);
color: var(--text-vanilla-400);
border: 1px solid var(--bg-ink-300);
box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.08);
display: flex;
align-items: center;
justify-content: center;
transition: background-color 0.2s ease-in-out;
}
.log-arrow-btn-up,
.log-arrow-btn-down {
background: var(--bg-ink-400);
}
.log-arrow-btn:active,
.log-arrow-btn:focus {
background: var(--bg-ink-300);
color: var(--text-vanilla-100);
}
.log-arrow-btn[disabled] {
opacity: 0.5;
cursor: not-allowed;
background: var(--bg-ink-500);
color: var(--text-vanilla-200);
.log-arrow-btn:hover:not([disabled]) {
background: var(--bg-ink-300);
color: var(--text-vanilla-100);
}
}
}
.lightMode {
.log-arrows {
background: var(--bg-vanilla-100);
box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.04);
}
.log-arrow-btn {
background: var(--bg-vanilla-100);
color: var(--text-ink-400);
border: 1px solid var(--bg-vanilla-300);
box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.04);
}
.log-arrow-btn-up,
.log-arrow-btn-down {
background: var(--bg-vanilla-100);
}
.log-arrow-btn:active,
.log-arrow-btn:focus {
background: var(--bg-vanilla-200);
color: var(--text-ink-500);
}
.log-arrow-btn:hover:not([disabled]) {
background: var(--bg-vanilla-200);
color: var(--text-ink-500);
}
.log-arrow-btn[disabled] {
background: var(--bg-vanilla-100);
color: var(--text-ink-200);
}
.ant-drawer-header {
border-bottom: 1px solid var(--bg-vanilla-400);
background: var(--bg-vanilla-100);
@@ -364,33 +252,4 @@
color: var(--text-ink-300);
}
}
.log-detail-drawer__footer-hint {
position: absolute;
bottom: 0;
left: 0;
right: 0;
padding: 8px 16px;
text-align: left;
color: var(--text-vanilla-700);
background: var(--bg-vanilla-100);
z-index: 10;
.log-detail-drawer__footer-hint-content {
display: flex;
align-items: center;
gap: 4px;
}
.log-detail-drawer__footer-hint-icon {
display: inline;
vertical-align: middle;
color: var(--text-vanilla-700);
}
.log-detail-drawer__footer-hint-text {
font-size: 13px;
margin: 0;
}
}
}

View File

@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { useCopyToClipboard, useLocation } from 'react-use';
import { Color, Spacing } from '@signozhq/design-tokens';
@@ -32,12 +32,8 @@ import { useSafeNavigate } from 'hooks/useSafeNavigate';
import createQueryParams from 'lib/createQueryParams';
import { cloneDeep } from 'lodash-es';
import {
ArrowDown,
ArrowUp,
BarChart2,
Braces,
ChevronDown,
ChevronUp,
Compass,
Copy,
Filter,
@@ -64,9 +60,6 @@ function LogDetailInner({
isListViewPanel = false,
listViewPanelSelectedFields,
handleChangeSelectedView,
logs,
onNavigateLog,
onScrollToLog,
}: LogDetailInnerProps): JSX.Element {
const initialContextQuery = useInitialQuery(log);
const [contextQuery, setContextQuery] = useState<Query | undefined>(
@@ -81,78 +74,6 @@ function LogDetailInner({
const [isEdit, setIsEdit] = useState<boolean>(false);
const { stagedQuery, updateAllQueriesOperators } = useQueryBuilder();
// Handle clicks outside to close drawer, except on explicitly ignored regions
useEffect(() => {
const handleClickOutside = (e: MouseEvent): void => {
const target = e.target as HTMLElement;
// Don't close if clicking on explicitly ignored regions
if (target.closest('[data-log-detail-ignore="true"]')) {
return;
}
// Close the drawer for any other outside click
onClose?.(e as any);
};
document.addEventListener('mousedown', handleClickOutside);
return (): void => {
document.removeEventListener('mousedown', handleClickOutside);
};
}, [onClose]);
// Keyboard navigation - handle up/down arrow keys
// Only listen when in OVERVIEW tab
useEffect(() => {
if (
!logs ||
!onNavigateLog ||
logs.length === 0 ||
selectedView !== VIEW_TYPES.OVERVIEW
) {
return;
}
const handleKeyDown = (e: KeyboardEvent): void => {
const currentIndex = logs.findIndex((l) => l.id === log.id);
if (currentIndex === -1) {
return;
}
if (e.key === 'ArrowUp') {
e.preventDefault();
e.stopPropagation();
// Navigate to previous log
if (currentIndex > 0) {
const prevLog = logs[currentIndex - 1];
onNavigateLog(prevLog);
// Trigger scroll to the log element
if (onScrollToLog) {
onScrollToLog(prevLog.id);
}
}
} else if (e.key === 'ArrowDown') {
e.preventDefault();
e.stopPropagation();
// Navigate to next log
if (currentIndex < logs.length - 1) {
const nextLog = logs[currentIndex + 1];
onNavigateLog(nextLog);
// Trigger scroll to the log element
if (onScrollToLog) {
onScrollToLog(nextLog.id);
}
}
}
};
document.addEventListener('keydown', handleKeyDown);
return (): void => {
document.removeEventListener('keydown', handleKeyDown);
};
}, [log.id, logs, onNavigateLog, onScrollToLog, selectedView]);
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) {
return null;
@@ -306,87 +227,32 @@ function LogDetailInner({
);
const logType = log?.attributes_string?.log_level || LogType.INFO;
const currentLogIndex = logs ? logs.findIndex((l) => l.id === log.id) : -1;
const isPrevDisabled =
!logs || !onNavigateLog || logs.length === 0 || currentLogIndex <= 0;
const isNextDisabled =
!logs ||
!onNavigateLog ||
logs.length === 0 ||
currentLogIndex === logs.length - 1;
type HandleNavigateLogParams = {
direction: 'next' | 'previous';
};
const handleNavigateLog = ({ direction }: HandleNavigateLogParams): void => {
if (!logs || !onNavigateLog || currentLogIndex === -1) {
return;
}
if (direction === 'previous' && !isPrevDisabled) {
const prevLog = logs[currentLogIndex - 1];
onNavigateLog(prevLog);
onScrollToLog?.(prevLog.id);
} else if (direction === 'next' && !isNextDisabled) {
const nextLog = logs[currentLogIndex + 1];
onNavigateLog(nextLog);
onScrollToLog?.(nextLog.id);
}
};
return (
<Drawer
width="60%"
mask={false}
maskClosable={false}
maskStyle={{ background: 'none' }}
title={
<div className="log-detail-drawer__title" data-log-detail-ignore="true">
<div className="log-detail-drawer__title">
<div className="log-detail-drawer__title-left">
<Divider type="vertical" className={cx('log-type-indicator', LogType)} />
<Typography.Text className="title">Log details</Typography.Text>
</div>
<div className="log-detail-drawer__title-right">
<div className="log-arrows">
<Tooltip
title={isPrevDisabled ? '' : 'Move to previous log'}
placement="top"
mouseLeaveDelay={0}
{showOpenInExplorerBtn && (
<div className="log-detail-drawer__title-right">
<Button
className="open-in-explorer-btn"
icon={<Compass size={16} />}
onClick={handleOpenInExplorer}
>
<Button
icon={<ChevronUp size={14} />}
className="log-arrow-btn log-arrow-btn-up"
disabled={isPrevDisabled}
onClick={(): void => handleNavigateLog({ direction: 'previous' })}
/>
</Tooltip>
<Tooltip
title={isNextDisabled ? '' : 'Move to next log'}
placement="top"
mouseLeaveDelay={0}
>
<Button
icon={<ChevronDown size={14} />}
className="log-arrow-btn log-arrow-btn-down"
disabled={isNextDisabled}
onClick={(): void => handleNavigateLog({ direction: 'next' })}
/>
</Tooltip>
Open in Explorer
</Button>
</div>
{showOpenInExplorerBtn && (
<div>
<Button
className="open-in-explorer-btn"
icon={<Compass size={16} />}
onClick={handleOpenInExplorer}
>
Open in Explorer
</Button>
</div>
)}
</div>
)}
</div>
}
placement="right"
// closable
onClose={drawerCloseHandler}
open={log !== null}
style={{
@@ -397,164 +263,138 @@ function LogDetailInner({
destroyOnClose
closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
>
<div className="log-detail-drawer__content" data-log-detail-ignore="true">
<div className="log-detail-drawer__log">
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
</Tooltip>
<div className="log-detail-drawer__log">
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
</Tooltip>
<div className="log-overflow-shadow">&nbsp;</div>
</div>
<div className="log-overflow-shadow">&nbsp;</div>
</div>
<div className="tabs-and-search">
<Radio.Group
className="views-tabs"
onChange={handleModeChange}
value={selectedView}
<div className="tabs-and-search">
<Radio.Group
className="views-tabs"
onChange={handleModeChange}
value={selectedView}
>
<Radio.Button
className={
// eslint-disable-next-line sonarjs/no-duplicate-string
selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.OVERVIEW}
>
<Radio.Button
className={
// eslint-disable-next-line sonarjs/no-duplicate-string
selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.OVERVIEW}
>
<div className="view-title">
<Table size={14} />
Overview
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.JSON}
>
<div className="view-title">
<Braces size={14} />
JSON
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.CONTEXT}
>
<div className="view-title">
<TextSelect size={14} />
Context
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.INFRAMETRICS}
>
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
</Radio.Button>
</Radio.Group>
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title="Show Filters"
placement="topLeft"
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
)}
<div className="view-title">
<Table size={14} />
Overview
</div>
</Radio.Button>
<Radio.Button
className={selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.JSON}
>
<div className="view-title">
<Braces size={14} />
JSON
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.CONTEXT}
>
<div className="view-title">
<TextSelect size={14} />
Context
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.INFRAMETRICS}
>
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
</Radio.Button>
</Radio.Group>
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
title="Show Filters"
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
</div>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
<QuerySearch
onChange={(value): void => handleQueryExpressionChange(value, 0)}
dataSource={DataSource.LOGS}
queryData={contextQuery?.builder.queryData[0]}
onRun={handleRunQuery}
)}
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
>
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
/>
</div>
)}
{selectedView === VIEW_TYPES.OVERVIEW && (
<Overview
logData={log}
onAddToQuery={onAddToQuery}
onClickActionItem={onClickActionItem}
isListViewPanel={isListViewPanel}
selectedOptions={options}
listViewPanelSelectedFields={listViewPanelSelectedFields}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
{selectedView === VIEW_TYPES.CONTEXT && (
<ContextView
log={log}
filters={filters}
contextQuery={contextQuery}
isEdit={isEdit}
/>
)}
{selectedView === VIEW_TYPES.INFRAMETRICS && (
<InfraMetrics
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
/>
)}
{selectedView === VIEW_TYPES.OVERVIEW && (
<div className="log-detail-drawer__footer-hint">
<div className="log-detail-drawer__footer-hint-content">
<Typography.Text
type="secondary"
className="log-detail-drawer__footer-hint-text"
>
Use
</Typography.Text>
<ArrowUp size={14} className="log-detail-drawer__footer-hint-icon" />
<span>/</span>
<ArrowDown size={14} className="log-detail-drawer__footer-hint-icon" />
<Typography.Text
type="secondary"
className="log-detail-drawer__footer-hint-text"
>
to view previous/next log
</Typography.Text>
</div>
</div>
)}
</Tooltip>
</div>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
<QuerySearch
onChange={(value): void => handleQueryExpressionChange(value, 0)}
dataSource={DataSource.LOGS}
queryData={contextQuery?.builder.queryData[0]}
onRun={handleRunQuery}
/>
</div>
)}
{selectedView === VIEW_TYPES.OVERVIEW && (
<Overview
logData={log}
onAddToQuery={onAddToQuery}
onClickActionItem={onClickActionItem}
isListViewPanel={isListViewPanel}
selectedOptions={options}
listViewPanelSelectedFields={listViewPanelSelectedFields}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
{selectedView === VIEW_TYPES.CONTEXT && (
<ContextView
log={log}
filters={filters}
contextQuery={contextQuery}
isEdit={isEdit}
/>
)}
{selectedView === VIEW_TYPES.INFRAMETRICS && (
<InfraMetrics
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
/>
)}
</Drawer>
);
}

View File

@@ -2,11 +2,13 @@ import { memo, useCallback, useMemo } from 'react';
import { blue } from '@ant-design/colors';
import { Typography } from 'antd';
import cx from 'classnames';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
import { FontSize } from 'container/OptionsMenu/types';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
// utils
@@ -102,17 +104,12 @@ function LogSelectedField({
type ListLogViewProps = {
logData: ILog;
selectedFields: IField[];
onSetActiveLog: (
log: ILog,
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
) => void;
onSetActiveLog: (log: ILog) => void;
onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
activeLog?: ILog | null;
linesPerRow: number;
fontSize: FontSize;
handleChangeSelectedView?: ChangeViewFunctionType;
isActiveLog?: boolean;
onClearActiveLog?: () => void;
};
function ListLogView({
@@ -123,8 +120,7 @@ function ListLogView({
activeLog,
linesPerRow,
fontSize,
isActiveLog,
onClearActiveLog,
handleChangeSelectedView,
}: ListLogViewProps): JSX.Element {
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
@@ -133,24 +129,35 @@ function ListLogView({
);
const isReadOnlyLog = !isLogsExplorerPage;
const {
activeLog: activeContextLog,
onAddToQuery: handleAddToQuery,
onSetActiveLog: handleSetActiveContextLog,
onClearActiveLog: handleClearActiveContextLog,
} = useActiveLog();
const isDarkMode = useIsDarkMode();
const handleDetailedView = useCallback(() => {
if (isActiveLog) {
onClearActiveLog?.();
return;
}
const handlerClearActiveContextLog = useCallback(
(event: React.MouseEvent | React.KeyboardEvent) => {
event.preventDefault();
event.stopPropagation();
handleClearActiveContextLog();
},
[handleClearActiveContextLog],
);
const handleDetailedView = useCallback(() => {
onSetActiveLog(logData);
}, [logData, onSetActiveLog, isActiveLog, onClearActiveLog]);
}, [logData, onSetActiveLog]);
const handleShowContext = useCallback(
(event: React.MouseEvent) => {
event.preventDefault();
event.stopPropagation();
onSetActiveLog(logData, VIEW_TYPES.CONTEXT);
handleSetActiveContextLog(logData);
},
[logData, onSetActiveLog],
[logData, handleSetActiveContextLog],
);
const updatedSelecedFields = useMemo(
@@ -179,7 +186,11 @@ function ListLogView({
return (
<>
<Container
$isActiveLog={isHighlighted || activeLog?.id === logData.id}
$isActiveLog={
isHighlighted ||
activeLog?.id === logData.id ||
activeContextLog?.id === logData.id
}
$isDarkMode={isDarkMode}
$logType={logType}
onClick={handleDetailedView}
@@ -240,6 +251,15 @@ function ListLogView({
/>
)}
</Container>
{activeContextLog && (
<LogDetail
log={activeContextLog}
onAddToQuery={handleAddToQuery}
selectedTab={VIEW_TYPES.CONTEXT}
onClose={handlerClearActiveContextLog}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
</>
);
}

View File

@@ -1,15 +1,19 @@
import {
KeyboardEvent,
memo,
MouseEvent,
MouseEventHandler,
useCallback,
useMemo,
useState,
} from 'react';
import { Color } from '@signozhq/design-tokens';
import { Tooltip } from 'antd';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { DrawerProps, Tooltip } from 'antd';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
// hooks
import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -35,8 +39,7 @@ function RawLogView({
selectedFields = [],
fontSize,
onLogClick,
onSetActiveLog,
onClearActiveLog,
handleChangeSelectedView,
}: RawLogViewProps): JSX.Element {
const {
isHighlighted: isUrlHighlighted,
@@ -45,6 +48,15 @@ function RawLogView({
} = useCopyLogLink(data.id);
const flattenLogData = useMemo(() => FlatLogData(data), [data]);
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
} = useActiveLog();
const [selectedTab, setSelectedTab] = useState<VIEWS | undefined>();
const isDarkMode = useIsDarkMode();
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
@@ -122,24 +134,34 @@ function RawLogView({
// Use custom click handler if provided, otherwise use default behavior
if (onLogClick) {
onLogClick(data, event);
return;
} else {
onSetActiveLog(data);
setSelectedTab(VIEW_TYPES.OVERVIEW);
}
if (isActiveLog) {
onClearActiveLog?.();
return;
}
onSetActiveLog?.(data);
},
[isReadOnly, onLogClick, isActiveLog, onSetActiveLog, data, onClearActiveLog],
[isReadOnly, data, onSetActiveLog, onLogClick],
);
const handleCloseLogDetail: DrawerProps['onClose'] = useCallback(
(
event: MouseEvent<Element, globalThis.MouseEvent> | KeyboardEvent<Element>,
) => {
event.preventDefault();
event.stopPropagation();
onClearActiveLog();
setSelectedTab(undefined);
},
[onClearActiveLog],
);
const handleShowContext: MouseEventHandler<HTMLElement> = useCallback(
(event) => {
event.preventDefault();
event.stopPropagation();
onSetActiveLog?.(data, VIEW_TYPES.CONTEXT);
// handleSetActiveContextLog(data);
setSelectedTab(VIEW_TYPES.CONTEXT);
onSetActiveLog(data);
},
[data, onSetActiveLog],
);
@@ -159,7 +181,7 @@ function RawLogView({
$isDarkMode={isDarkMode}
$isReadOnly={isReadOnly}
$isHightlightedLog={isUrlHighlighted}
$isActiveLog={isActiveLog}
$isActiveLog={activeLog?.id === data.id || isActiveLog}
$isCustomHighlighted={isHighlighted}
$logType={logType}
fontSize={fontSize}
@@ -196,6 +218,17 @@ function RawLogView({
onLogCopy={onLogCopy}
/>
)}
{selectedTab && (
<LogDetail
selectedTab={selectedTab}
log={activeLog}
onClose={handleCloseLogDetail}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
</RawLogViewContainer>
);
}

View File

@@ -45,6 +45,9 @@ export const RawLogViewContainer = styled(Row)<{
: `margin: 2px 0;`}
}
${({ $isActiveLog, $logType }): string =>
getActiveLogBackground($isActiveLog, true, $logType)}
${({ $isReadOnly, $isActiveLog, $isDarkMode, $logType }): string =>
$isActiveLog
? getActiveLogBackground($isActiveLog, $isDarkMode, $logType)

View File

@@ -1,5 +1,4 @@
import { MouseEvent } from 'react';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
import { FontSize } from 'container/OptionsMenu/types';
import { IField } from 'types/api/logs/fields';
@@ -17,11 +16,6 @@ export interface RawLogViewProps {
selectedFields?: IField[];
onLogClick?: (log: ILog, event: MouseEvent) => void;
handleChangeSelectedView?: ChangeViewFunctionType;
onSetActiveLog?: (
log: ILog,
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
) => void;
onClearActiveLog?: () => void;
}
export interface RawLogContentProps {

View File

@@ -86,6 +86,7 @@ interface QuerySearchProps {
signalSource?: string;
hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
onRun?: (query: string) => void;
showFilterSuggestionsWithoutMetric?: boolean;
}
function QuerySearch({
@@ -96,6 +97,7 @@ function QuerySearch({
onRun,
signalSource,
hardcodedAttributeKeys,
showFilterSuggestionsWithoutMetric,
}: QuerySearchProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const [valueSuggestions, setValueSuggestions] = useState<any[]>([]);
@@ -252,7 +254,8 @@ function QuerySearch({
async (searchText?: string): Promise<void> => {
if (
dataSource === DataSource.METRICS &&
!queryData.aggregateAttribute?.key
!queryData.aggregateAttribute?.key &&
!showFilterSuggestionsWithoutMetric
) {
setKeySuggestions([]);
return;
@@ -301,6 +304,7 @@ function QuerySearch({
queryData.aggregateAttribute?.key,
signalSource,
hardcodedAttributeKeys,
showFilterSuggestionsWithoutMetric,
],
);
@@ -1562,6 +1566,7 @@ QuerySearch.defaultProps = {
hardcodedAttributeKeys: undefined,
placeholder:
"Enter your filter query (e.g., http.status_code >= 500 AND service.name = 'frontend')",
showFilterSuggestionsWithoutMetric: false,
};
export default QuerySearch;

View File

@@ -14,6 +14,6 @@ export const VIEW_TYPES = {
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_NAME: 'net.peer.name',
SERVER_PORT: 'net.peer.port',
} as const;

View File

@@ -4,7 +4,6 @@ import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
import { getTopErrorsQueryPayload } from '../utils';
@@ -216,7 +215,7 @@ describe('TopErrors', () => {
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.SERVER_NAME }),
key: expect.objectContaining({ key: 'net.peer.name' }),
op: '=',
value: 'test-domain',
}),

View File

@@ -638,7 +638,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -685,7 +685,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -733,7 +733,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -780,7 +780,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -1302,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'e8a043b7',
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
key: 'net.peer.name',
dataType: DataTypes.String,
type: '',
},
@@ -2198,7 +2198,7 @@ export const getEndPointZeroStateQueryPayload = (
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -2793,7 +2793,7 @@ export const getStatusCodeBarChartWidgetData = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,

View File

@@ -50,10 +50,6 @@
}
.variable-select {
.ant-select-selector {
overflow-y: hidden !important;
}
.ant-select-item {
display: flex;
align-items: center;

View File

@@ -1,4 +1,3 @@
import { ExecStats } from 'api/v5/v5';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getInitialStackedBands } from 'container/DashboardContainer/visualization/charts/utils/stackSeriesUtils';
@@ -55,13 +54,6 @@ export function prepareBarPanelConfig({
minTimeScale?: number;
maxTimeScale?: number;
}): UPlotConfigBuilder {
const stepIntervals: ExecStats['stepIntervals'] = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const minStepInterval = Math.min(...Object.values(stepIntervals));
const builder = buildBaseConfig({
widget,
isDarkMode,
@@ -73,7 +65,12 @@ export function prepareBarPanelConfig({
panelType: PANEL_TYPES.BAR,
minTimeScale,
maxTimeScale,
stepInterval: minStepInterval,
});
builder.setCursor({
focus: {
prox: 1e3,
},
});
if (widget.stackedBarChart) {
@@ -81,6 +78,12 @@ export function prepareBarPanelConfig({
builder.setBands(getInitialStackedBands(seriesCount));
}
const stepIntervals: Record<string, number> = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const seriesList: QueryData[] = apiResponse?.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(

View File

@@ -1,325 +0,0 @@
import { Widgets } from 'types/api/dashboard/getAll';
import {
MetricRangePayloadProps,
MetricRangePayloadV3,
} from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { PanelMode } from '../../types';
import { prepareChartData, prepareUPlotConfig } from '../utils';
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
}),
);
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
__esModule: true,
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
}));
jest.mock('lib/dashboard/getQueryResults', () => ({
getLegend: jest.fn(
(_queryData: unknown, _query: unknown, labelName: string) =>
`legend-${labelName}`,
),
}));
jest.mock('lib/getLabelName', () => ({
__esModule: true,
default: jest.fn(
(_metric: unknown, _queryName: string, _legend: string) => 'baseLabel',
),
}));
const getLegendMock = jest.requireMock('lib/dashboard/getQueryResults')
.getLegend as jest.Mock;
const getLabelNameMock = jest.requireMock('lib/getLabelName')
.default as jest.Mock;
const createApiResponse = (
result: MetricRangePayloadProps['data']['result'] = [],
): MetricRangePayloadProps => ({
data: {
result,
resultType: 'matrix',
newResult: (null as unknown) as MetricRangePayloadV3,
},
});
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
({
id: 'widget-1',
yAxisUnit: 'ms',
isLogScale: false,
thresholds: [],
customLegendColors: {},
...overrides,
} as Widgets);
const defaultTimezone = {
name: 'UTC',
value: 'UTC',
offset: 'UTC',
searchIndex: 'UTC',
};
describe('TimeSeriesPanel utils', () => {
beforeEach(() => {
jest.clearAllMocks();
getLabelNameMock.mockReturnValue('baseLabel');
getLegendMock.mockImplementation(
(_queryData: unknown, _query: unknown, labelName: string) =>
`legend-${labelName}`,
);
});
describe('prepareChartData', () => {
it('returns aligned data with timestamps and empty series when result is empty', () => {
const apiResponse = createApiResponse([]);
const data = prepareChartData(apiResponse);
expect(data).toHaveLength(1);
expect(data[0]).toEqual([]);
});
it('returns timestamps and one series of y values for single series', () => {
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q',
legend: 'Series A',
values: [
[1000, '10'],
[2000, '20'],
],
} as MetricRangePayloadProps['data']['result'][0],
]);
const data = prepareChartData(apiResponse);
expect(data).toHaveLength(2);
expect(data[0]).toEqual([1000, 2000]);
expect(data[1]).toEqual([10, 20]);
});
it('merges timestamps and fills missing values with null for multiple series', () => {
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q1',
values: [
[1000, '1'],
[3000, '3'],
],
} as MetricRangePayloadProps['data']['result'][0],
{
metric: {},
queryName: 'Q2',
values: [
[1000, '10'],
[2000, '20'],
],
} as MetricRangePayloadProps['data']['result'][0],
]);
const data = prepareChartData(apiResponse);
expect(data[0]).toEqual([1000, 2000, 3000]);
// First series: 1, null, 3
expect(data[1]).toEqual([1, null, 3]);
// Second series: 10, 20, null
expect(data[2]).toEqual([10, 20, null]);
});
});
describe('prepareUPlotConfig', () => {
const baseParams = {
widget: createWidget(),
isDarkMode: true,
currentQuery: {} as Query,
onClick: jest.fn(),
onDragSelect: jest.fn(),
apiResponse: createApiResponse(),
timezone: defaultTimezone,
panelMode: PanelMode.DASHBOARD_VIEW,
};
it('adds no series when apiResponse has empty result', () => {
const builder = prepareUPlotConfig(baseParams);
const config = builder.getConfig();
// Base series (timestamp) only
expect(config.series).toHaveLength(1);
});
it('adds one series per result item with label from getLabelName when no currentQuery', () => {
getLegendMock.mockReset();
const apiResponse = createApiResponse([
{
metric: { __name__: 'cpu' },
queryName: 'Q1',
legend: 'CPU',
values: [
[1000, '1'],
[2000, '2'],
],
} as MetricRangePayloadProps['data']['result'][0],
]);
const builder = prepareUPlotConfig({
...baseParams,
apiResponse,
currentQuery: (null as unknown) as Query,
});
expect(getLabelNameMock).toHaveBeenCalled();
expect(getLegendMock).not.toHaveBeenCalled();
const config = builder.getConfig();
expect(config.series).toHaveLength(2);
expect(config.series?.[1]).toMatchObject({
label: 'baseLabel',
scale: 'y',
});
});
it('uses getLegend for label when currentQuery is provided', () => {
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q1',
legend: 'L1',
values: [[1000, '1']],
} as MetricRangePayloadProps['data']['result'][0],
]);
prepareUPlotConfig({
...baseParams,
apiResponse,
currentQuery: {} as Query,
});
expect(getLegendMock).toHaveBeenCalledWith(
{
legend: 'L1',
metric: {},
queryName: 'Q1',
values: [[1000, '1']],
},
{},
'baseLabel',
);
const config = prepareUPlotConfig({
...baseParams,
apiResponse,
currentQuery: {} as Query,
}).getConfig();
expect(config.series?.[1]).toMatchObject({
label: 'legend-baseLabel',
});
});
it('uses DrawStyle.Line and VisibilityMode.Never when series has multiple valid points', () => {
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q',
values: [
[1000, '1'],
[2000, '2'],
],
} as MetricRangePayloadProps['data']['result'][0],
]);
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
const config = builder.getConfig();
const series = config.series?.[1];
expect(config.series).toHaveLength(2);
// Line style and points never for multi-point series (checked via builder API)
const legendItems = builder.getLegendItems();
expect(Object.keys(legendItems)).toHaveLength(1);
// multi-point series → points hidden
expect(series).toBeDefined();
expect(series!.points?.show).toBe(false);
});
it('uses DrawStyle.Points and shows points when series has only one valid point', () => {
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q',
values: [
[1000, '1'],
[2000, 'NaN'],
[3000, 'invalid'],
],
} as MetricRangePayloadProps['data']['result'][0],
]);
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
const config = builder.getConfig();
expect(config.series).toHaveLength(2);
const seriesConfig = config.series?.[1];
expect(seriesConfig).toBeDefined();
// Single valid point -> Points draw style (asserted via series config)
expect(seriesConfig).toMatchObject({
scale: 'y',
spanGaps: true,
});
// single-point series → points shown
expect(seriesConfig).toBeDefined();
expect(seriesConfig!.points?.show).toBe(true);
});
it('uses widget customLegendColors to set series stroke color', () => {
const widget = createWidget({
customLegendColors: { 'legend-baseLabel': '#ff0000' },
});
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q',
values: [[1000, '1']],
} as MetricRangePayloadProps['data']['result'][0],
]);
const builder = prepareUPlotConfig({
...baseParams,
widget,
apiResponse,
});
const config = builder.getConfig();
const seriesConfig = config.series?.[1];
expect(seriesConfig).toBeDefined();
expect(seriesConfig!.stroke).toBe('#ff0000');
});
it('adds multiple series when result has multiple items', () => {
const apiResponse = createApiResponse([
{
metric: {},
queryName: 'Q1',
values: [[1000, '1']],
} as MetricRangePayloadProps['data']['result'][0],
{
metric: {},
queryName: 'Q2',
values: [[1000, '2']],
} as MetricRangePayloadProps['data']['result'][0],
]);
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
const config = builder.getConfig();
expect(config.series).toHaveLength(3);
});
});
});

View File

@@ -1,4 +1,3 @@
import { ExecStats } from 'api/v5/v5';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
@@ -15,12 +14,9 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { isInvalidPlotValue } from 'lib/uPlotV2/utils/dataUtils';
import get from 'lodash-es/get';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { QueryData } from 'types/api/widgets/getQuery';
import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';
@@ -35,22 +31,6 @@ export const prepareChartData = (
return [timestampArr, ...yAxisValuesArr];
};
function hasSingleVisiblePointForSeries(series: QueryData): boolean {
const rawValues = series.values ?? [];
let validPointCount = 0;
for (const [, rawValue] of rawValues) {
if (!isInvalidPlotValue(rawValue)) {
validPointCount += 1;
if (validPointCount > 1) {
return false;
}
}
}
return true;
}
export const prepareUPlotConfig = ({
widget,
isDarkMode,
@@ -74,13 +54,6 @@ export const prepareUPlotConfig = ({
minTimeScale?: number;
maxTimeScale?: number;
}): UPlotConfigBuilder => {
const stepIntervals: ExecStats['stepIntervals'] = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const minStepInterval = Math.min(...Object.values(stepIntervals));
const builder = buildBaseConfig({
widget,
isDarkMode,
@@ -92,11 +65,9 @@ export const prepareUPlotConfig = ({
panelType: PANEL_TYPES.TIME_SERIES,
minTimeScale,
maxTimeScale,
stepInterval: minStepInterval,
});
apiResponse.data?.result?.forEach((series) => {
const hasSingleValidPoint = hasSingleVisiblePointForSeries(series);
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
@@ -109,15 +80,13 @@ export const prepareUPlotConfig = ({
builder.addSeries({
scaleKey: 'y',
drawStyle: hasSingleValidPoint ? DrawStyle.Points : DrawStyle.Line,
drawStyle: DrawStyle.Line,
label: label,
colorMapping: widget.customLegendColors ?? {},
spanGaps: true,
lineStyle: LineStyle.Solid,
lineInterpolation: LineInterpolation.Spline,
showPoints: hasSingleValidPoint
? VisibilityMode.Always
: VisibilityMode.Never,
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
panelType: PANEL_TYPES.TIME_SERIES,

View File

@@ -1,233 +0,0 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { STEP_INTERVAL_MULTIPLIER } from 'lib/uPlotV2/constants';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { PanelMode } from '../../types';
import { buildBaseConfig } from '../baseConfigBuilder';
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
}),
);
jest.mock('lib/uPlotV2/utils', () => ({
calculateWidthBasedOnStepInterval: jest.fn(),
}));
const calculateWidthBasedOnStepIntervalMock = jest.requireMock(
'lib/uPlotV2/utils',
).calculateWidthBasedOnStepInterval as jest.Mock;
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
__esModule: true,
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
}));
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
({
id: 'widget-1',
yAxisUnit: 'ms',
isLogScale: false,
softMin: undefined,
softMax: undefined,
thresholds: [],
...overrides,
} as Widgets);
const createApiResponse = (
overrides: Partial<MetricRangePayloadProps> = {},
): MetricRangePayloadProps =>
({
data: { result: [], resultType: 'matrix', newResult: null },
...overrides,
} as MetricRangePayloadProps);
const baseProps = {
widget: createWidget(),
apiResponse: createApiResponse(),
isDarkMode: true,
panelMode: PanelMode.DASHBOARD_VIEW,
panelType: PANEL_TYPES.TIME_SERIES,
};
describe('buildBaseConfig', () => {
it('returns a UPlotConfigBuilder instance', () => {
const builder = buildBaseConfig(baseProps);
expect(builder).toBeDefined();
expect(typeof builder.getConfig).toBe('function');
expect(typeof builder.getLegendItems).toBe('function');
});
it('configures builder with widgetId and DASHBOARD_VIEW preferences', () => {
const builder = buildBaseConfig({
...baseProps,
panelMode: PanelMode.DASHBOARD_VIEW,
widget: createWidget({ id: 'my-widget' }),
});
expect(builder.getWidgetId()).toBe('my-widget');
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
it('configures builder with IN_MEMORY selection when panelMode is DASHBOARD_EDIT', () => {
const builder = buildBaseConfig({
...baseProps,
panelMode: PanelMode.DASHBOARD_EDIT,
});
expect(builder.getShouldSaveSelectionPreference()).toBe(false);
const config = builder.getConfig();
expect(config.series).toBeDefined();
});
it('passes stepInterval to builder and cursor prox uses width * multiplier', () => {
const stepInterval = 60;
const mockWidth = 100;
calculateWidthBasedOnStepIntervalMock.mockReturnValue(mockWidth);
const builder = buildBaseConfig({
...baseProps,
stepInterval,
});
const config = builder.getConfig();
const prox = config.cursor?.hover?.prox;
expect(typeof prox).toBe('function');
const uPlotInstance = {} as uPlot;
const proxResult = (prox as (u: uPlot) => number)(uPlotInstance);
expect(calculateWidthBasedOnStepIntervalMock).toHaveBeenCalledWith({
uPlotInstance,
stepInterval,
});
expect(proxResult).toBe(mockWidth * STEP_INTERVAL_MULTIPLIER);
});
it('adds x scale with time config and min/max when provided', () => {
const builder = buildBaseConfig({
...baseProps,
minTimeScale: 1000,
maxTimeScale: 2000,
});
const config = builder.getConfig();
expect(config.scales?.x).toBeDefined();
expect(config.scales?.x?.time).toBe(true);
const range = config.scales?.x?.range;
expect(Array.isArray(range)).toBe(true);
expect((range as [number, number])[0]).toBe(1000);
});
it('configures log scale on y axis when widget.isLogScale is true', () => {
const builder = buildBaseConfig({
...baseProps,
widget: createWidget({ isLogScale: true }),
});
const config = builder.getConfig();
expect(config.scales?.y).toBeDefined();
expect(config.scales?.y?.log).toBe(10);
});
it('adds onClick plugin when onClick is a function', () => {
const onClickPlugin = jest.requireMock('lib/uPlotLib/plugins/onClickPlugin')
.default;
const onClick = jest.fn();
buildBaseConfig({
...baseProps,
onClick,
apiResponse: createApiResponse(),
});
expect(onClickPlugin).toHaveBeenCalledWith({
onClick,
apiResponse: expect.any(Object),
});
});
it('does not add onClick plugin when onClick is not a function', () => {
const onClickPlugin = jest.requireMock('lib/uPlotLib/plugins/onClickPlugin')
.default;
const builder = buildBaseConfig({
...baseProps,
});
const config = builder.getConfig();
const plugins = config.plugins ?? [];
expect(
plugins.some((p) => (p as { name?: string }).name === 'onClickPlugin'),
).toBe(false);
expect(onClickPlugin).not.toHaveBeenCalled();
});
it('adds thresholds from widget', () => {
const builder = buildBaseConfig({
...baseProps,
widget: createWidget({
thresholds: [
{
thresholdValue: 80,
thresholdColor: '#ff0000',
thresholdUnit: 'ms',
thresholdLabel: 'High',
},
] as Widgets['thresholds'],
}),
});
const config = builder.getConfig();
const drawHooks = config.hooks?.draw ?? [];
expect(drawHooks.length).toBeGreaterThan(0);
});
it('adds x and y axes with correct scaleKeys and panelType', () => {
const builder = buildBaseConfig(baseProps);
const config = builder.getConfig();
expect(config.axes).toHaveLength(2);
expect(config.axes?.[0].scale).toBe('x');
expect(config.axes?.[1].scale).toBe('y');
});
it('sets tzDate when timezone is provided', () => {
const builder = buildBaseConfig({
...baseProps,
timezone: {
name: 'America/New_York',
value: 'America/New_York',
offset: 'UTC-5',
searchIndex: 'America/New_York',
},
});
const config = builder.getConfig();
expect(config.tzDate).toBeDefined();
expect(typeof config.tzDate).toBe('function');
});
it('leaves tzDate undefined when timezone is not provided', () => {
const builder = buildBaseConfig(baseProps);
const config = builder.getConfig();
expect(config.tzDate).toBeUndefined();
});
it('registers setSelect hook when onDragSelect is provided', () => {
const onDragSelect = jest.fn();
const builder = buildBaseConfig({
...baseProps,
onDragSelect,
});
const config = builder.getConfig();
expect(config.hooks?.setSelect).toBeDefined();
});
});

View File

@@ -26,7 +26,6 @@ export interface BaseConfigBuilderProps {
panelType: PANEL_TYPES;
minTimeScale?: number;
maxTimeScale?: number;
stepInterval?: number;
}
export function buildBaseConfig({
@@ -40,7 +39,6 @@ export function buildBaseConfig({
panelType,
minTimeScale,
maxTimeScale,
stepInterval,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
const tzDate = timezone
? (timestamp: number): Date =>
@@ -58,7 +56,6 @@ export function buildBaseConfig({
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
stepInterval,
});
const thresholdOptions: ThresholdsDrawHookOptions = {

View File

@@ -1,9 +1,8 @@
/* eslint-disable no-nested-ternary */
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { Virtuoso } from 'react-virtuoso';
import { Card } from 'antd';
import LogDetail from 'components/LogDetail';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
@@ -12,8 +11,6 @@ import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import useScrollToLog from 'hooks/logs/useScrollToLog';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { ILog } from 'types/api/logs/log';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -43,15 +40,6 @@ function EntityLogs({
category,
queryKeyFilters,
}: Props): JSX.Element {
const virtuosoRef = useRef<VirtuosoHandle>(null);
const {
activeLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
const basePayload = getEntityEventsOrLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
@@ -74,40 +62,29 @@ function EntityLogs({
basePayload,
});
const handleScrollToLog = useScrollToLog({
logs,
virtuosoRef,
});
const getItemContent = useCallback(
(_: number, logToRender: ILog): JSX.Element => {
return (
<div key={logToRender.id}>
<RawLogView
isTextOverflowEllipsisDisabled
data={logToRender}
linesPerRow={5}
fontSize={FontSize.MEDIUM}
selectedFields={[
{
dataType: 'string',
type: '',
name: 'body',
},
{
dataType: 'string',
type: '',
name: 'timestamp',
},
]}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
isActiveLog={activeLog?.id === logToRender.id}
/>
</div>
);
},
[activeLog, handleSetActiveLog, handleCloseLogDetail],
(_: number, logToRender: ILog): JSX.Element => (
<RawLogView
isTextOverflowEllipsisDisabled
key={logToRender.id}
data={logToRender}
linesPerRow={5}
fontSize={FontSize.MEDIUM}
selectedFields={[
{
dataType: 'string',
type: '',
name: 'body',
},
{
dataType: 'string',
type: '',
name: 'timestamp',
},
]}
/>
),
[],
);
const { data, isLoading, isFetching, isError } = useQuery({
@@ -154,7 +131,6 @@ function EntityLogs({
<Virtuoso
className="entity-logs-virtuoso"
key="entity-logs-virtuoso"
ref={virtuosoRef}
data={logs}
endReached={loadMoreLogs}
totalCount={logs.length}
@@ -178,21 +154,7 @@ function EntityLogs({
)}
{isError && !isLoading && <LogsError />}
{!isLoading && !isError && logs.length > 0 && (
<div className="entity-logs-list-container" data-log-detail-ignore="true">
{renderContent}
</div>
)}
{selectedTab && activeLog && (
<LogDetail
log={activeLog}
onClose={handleCloseLogDetail}
logs={logs}
onNavigateLog={handleSetActiveLog}
selectedTab={selectedTab}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
onScrollToLog={handleScrollToLog}
/>
<div className="entity-logs-list-container">{renderContent}</div>
)}
</div>
);

View File

@@ -2,6 +2,7 @@ import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { Card, Typography } from 'antd';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import ListLogView from 'components/Logs/ListLogView';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
@@ -13,9 +14,8 @@ import { InfinityWrapperStyled } from 'container/LogsExplorerList/styles';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { useOptionsMenu } from 'container/OptionsMenu';
import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import useScrollToLog from 'hooks/logs/useScrollToLog';
import { useEventSource } from 'providers/EventSource';
// interfaces
import { ILog } from 'types/api/logs/log';
@@ -38,11 +38,10 @@ function LiveLogsList({
const {
activeLog,
onClearActiveLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
onSetActiveLog,
} = useActiveLog();
// get only data from the logs object
const formattedLogs: ILog[] = useMemo(
@@ -66,56 +65,42 @@ function LiveLogsList({
...options.selectColumns,
]);
const handleScrollToLog = useScrollToLog({
logs: formattedLogs,
virtuosoRef: ref,
});
const getItemContent = useCallback(
(_: number, log: ILog): JSX.Element => {
if (options.format === 'raw') {
return (
<div key={log.id}>
<RawLogView
data={log}
isActiveLog={activeLog?.id === log.id}
linesPerRow={options.maxLines}
selectedFields={selectedFields}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
/>
</div>
<RawLogView
key={log.id}
data={log}
linesPerRow={options.maxLines}
selectedFields={selectedFields}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
/>
);
}
return (
<div key={log.id}>
<ListLogView
logData={log}
isActiveLog={activeLog?.id === log.id}
selectedFields={selectedFields}
linesPerRow={options.maxLines}
onAddToQuery={onAddToQuery}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
/>
</div>
<ListLogView
key={log.id}
logData={log}
selectedFields={selectedFields}
linesPerRow={options.maxLines}
onAddToQuery={onAddToQuery}
onSetActiveLog={onSetActiveLog}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
/>
);
},
[
handleChangeSelectedView,
onAddToQuery,
onSetActiveLog,
options.fontSize,
options.format,
options.maxLines,
options.fontSize,
activeLog?.id,
selectedFields,
onAddToQuery,
handleSetActiveLog,
handleCloseLogDetail,
handleChangeSelectedView,
],
);
@@ -171,10 +156,6 @@ function LiveLogsList({
activeLogIndex,
}}
handleChangeSelectedView={handleChangeSelectedView}
logs={formattedLogs}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
activeLog={activeLog}
/>
) : (
<Card style={{ width: '100%' }} bodyStyle={CARD_BODY_STYLE}>
@@ -192,17 +173,14 @@ function LiveLogsList({
</InfinityWrapperStyled>
)}
{activeLog && selectedTab && (
{activeLog && (
<LogDetail
selectedTab={selectedTab}
selectedTab={VIEW_TYPES.OVERVIEW}
log={activeLog}
onClose={handleCloseLogDetail}
onClose={onClearActiveLog}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
logs={formattedLogs}
onNavigateLog={handleSetActiveLog}
onScrollToLog={handleScrollToLog}
/>
)}
</div>

View File

@@ -395,7 +395,7 @@ export default function TableViewActions(
onOpenChange={setIsOpen}
arrow={false}
content={
<div data-log-detail-ignore="true">
<div>
<Button
className="more-filter-actions"
type="text"
@@ -481,7 +481,7 @@ export default function TableViewActions(
onOpenChange={setIsOpen}
arrow={false}
content={
<div data-log-detail-ignore="true">
<div>
<Button
className="more-filter-actions"
type="text"

View File

@@ -7,7 +7,6 @@ import {
useMemo,
} from 'react';
import { ColumnsType } from 'antd/es/table';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import LogLinesActionButtons from 'components/Logs/LogLinesActionButtons/LogLinesActionButtons';
import { ColumnTypeRender } from 'components/Logs/TableView/types';
import { FontSize } from 'container/OptionsMenu/types';
@@ -23,27 +22,22 @@ interface TableRowProps {
tableColumns: ColumnsType<Record<string, unknown>>;
index: number;
log: Record<string, unknown>;
onShowLogDetails?: (
log: ILog,
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
) => void;
handleSetActiveContextLog: (log: ILog) => void;
onShowLogDetails: (log: ILog) => void;
logs: ILog[];
hasActions: boolean;
fontSize: FontSize;
isActiveLog?: boolean;
onClearActiveLog?: () => void;
}
export default function TableRow({
tableColumns,
index,
log,
handleSetActiveContextLog,
onShowLogDetails,
logs,
hasActions,
fontSize,
isActiveLog,
onClearActiveLog,
}: TableRowProps): JSX.Element {
const isDarkMode = useIsDarkMode();
@@ -58,31 +52,21 @@ export default function TableRow({
(event) => {
event.preventDefault();
event.stopPropagation();
if (!currentLog) {
if (!handleSetActiveContextLog || !currentLog) {
return;
}
onShowLogDetails?.(currentLog, VIEW_TYPES.CONTEXT);
handleSetActiveContextLog(currentLog);
},
[currentLog, onShowLogDetails],
[currentLog, handleSetActiveContextLog],
);
const handleShowLogDetails = useCallback(() => {
if (!currentLog) {
if (!onShowLogDetails || !currentLog) {
return;
}
// If this log is already active, close the detail drawer
if (isActiveLog && onClearActiveLog) {
onClearActiveLog();
return;
}
// Otherwise, open the detail drawer for this log
if (onShowLogDetails) {
onShowLogDetails(currentLog);
}
}, [currentLog, onShowLogDetails, isActiveLog, onClearActiveLog]);
onShowLogDetails(currentLog);
}, [currentLog, onShowLogDetails]);
const hasSingleColumn =
tableColumns.filter((column) => column.key !== 'state-indicator').length ===

View File

@@ -4,6 +4,7 @@ import {
TableVirtuoso,
TableVirtuosoHandle,
} from 'react-virtuoso';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { getLogIndicatorType } from 'components/Logs/LogStateIndicator/utils';
import { useTableView } from 'components/Logs/TableView/useTableView';
@@ -57,40 +58,26 @@ const CustomTableRow: TableComponents<ILog>['TableRow'] = ({
const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
function InfinityTableView(
{
isLoading,
tableViewProps,
infitiyTableProps,
onSetActiveLog,
onClearActiveLog,
activeLog,
},
{ isLoading, tableViewProps, infitiyTableProps, handleChangeSelectedView },
ref,
): JSX.Element | null {
const { activeLog: activeContextLog } = useActiveLog();
const onSetActiveLogExpand = useCallback(
(log: ILog) => {
onSetActiveLog?.(log);
},
[onSetActiveLog],
);
const onSetActiveLogContext = useCallback(
(log: ILog) => {
onSetActiveLog?.(log, VIEW_TYPES.CONTEXT);
},
[onSetActiveLog],
);
const onCloseActiveLog = useCallback(() => {
onClearActiveLog?.();
}, [onClearActiveLog]);
const {
activeLog: activeContextLog,
onSetActiveLog: handleSetActiveContextLog,
onClearActiveLog: handleClearActiveContextLog,
onAddToQuery: handleAddToQuery,
} = useActiveLog();
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
} = useActiveLog();
const { dataSource, columns } = useTableView({
...tableViewProps,
onClickExpand: onSetActiveLogExpand,
onOpenLogsContext: onSetActiveLogContext,
onClickExpand: onSetActiveLog,
onOpenLogsContext: handleSetActiveContextLog,
});
const { draggedColumns, onDragColumns } = useDragColumns<
@@ -111,32 +98,27 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
);
const itemContent = useCallback(
(index: number, log: Record<string, unknown>): JSX.Element => {
return (
<div key={log.id as string}>
<TableRow
tableColumns={tableColumns}
index={index}
log={log}
logs={tableViewProps.logs}
hasActions
fontSize={tableViewProps.fontSize}
onShowLogDetails={onSetActiveLog}
isActiveLog={activeLog?.id === log.id}
onClearActiveLog={onCloseActiveLog}
/>
</div>
);
},
(index: number, log: Record<string, unknown>): JSX.Element => (
<TableRow
tableColumns={tableColumns}
index={index}
log={log}
handleSetActiveContextLog={handleSetActiveContextLog}
logs={tableViewProps.logs}
hasActions
fontSize={tableViewProps.fontSize}
onShowLogDetails={onSetActiveLog}
/>
),
[
handleSetActiveContextLog,
tableColumns,
onSetActiveLog,
tableViewProps.logs,
tableViewProps.fontSize,
activeLog?.id,
onCloseActiveLog,
tableViewProps.logs,
onSetActiveLog,
],
);
const tableHeader = useCallback(
() => (
<tr>
@@ -197,6 +179,24 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
? { endReached: infitiyTableProps.onEndReached }
: {})}
/>
{activeContextLog && (
<LogDetail
log={activeContextLog}
onClose={handleClearActiveContextLog}
onAddToQuery={handleAddToQuery}
selectedTab={VIEW_TYPES.CONTEXT}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
<LogDetail
selectedTab={VIEW_TYPES.OVERVIEW}
log={activeLog}
onClose={onClearActiveLog}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
/>
</>
);
},

View File

@@ -1,7 +1,5 @@
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { UseTableViewProps } from 'components/Logs/TableView/types';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
import { ILog } from 'types/api/logs/log';
export type InfinityTableProps = {
isLoading?: boolean;
@@ -10,11 +8,4 @@ export type InfinityTableProps = {
onEndReached: (index: number) => void;
};
handleChangeSelectedView?: ChangeViewFunctionType;
logs?: ILog[];
onSetActiveLog?: (
log: ILog,
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
) => void;
onClearActiveLog?: () => void;
activeLog?: ILog | null;
};

View File

@@ -4,6 +4,7 @@ import { Card } from 'antd';
import logEvent from 'api/common/logEvent';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
// components
import ListLogView from 'components/Logs/ListLogView';
import RawLogView from 'components/Logs/RawLogView';
@@ -15,9 +16,8 @@ import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { useOptionsMenu } from 'container/OptionsMenu';
import { FontSize } from 'container/OptionsMenu/types';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import useScrollToLog from 'hooks/logs/useScrollToLog';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import APIError from 'types/api/error';
// interfaces
@@ -55,11 +55,10 @@ function LogsExplorerList({
const {
activeLog,
onClearActiveLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
onSetActiveLog,
} = useActiveLog();
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
@@ -83,12 +82,6 @@ function LogsExplorerList({
() => convertKeysToColumnFields(options.selectColumns),
[options],
);
const handleScrollToLog = useScrollToLog({
logs,
virtuosoRef: ref,
});
useEffect(() => {
if (!isLoading && !isFetching && !isError && logs.length !== 0) {
logEvent('Logs Explorer: Data present', {
@@ -101,48 +94,40 @@ function LogsExplorerList({
(_: number, log: ILog): JSX.Element => {
if (options.format === 'raw') {
return (
<div key={log.id}>
<RawLogView
data={log}
isActiveLog={activeLog?.id === log.id}
linesPerRow={options.maxLines}
selectedFields={selectedFields}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
/>
</div>
<RawLogView
key={log.id}
data={log}
linesPerRow={options.maxLines}
selectedFields={selectedFields}
fontSize={options.fontSize}
handleChangeSelectedView={handleChangeSelectedView}
/>
);
}
return (
<div key={log.id}>
<ListLogView
logData={log}
isActiveLog={activeLog?.id === log.id}
selectedFields={selectedFields}
onAddToQuery={onAddToQuery}
onSetActiveLog={handleSetActiveLog}
activeLog={activeLog}
fontSize={options.fontSize}
linesPerRow={options.maxLines}
handleChangeSelectedView={handleChangeSelectedView}
onClearActiveLog={handleCloseLogDetail}
/>
</div>
<ListLogView
key={log.id}
logData={log}
selectedFields={selectedFields}
onAddToQuery={onAddToQuery}
onSetActiveLog={onSetActiveLog}
activeLog={activeLog}
fontSize={options.fontSize}
linesPerRow={options.maxLines}
handleChangeSelectedView={handleChangeSelectedView}
/>
);
},
[
options.format,
options.fontSize,
options.maxLines,
activeLog,
selectedFields,
onAddToQuery,
handleSetActiveLog,
handleChangeSelectedView,
handleCloseLogDetail,
onAddToQuery,
onSetActiveLog,
options.fontSize,
options.format,
options.maxLines,
selectedFields,
],
);
@@ -168,10 +153,6 @@ function LogsExplorerList({
}}
infitiyTableProps={{ onEndReached }}
handleChangeSelectedView={handleChangeSelectedView}
logs={logs}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
activeLog={activeLog}
/>
);
}
@@ -218,9 +199,6 @@ function LogsExplorerList({
getItemContent,
selectedFields,
handleChangeSelectedView,
handleSetActiveLog,
handleCloseLogDetail,
activeLog,
]);
const isTraceToLogsNavigation = useMemo(() => {
@@ -300,19 +278,14 @@ function LogsExplorerList({
{renderContent}
</InfinityWrapperStyled>
{selectedTab && activeLog && (
<LogDetail
selectedTab={selectedTab}
log={activeLog}
onClose={handleCloseLogDetail}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
logs={logs}
onNavigateLog={handleSetActiveLog}
onScrollToLog={handleScrollToLog}
/>
)}
<LogDetail
selectedTab={VIEW_TYPES.OVERVIEW}
log={activeLog}
onClose={onClearActiveLog}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
/>
</>
)}
</div>

View File

@@ -466,10 +466,7 @@ function LogsExplorerViewsContainer({
</div>
)}
<div
className="logs-explorer-views-type-content"
data-log-detail-ignore="true"
>
<div className="logs-explorer-views-type-content">
{showLiveLogs && (
<LiveLogs handleChangeSelectedView={handleChangeSelectedView} />
)}

View File

@@ -8,6 +8,7 @@ import {
} from 'react';
import { UseQueryResult } from 'react-query';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { ResizeTable } from 'components/ResizeTable';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -15,7 +16,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
import Controls from 'container/Controls';
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
import { tableStyles } from 'container/TracesExplorer/ListView/styles';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useLogsData } from 'hooks/useLogsData';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { FlatLogData } from 'lib/logs/flatLogData';
@@ -82,24 +83,24 @@ function LogsPanelComponent({
() => logs.map((log) => FlatLogData(log) as RowData),
[logs],
);
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
} = useActiveLog();
const handleRow = useCallback(
(record: RowData): HTMLAttributes<RowData> => ({
onClick: (): void => {
const log = logs.find((item) => item.id === record.id);
if (log) {
handleSetActiveLog(log);
onSetActiveLog(log);
}
},
}),
[handleSetActiveLog, logs],
[logs, onSetActiveLog],
);
const handleRequestData = (newOffset: number): void => {
@@ -131,7 +132,7 @@ function LogsPanelComponent({
return (
<>
<div className="logs-table" data-log-detail-ignore="true">
<div className="logs-table">
<div className="resize-table">
<OverlayScrollbar>
<ResizeTable
@@ -165,19 +166,15 @@ function LogsPanelComponent({
</div>
)}
</div>
{selectedTab && activeLog && (
<LogDetail
selectedTab={selectedTab}
log={activeLog}
onClose={handleCloseLogDetail}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
isListViewPanel
listViewPanelSelectedFields={widget?.selectedLogFields}
logs={logs}
onNavigateLog={handleSetActiveLog}
/>
)}
<LogDetail
selectedTab={VIEW_TYPES.OVERVIEW}
log={activeLog}
onClose={onClearActiveLog}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
isListViewPanel
listViewPanelSelectedFields={widget?.selectedLogFields}
/>
</>
);
}

View File

@@ -1,5 +1,4 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import {
Button,
Empty,
@@ -10,22 +9,24 @@ import {
Popover,
Spin,
} from 'antd';
import { Filter } from 'api/v5/v5';
import {
convertExpressionToFilters,
convertFiltersToExpression,
} from 'components/QueryBuilderV2/utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetMetricsListFilterValues } from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { Search } from 'lucide-react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { SUMMARY_FILTERS_KEY } from './constants';
function MetricNameSearch({
queryFilters,
queryFilterExpression,
onFilterChange,
}: {
queryFilters: TagFilter;
queryFilterExpression: Filter;
onFilterChange: (value: string) => void;
}): JSX.Element {
const [searchParams, setSearchParams] = useSearchParams();
const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
const [searchString, setSearchString] = useState<string>('');
const [debouncedSearchString, setDebouncedSearchString] = useState<string>('');
@@ -67,9 +68,12 @@ function MetricNameSearch({
const handleSelect = useCallback(
(selectedMetricName: string): void => {
const queryFilters = convertExpressionToFilters(
queryFilterExpression?.expression,
);
const newFilters = {
items: [
...queryFilters.items,
...queryFilters,
{
id: 'metric_name',
op: 'CONTAINS',
@@ -83,13 +87,11 @@ function MetricNameSearch({
],
op: 'and',
};
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
});
const newFilterExpression = convertFiltersToExpression(newFilters);
onFilterChange(newFilterExpression.expression);
setIsPopoverOpen(false);
},
[queryFilters.items, setSearchParams, searchParams],
[queryFilterExpression, onFilterChange],
);
const metricNameFilterValues = useMemo(

View File
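The `handleSelect` above round-trips between the two filter representations: it expands the current expression with `convertExpressionToFilters`, appends the `metric_name CONTAINS <value>` item, and serializes back with `convertFiltersToExpression` before calling `onFilterChange`. A minimal sketch of the serialization half, assuming a simple `key OP 'value' AND ...` grammar (the project's real helpers and quoting rules are not shown in this diff):

```ts
// Illustrative only — not the project's convertFiltersToExpression.
type FilterItem = { key: string; op: string; value: string };

// Builds a `key OP 'value' AND ...` expression from flat filter items.
function filtersToExpression(items: FilterItem[]): string {
	return items
		.map(({ key, op, value }) => `${key} ${op} '${value.replace(/'/g, "\\'")}'`)
		.join(' AND ');
}

// filtersToExpression([{ key: 'metric_name', op: 'CONTAINS', value: 'http' }])
// => "metric_name CONTAINS 'http'"
```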

@@ -1,23 +1,19 @@
import { useCallback, useMemo, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { Button, Menu, Popover, Tooltip } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';
import { Search } from 'lucide-react';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import {
METRIC_TYPE_LABEL_MAP,
METRIC_TYPE_VALUES_MAP,
SUMMARY_FILTERS_KEY,
} from './constants';
import { METRIC_TYPE_LABEL_MAP_V2 } from './constants';
function MetricTypeSearch({
queryFilters,
onFilterChange,
}: {
queryFilters: TagFilter;
onFilterChange: (expression: string) => void;
}): JSX.Element {
const [searchParams, setSearchParams] = useSearchParams();
const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
const menuItems = useMemo(
@@ -26,9 +22,9 @@ function MetricTypeSearch({
key: 'all',
value: 'All',
},
...Object.keys(METRIC_TYPE_LABEL_MAP).map((key) => ({
key: METRIC_TYPE_VALUES_MAP[key as MetricType],
value: METRIC_TYPE_LABEL_MAP[key as MetricType],
...Object.keys(METRIC_TYPE_LABEL_MAP_V2).map((key) => ({
key: METRIC_TYPE_LABEL_MAP_V2[key as MetrictypesTypeDTO],
value: METRIC_TYPE_LABEL_MAP_V2[key as MetrictypesTypeDTO],
})),
],
[],
@@ -36,16 +32,17 @@ function MetricTypeSearch({
const handleSelect = useCallback(
(selectedMetricType: string): void => {
let newFilters;
if (selectedMetricType !== 'all') {
const newFilters = {
newFilters = {
items: [
...queryFilters.items,
{
id: 'metric_type',
id: 'ttype',
op: '=',
key: {
id: 'metric_type',
key: 'metric_type',
id: 'type',
key: 'type',
type: 'tag',
},
value: selectedMetricType,
@@ -53,23 +50,17 @@ function MetricTypeSearch({
],
op: 'AND',
};
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
});
} else {
const newFilters = {
items: queryFilters.items.filter((item) => item.id !== 'metric_type'),
newFilters = {
items: queryFilters.items.filter((item) => item.id !== 'type'),
op: 'AND',
};
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
});
}
const newFilterExpression = convertFiltersToExpression(newFilters);
onFilterChange(newFilterExpression.expression);
setIsPopoverOpen(false);
},
[queryFilters.items, setSearchParams, searchParams],
[queryFilters.items, onFilterChange],
);
const menu = (

View File

@@ -1,27 +1,59 @@
import { Tooltip } from 'antd';
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
import { useEffect, useState } from 'react';
import { Button, Tooltip } from 'antd';
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { HardHat, Info } from 'lucide-react';
import { Info, Play } from 'lucide-react';
import { DataSource } from 'types/common/queryBuilder';
import { MetricsSearchProps } from './types';
function MetricsSearch({ query, onChange }: MetricsSearchProps): JSX.Element {
const [currentExpression, setCurrentExpression] = useState<string>(
query?.filter?.expression || '',
);
useEffect(() => {
const expression = query?.filter?.expression || '';
setCurrentExpression(expression);
}, [query?.filter?.expression]);
const handleOnChange = (expression: string): void => {
setCurrentExpression(expression);
};
const handleStageAndRunQuery = (): void => onChange(currentExpression);
return (
<div className="metrics-search-container">
<div className="qb-search-container">
<div data-testid="qb-search-container" className="qb-search-container">
<Tooltip
title="Use filters to refine metrics based on attributes. Example: service_name=api - Shows all metrics associated with the API service"
placement="right"
>
<Info size={16} />
</Tooltip>
<QueryBuilderSearch
query={query}
onChange={onChange}
suffixIcon={<HardHat size={16} />}
isMetricsExplorer
<QuerySearch
onChange={handleOnChange}
dataSource={DataSource.METRICS}
queryData={{
...query,
filter: {
...query?.filter,
expression: currentExpression,
},
}}
onRun={handleOnChange}
showFilterSuggestionsWithoutMetric
/>
</div>
<Button
type="primary"
onClick={handleStageAndRunQuery}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
<div className="metrics-search-options">
<DateTimeSelectionV2
showAutoRefresh={false}

View File
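`MetricsSearch` above now stages edits locally and only propagates them when the user clicks Stage & Run Query (or the search's own run action), re-syncing the draft whenever the incoming `query.filter.expression` changes. The draft/commit split in isolation, as a hypothetical hook that is not part of the codebase:

```ts
import { useState } from 'react';

// Draft edits stay local; the parent only sees the value on an explicit run.
function useStagedValue(
	initial: string,
	onCommit: (value: string) => void,
): { draft: string; stage: (value: string) => void; run: () => void } {
	const [draft, setDraft] = useState(initial);
	return { draft, stage: setDraft, run: (): void => onCommit(draft) };
}
```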

@@ -24,7 +24,8 @@ function MetricsTable({
setOrderBy,
totalCount,
openMetricDetails,
queryFilters,
queryFilterExpression,
onFilterChange,
}: MetricsTableProps): JSX.Element {
const handleTableChange: TableProps<MetricsListItemRowData>['onChange'] = useCallback(
(
@@ -36,13 +37,17 @@ function MetricsTable({
): void => {
if ('field' in sorter && sorter.order) {
setOrderBy({
columnName: sorter.field as string,
order: sorter.order === 'ascend' ? 'asc' : 'desc',
key: {
name: sorter.field as string,
},
direction: sorter.order === 'ascend' ? 'asc' : 'desc',
});
} else {
setOrderBy({
columnName: 'samples',
order: 'desc',
key: {
name: 'samples',
},
direction: 'desc',
});
}
},
@@ -51,19 +56,17 @@ function MetricsTable({
return (
<div className="metrics-table-container">
{!isError && !isLoading && (
<div className="metrics-table-title" data-testid="metrics-table-title">
<Typography.Title level={4} className="metrics-table-title">
List View
</Typography.Title>
<Tooltip
title="The table displays all metrics in the selected time range. Each row represents a unique metric, and its metric name, and metadata like description, type, unit, and samples/timeseries cardinality observed in the selected time range."
placement="right"
>
<Info size={16} />
</Tooltip>
</div>
)}
<div className="metrics-table-title" data-testid="metrics-table-title">
<Typography.Title level={4} className="metrics-table-title">
List View
</Typography.Title>
<Tooltip
title="The table displays all metrics in the selected time range. Each row represents a unique metric, and its metric name, and metadata like description, type, unit, and samples/timeseries cardinality observed in the selected time range."
placement="right"
>
<Info size={16} />
</Tooltip>
</div>
<Table
loading={{
spinning: isLoading,
@@ -75,7 +78,7 @@ function MetricsTable({
),
}}
dataSource={data}
columns={getMetricsTableColumns(queryFilters)}
columns={getMetricsTableColumns(queryFilterExpression, onFilterChange)}
locale={{
emptyText: isLoading ? null : (
<div

View File
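`handleTableChange` above maps antd's column sorter onto the v5 order-by shape (`{ key: { name }, direction }`), falling back to samples descending when no sort is active. The same mapping as a standalone function (`toOrderBy` is an illustrative name):

```ts
type SortOrder = 'ascend' | 'descend' | null | undefined;

interface OrderBy {
	key: { name: string };
	direction: 'asc' | 'desc';
}

// Maps antd's column sorter to the v5 order-by payload; defaults to samples desc.
function toOrderBy(field: string | undefined, order: SortOrder): OrderBy {
	if (field && order) {
		return {
			key: { name: field },
			direction: order === 'ascend' ? 'asc' : 'desc',
		};
	}
	return { key: { name: 'samples' }, direction: 'desc' };
}
```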

@@ -3,6 +3,7 @@ import { useWindowSize } from 'react-use';
import { Group } from '@visx/group';
import { Treemap } from '@visx/hierarchy';
import { Empty, Select, Skeleton, Tooltip, Typography } from 'antd';
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
import { stratify, treemapBinary } from 'd3-hierarchy';
import { Info } from 'lucide-react';
@@ -12,7 +13,7 @@ import {
TREEMAP_SQUARE_PADDING,
TREEMAP_VIEW_OPTIONS,
} from './constants';
import { MetricsTreemapProps, TreemapTile, TreemapViewType } from './types';
import { MetricsTreemapProps, TreemapTile } from './types';
import {
getTreemapTileStyle,
getTreemapTileTextStyle,
@@ -40,9 +41,9 @@ function MetricsTreemap({
const treemapData = useMemo(() => {
const extracedTreemapData =
(viewType === TreemapViewType.TIMESERIES
? data?.data?.[TreemapViewType.TIMESERIES]
: data?.data?.[TreemapViewType.SAMPLES]) || [];
(viewType === MetricsexplorertypesTreemapModeDTO.timeseries
? data?.timeseries
: data?.samples) || [];
return transformTreemapData(extracedTreemapData, viewType);
}, [data, viewType]);
@@ -54,62 +55,36 @@ function MetricsTreemap({
const xMax = treemapWidth - TREEMAP_MARGINS.LEFT - TREEMAP_MARGINS.RIGHT;
const yMax = TREEMAP_HEIGHT - TREEMAP_MARGINS.TOP - TREEMAP_MARGINS.BOTTOM;
if (isLoading) {
return (
<div data-testid="metrics-treemap-loading-state">
<Skeleton
style={{ width: treemapWidth, height: TREEMAP_HEIGHT + 55 }}
active
/>
</div>
);
}
if (
!data ||
!data.data ||
(data?.status === 'success' && !data?.data?.[viewType])
) {
return (
<Empty
description="No metrics found"
data-testid="metrics-treemap-empty-state"
style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
/>
);
}
if (data?.status === 'error' || isError) {
return (
<Empty
description="Error fetching metrics. If the problem persists, please contact support."
data-testid="metrics-treemap-error-state"
style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
/>
);
}
return (
<div
className="metrics-treemap-container"
data-testid="metrics-treemap-container"
>
<div className="metrics-treemap-title">
<div className="metrics-treemap-title-left">
<Typography.Title level={4}>Proportion View</Typography.Title>
<Tooltip
title="The treemap displays the proportion of samples/timeseries in the selected time range. Each tile represents a unique metric, and its size indicates the percentage of samples/timeseries it contributes to the total."
placement="right"
>
<Info size={16} />
</Tooltip>
const treemapContent = useMemo(() => {
if (isLoading) {
return (
<div data-testid="metrics-treemap-loading-state">
<Skeleton style={{ width: treemapWidth, height: TREEMAP_HEIGHT }} active />
</div>
<Select
options={TREEMAP_VIEW_OPTIONS}
value={viewType}
onChange={setHeatmapView}
);
}
if (isError) {
return (
<Empty
description="Error fetching metrics. If the problem persists, please contact support."
data-testid="metrics-treemap-error-state"
style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
/>
</div>
);
}
if (!data || !data?.[viewType]?.length) {
return (
<Empty
description="No metrics found"
data-testid="metrics-treemap-empty-state"
style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
/>
);
}
return (
<svg
width={treemapWidth}
height={TREEMAP_HEIGHT}
@@ -174,6 +149,42 @@ function MetricsTreemap({
)}
</Treemap>
</svg>
);
}, [
data,
isError,
isLoading,
openMetricDetails,
transformedTreemapData,
treemapWidth,
viewType,
xMax,
yMax,
]);
return (
<div
className="metrics-treemap-container"
data-testid="metrics-treemap-container"
>
<div className="metrics-treemap-title">
<div className="metrics-treemap-title-left">
<Typography.Title level={4}>Proportion View</Typography.Title>
<Tooltip
title="The treemap displays the proportion of samples/timeseries in the selected time range. Each tile represents a unique metric, and its size indicates the percentage of samples/timeseries it contributes to the total."
placement="right"
>
<Info size={16} />
</Tooltip>
</div>
<Select
options={TREEMAP_VIEW_OPTIONS}
value={viewType}
onChange={setHeatmapView}
disabled={isLoading}
/>
</div>
{treemapContent}
</div>
);
}

View File

@@ -38,6 +38,7 @@
.metrics-search-container {
display: flex;
gap: 16px;
align-items: center;
.metrics-search-options {
display: flex;

View File

@@ -4,11 +4,23 @@ import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import logEvent from 'api/common/logEvent';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
useGetMetricsStats,
useGetMetricsTreemap,
} from 'api/generated/services/metrics';
import {
MetricsexplorertypesStatsRequestDTO,
MetricsexplorertypesTreemapModeDTO,
MetricsexplorertypesTreemapRequestDTO,
Querybuildertypesv5OrderByDTO,
} from 'api/generated/services/sigNoz.schemas';
import {
convertExpressionToFilters,
convertFiltersToExpression,
} from 'components/QueryBuilderV2/utils';
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
import NoLogs from 'container/NoLogs/NoLogs';
import { useGetMetricsList } from 'hooks/metricsExplorer/useGetMetricsList';
import { useGetMetricsTreeMap } from 'hooks/metricsExplorer/useGetMetricsTreeMap';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { AppState } from 'store/reducers';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
@@ -23,32 +35,38 @@ import {
IS_INSPECT_MODAL_OPEN_KEY,
IS_METRIC_DETAILS_OPEN_KEY,
SELECTED_METRIC_NAME_KEY,
SUMMARY_FILTERS_KEY,
} from './constants';
import MetricsSearch from './MetricsSearch';
import MetricsTable from './MetricsTable';
import MetricsTreemap from './MetricsTreemap';
import { OrderByPayload, TreemapViewType } from './types';
import {
convertNanoToMilliseconds,
formatDataForMetricsTable,
getMetricsListQuery,
} from './utils';
import { convertNanoToMilliseconds, formatDataForMetricsTable } from './utils';
import './Summary.styles.scss';
const DEFAULT_ORDER_BY: OrderByPayload = {
columnName: 'samples',
order: 'desc',
const DEFAULT_ORDER_BY: Querybuildertypesv5OrderByDTO = {
key: {
name: 'samples',
},
direction: 'desc',
};
function Summary(): JSX.Element {
const { pageSize, setPageSize } = usePageSize('metricsExplorer');
const [currentPage, setCurrentPage] = useState(1);
const [orderBy, setOrderBy] = useState<OrderByPayload>(DEFAULT_ORDER_BY);
const [heatmapView, setHeatmapView] = useState<TreemapViewType>(
TreemapViewType.TIMESERIES,
const [orderBy, setOrderBy] = useState<Querybuildertypesv5OrderByDTO>(
DEFAULT_ORDER_BY,
);
const [
heatmapView,
setHeatmapView,
] = useState<MetricsexplorertypesTreemapModeDTO>(
MetricsexplorertypesTreemapModeDTO.timeseries,
);
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const query = useMemo(() => currentQuery?.builder?.queryData[0], [
currentQuery,
]);
const [searchParams, setSearchParams] = useSearchParams();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(
@@ -65,17 +83,6 @@ function Summary(): JSX.Element {
(state) => state.globalTime,
);
const queryFilters: TagFilter = useMemo(() => {
const encodedFilters = searchParams.get(SUMMARY_FILTERS_KEY);
if (encodedFilters) {
return JSON.parse(encodedFilters);
}
return {
items: [],
op: 'AND',
};
}, [searchParams]);
useEffect(() => {
logEvent(MetricsExplorerEvents.TabChanged, {
[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -87,105 +94,112 @@ function Summary(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
// This is used to avoid the filters from being serialized with the id
const queryFiltersWithoutId = useMemo(() => {
const filtersWithoutId = {
...queryFilters,
items: queryFilters.items.map(({ id: _id, ...rest }) => rest),
};
return JSON.stringify(filtersWithoutId);
}, [queryFilters]);
const queryFilterExpression = useMemo(() => {
const filters = query?.filters || { items: [], op: 'AND' };
return convertFiltersToExpression(filters);
}, [query?.filters]);
const metricsListQuery = useMemo(() => {
const baseQuery = getMetricsListQuery();
const metricsListQuery: MetricsexplorertypesStatsRequestDTO = useMemo(() => {
return {
...baseQuery,
limit: pageSize,
offset: (currentPage - 1) * pageSize,
filters: queryFilters,
start: convertNanoToMilliseconds(minTime),
end: convertNanoToMilliseconds(maxTime),
limit: pageSize,
offset: (currentPage - 1) * pageSize,
orderBy,
filter: {
expression: queryFilterExpression.expression,
},
};
}, [queryFilters, minTime, maxTime, orderBy, pageSize, currentPage]);
}, [
minTime,
maxTime,
orderBy,
pageSize,
currentPage,
queryFilterExpression.expression,
]);
const metricsTreemapQuery = useMemo(
const metricsTreemapQuery: MetricsexplorertypesTreemapRequestDTO = useMemo(
() => ({
limit: 100,
filters: queryFilters,
treemap: heatmapView,
start: convertNanoToMilliseconds(minTime),
end: convertNanoToMilliseconds(maxTime),
mode: heatmapView,
filter: {
expression: queryFilterExpression.expression,
},
}),
[queryFilters, heatmapView, minTime, maxTime],
[heatmapView, minTime, maxTime, queryFilterExpression.expression],
);
const {
data: metricsData,
isLoading: isMetricsLoading,
isFetching: isMetricsFetching,
isError: isMetricsError,
} = useGetMetricsList(metricsListQuery, {
enabled: !!metricsListQuery && !isInspectModalOpen,
queryKey: [
'metricsList',
queryFiltersWithoutId,
orderBy,
pageSize,
currentPage,
minTime,
maxTime,
],
});
mutate: getMetricsStats,
isLoading: isGetMetricsStatsLoading,
isError: isGetMetricsStatsError,
} = useGetMetricsStats();
const isListViewError = useMemo(
() => isMetricsError || !!(metricsData && metricsData.statusCode !== 200),
[isMetricsError, metricsData],
() => isGetMetricsStatsError || metricsData?.status !== 200,
[isGetMetricsStatsError, metricsData],
);
const {
data: treeMapData,
isLoading: isTreeMapLoading,
isFetching: isTreeMapFetching,
isError: isTreeMapError,
} = useGetMetricsTreeMap(metricsTreemapQuery, {
enabled: !!metricsTreemapQuery && !isInspectModalOpen,
queryKey: [
'metricsTreemap',
queryFiltersWithoutId,
heatmapView,
minTime,
maxTime,
],
});
mutate: getMetricsTreemap,
isLoading: isGetMetricsTreemapLoading,
isError: isGetMetricsTreemapError,
} = useGetMetricsTreemap();
useEffect(() => {
getMetricsStats({
data: metricsListQuery,
});
getMetricsTreemap({
data: metricsTreemapQuery,
});
}, [
metricsTreemapQuery,
metricsListQuery,
getMetricsTreemap,
getMetricsStats,
]);
const isProportionViewError = useMemo(
() => isTreeMapError || treeMapData?.statusCode !== 200,
[isTreeMapError, treeMapData],
() => isGetMetricsTreemapError || treeMapData?.status !== 200,
[isGetMetricsTreemapError, treeMapData],
);
const handleFilterChange = useCallback(
(value: TagFilter) => {
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(value),
(expression: string) => {
const newFilters: TagFilter = {
items: convertExpressionToFilters(expression),
op: 'AND',
};
redirectWithQueryBuilderData({
...currentQuery,
builder: {
...currentQuery.builder,
queryData: [
{
...currentQuery.builder.queryData[0],
filters: newFilters,
filter: {
expression,
},
},
],
},
});
setCurrentPage(1);
if (value.items.length > 0) {
if (expression) {
logEvent(MetricsExplorerEvents.FilterApplied, {
[MetricsExplorerEventKeys.Tab]: 'summary',
});
}
},
[setSearchParams, searchParams],
);
const searchQuery = useMemo(
() => ({
...initialQueriesMap.metrics.builder.queryData[0],
filters: queryFilters,
}),
[queryFilters],
[currentQuery, redirectWithQueryBuilderData],
);
const onPaginationChange = (page: number, pageSize: number): void => {
@@ -202,7 +216,7 @@ function Summary(): JSX.Element {
};
const formattedMetricsData = useMemo(
() => formatDataForMetricsTable(metricsData?.payload?.data?.metrics || []),
() => formatDataForMetricsTable(metricsData?.data?.data?.metrics || []),
[metricsData],
);
@@ -254,7 +268,9 @@ function Summary(): JSX.Element {
});
};
const handleSetHeatmapView = (view: TreemapViewType): void => {
const handleSetHeatmapView = (
view: MetricsexplorertypesTreemapModeDTO,
): void => {
setHeatmapView(view);
logEvent(MetricsExplorerEvents.TreemapViewChanged, {
[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -262,63 +278,71 @@ function Summary(): JSX.Element {
});
};
const handleSetOrderBy = (orderBy: OrderByPayload): void => {
const handleSetOrderBy = (orderBy: Querybuildertypesv5OrderByDTO): void => {
setOrderBy(orderBy);
logEvent(MetricsExplorerEvents.OrderByApplied, {
[MetricsExplorerEventKeys.Tab]: 'summary',
[MetricsExplorerEventKeys.ColumnName]: orderBy.columnName,
[MetricsExplorerEventKeys.Order]: orderBy.order,
[MetricsExplorerEventKeys.ColumnName]: orderBy.key?.name,
[MetricsExplorerEventKeys.Order]: orderBy.direction,
});
};
const isMetricsListDataEmpty = useMemo(
() =>
formattedMetricsData.length === 0 && !isMetricsLoading && !isMetricsFetching,
[formattedMetricsData, isMetricsLoading, isMetricsFetching],
() => formattedMetricsData.length === 0 && !isGetMetricsStatsLoading,
[formattedMetricsData, isGetMetricsStatsLoading],
);
const isMetricsTreeMapDataEmpty = useMemo(
() =>
!treeMapData?.payload?.data[heatmapView]?.length &&
!isTreeMapLoading &&
!isTreeMapFetching,
!treeMapData?.data?.data?.[heatmapView]?.length &&
!isGetMetricsTreemapLoading,
[treeMapData?.data?.data, heatmapView, isGetMetricsTreemapLoading],
);
const showFullScreenLoading = useMemo(
() =>
(isGetMetricsStatsLoading || isGetMetricsTreemapLoading) &&
formattedMetricsData.length === 0 &&
!treeMapData?.data?.data?.[heatmapView]?.length,
[
treeMapData?.payload?.data,
isGetMetricsStatsLoading,
isGetMetricsTreemapLoading,
formattedMetricsData,
treeMapData,
heatmapView,
isTreeMapLoading,
isTreeMapFetching,
],
);
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-summary-tab">
<MetricsSearch query={searchQuery} onChange={handleFilterChange} />
{isMetricsLoading || isTreeMapLoading ? (
<MetricsSearch query={query} onChange={handleFilterChange} />
{showFullScreenLoading ? (
<MetricsLoading />
) : isMetricsListDataEmpty && isMetricsTreeMapDataEmpty ? (
<NoLogs dataSource={DataSource.METRICS} />
) : (
<>
<MetricsTreemap
data={treeMapData?.payload}
isLoading={isTreeMapLoading || isTreeMapFetching}
data={treeMapData?.data?.data}
isLoading={isGetMetricsTreemapLoading}
isError={isProportionViewError}
viewType={heatmapView}
openMetricDetails={openMetricDetails}
setHeatmapView={handleSetHeatmapView}
/>
<MetricsTable
isLoading={isMetricsLoading || isMetricsFetching}
isLoading={isGetMetricsStatsLoading}
isError={isListViewError}
data={formattedMetricsData}
pageSize={pageSize}
currentPage={currentPage}
onPaginationChange={onPaginationChange}
setOrderBy={handleSetOrderBy}
totalCount={metricsData?.payload?.data?.total || 0}
totalCount={metricsData?.data?.data?.total || 0}
openMetricDetails={openMetricDetails}
queryFilters={queryFilters}
queryFilterExpression={queryFilterExpression}
onFilterChange={handleFilterChange}
/>
</>
)}

View File
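`Summary` above replaces the list and treemap query hooks with the generated mutation hooks and re-fires them from an effect whenever the memoized request payloads change. The shape of that pattern, with a hypothetical helper name:

```ts
import { useEffect } from 'react';

// Re-invokes a mutation whenever the request payload changes.
function useFetchOnChange<T>(
	payload: T,
	mutate: (args: { data: T }) => void,
): void {
	useEffect(() => {
		mutate({ data: payload });
	}, [payload, mutate]);
}
```

The payload must stay referentially stable between renders (hence the `useMemo` around `metricsListQuery` and `metricsTreemapQuery`), otherwise the effect would re-run on every render.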

@@ -1,10 +1,10 @@
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { fireEvent, render, screen } from '@testing-library/react';
import { Filter } from 'api/v5/v5';
import * as useGetMetricsListFilterValues from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import * as useQueryBuilderOperationsHooks from 'hooks/queryBuilder/useQueryBuilderOperations';
import store from 'store';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import MetricsTable from '../MetricsTable';
import { MetricsListItemRowData } from '../types';
@@ -30,9 +30,8 @@ const mockData: MetricsListItemRowData[] = [
},
];
const mockQueryFilters: TagFilter = {
items: [],
op: 'AND',
const mockQueryFilterExpression: Filter = {
expression: '',
};
jest.mock('react-router-dom-v5-compat', () => {
@@ -82,7 +81,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -106,8 +106,9 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
isLoading
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -130,7 +131,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -158,7 +160,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -187,7 +190,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={mockOpenMetricDetails}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -212,7 +216,8 @@ describe('MetricsTable', () => {
setOrderBy={mockSetOrderBy}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -222,8 +227,10 @@ describe('MetricsTable', () => {
fireEvent.click(samplesHeader);
expect(mockSetOrderBy).toHaveBeenCalledWith({
columnName: 'samples',
order: 'asc',
key: {
name: 'samples',
},
direction: 'asc',
});
});
});

View File

@@ -1,10 +1,10 @@
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { render, screen } from '@testing-library/react';
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
import store from 'store';
import MetricsTreemap from '../MetricsTreemap';
import { TreemapViewType } from '../types';
jest.mock('d3-hierarchy', () => ({
stratify: jest.fn().mockReturnValue({
@@ -27,14 +27,14 @@ jest.mock('react-use', () => ({
const mockData = [
{
metric_name: 'Metric 1',
metricName: 'Metric 1',
percentage: 0.5,
total_value: 15,
totalValue: 15,
},
{
metric_name: 'Metric 2',
metricName: 'Metric 2',
percentage: 0.6,
total_value: 10,
totalValue: 10,
},
];
@@ -47,14 +47,11 @@ describe('MetricsTreemap', () => {
isLoading={false}
isError={false}
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -72,14 +69,11 @@ describe('MetricsTreemap', () => {
isLoading
isError={false}
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -99,14 +93,11 @@ describe('MetricsTreemap', () => {
isLoading={false}
isError
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -130,7 +121,7 @@ describe('MetricsTreemap', () => {
isError={false}
data={null}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>

View File

@@ -1,8 +1,9 @@
import { Color } from '@signozhq/design-tokens';
import { render } from '@testing-library/react';
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { Filter } from 'api/v5/v5';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { TreemapViewType } from '../types';
import {
@@ -11,52 +12,76 @@ import {
MetricTypeRenderer,
} from '../utils';
describe('metricsTableColumns', () => {
const mockQueryFilters: TagFilter = {
items: [],
op: 'AND',
};
const mockQueryExpression: Filter = {
expression: '',
};
const mockOnChange = jest.fn();
describe('metricsTableColumns', () => {
it('should have correct column definitions', () => {
expect(getMetricsTableColumns(mockQueryFilters)).toHaveLength(6);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange),
).toHaveLength(6);
// Metric Name column
expect(getMetricsTableColumns(mockQueryFilters)[0].dataIndex).toBe(
'metric_name',
);
expect(getMetricsTableColumns(mockQueryFilters)[0].width).toBe(400);
expect(getMetricsTableColumns(mockQueryFilters)[0].sorter).toBe(false);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].dataIndex,
).toBe('metric_name');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].width,
).toBe(400);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].sorter,
).toBe(false);
// Description column
expect(getMetricsTableColumns(mockQueryFilters)[1].dataIndex).toBe(
'description',
);
expect(getMetricsTableColumns(mockQueryFilters)[1].width).toBe(400);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[1].dataIndex,
).toBe('description');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[1].width,
).toBe(400);
// Type column
expect(getMetricsTableColumns(mockQueryFilters)[2].dataIndex).toBe(
'metric_type',
);
expect(getMetricsTableColumns(mockQueryFilters)[2].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[2].sorter).toBe(false);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].dataIndex,
).toBe('metric_type');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].sorter,
).toBe(false);
// Unit column
expect(getMetricsTableColumns(mockQueryFilters)[3].dataIndex).toBe('unit');
expect(getMetricsTableColumns(mockQueryFilters)[3].width).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[3].dataIndex,
).toBe('unit');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[3].width,
).toBe(150);
// Samples column
expect(getMetricsTableColumns(mockQueryFilters)[4].dataIndex).toBe(
TreemapViewType.SAMPLES,
);
expect(getMetricsTableColumns(mockQueryFilters)[4].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[4].sorter).toBe(true);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].dataIndex,
).toBe(TreemapViewType.SAMPLES);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].sorter,
).toBe(true);
// Time Series column
expect(getMetricsTableColumns(mockQueryFilters)[5].dataIndex).toBe(
TreemapViewType.TIMESERIES,
);
expect(getMetricsTableColumns(mockQueryFilters)[5].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[5].sorter).toBe(true);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].dataIndex,
).toBe(TreemapViewType.TIMESERIES);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].sorter,
).toBe(true);
});
describe('MetricTypeRenderer', () => {
@@ -111,12 +136,12 @@ describe('formatDataForMetricsTable', () => {
it('should format metrics data correctly', () => {
const mockData = [
{
metric_name: 'test_metric',
metricName: 'test_metric',
description: 'Test description',
type: MetricType.GAUGE,
type: MetrictypesTypeDTO.gauge,
unit: 'bytes',
[TreemapViewType.SAMPLES]: 1000,
[TreemapViewType.TIMESERIES]: 2000,
samples: 1000,
timeseries: 2000,
lastReceived: '2023-01-01T00:00:00Z',
},
];
@@ -163,12 +188,12 @@ describe('formatDataForMetricsTable', () => {
it('should handle empty/null values', () => {
const mockData = [
{
metric_name: '',
metricName: '',
description: '',
type: MetricType.GAUGE,
type: MetrictypesTypeDTO.gauge,
unit: '',
[TreemapViewType.SAMPLES]: 0,
[TreemapViewType.TIMESERIES]: 0,
samples: 0,
timeseries: 0,
lastReceived: '2023-01-01T00:00:00Z',
},
];

View File

@@ -1,15 +1,17 @@
import {
MetricsexplorertypesTreemapModeDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { TreemapViewType } from './types';
export const METRICS_TABLE_PAGE_SIZE = 10;
export const TREEMAP_VIEW_OPTIONS: {
value: TreemapViewType;
value: MetricsexplorertypesTreemapModeDTO;
label: string;
}[] = [
{ value: TreemapViewType.TIMESERIES, label: 'Time Series' },
{ value: TreemapViewType.SAMPLES, label: 'Samples' },
{ value: MetricsexplorertypesTreemapModeDTO.timeseries, label: 'Time Series' },
{ value: MetricsexplorertypesTreemapModeDTO.samples, label: 'Samples' },
];
export const TREEMAP_HEIGHT = 200;
@@ -17,6 +19,7 @@ export const TREEMAP_SQUARE_PADDING = 5;
export const TREEMAP_MARGINS = { TOP: 10, LEFT: 10, RIGHT: 10, BOTTOM: 10 };
// TODO: Remove this once API migration is complete
export const METRIC_TYPE_LABEL_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
@@ -25,6 +28,14 @@ export const METRIC_TYPE_LABEL_MAP = {
[MetricType.EXPONENTIAL_HISTOGRAM]: 'Exp. Histogram',
};
export const METRIC_TYPE_LABEL_MAP_V2 = {
[MetrictypesTypeDTO.sum]: 'Sum',
[MetrictypesTypeDTO.gauge]: 'Gauge',
[MetrictypesTypeDTO.histogram]: 'Histogram',
[MetrictypesTypeDTO.summary]: 'Summary',
[MetrictypesTypeDTO.exponentialhistogram]: 'Exp. Histogram',
};
export const METRIC_TYPE_VALUES_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
@@ -36,4 +47,3 @@ export const METRIC_TYPE_VALUES_MAP = {
export const IS_METRIC_DETAILS_OPEN_KEY = 'isMetricDetailsOpen';
export const IS_INSPECT_MODAL_OPEN_KEY = 'isInspectModalOpen';
export const SELECTED_METRIC_NAME_KEY = 'selectedMetricName';
export const SUMMARY_FILTERS_KEY = 'summaryFilters';

View File

@@ -1,9 +1,11 @@
import React from 'react';
import { MetricsTreeMapResponse } from 'api/metricsExplorer/getMetricsTreeMap';
import {
IBuilderQuery,
TagFilter,
} from 'types/api/queryBuilder/queryBuilderData';
MetricsexplorertypesTreemapModeDTO,
MetricsexplorertypesTreemapResponseDTO,
Querybuildertypesv5OrderByDTO,
} from 'api/generated/services/sigNoz.schemas';
import { Filter } from 'api/v5/v5';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
export interface MetricsTableProps {
isLoading: boolean;
@@ -12,24 +14,25 @@ export interface MetricsTableProps {
pageSize: number;
currentPage: number;
onPaginationChange: (page: number, pageSize: number) => void;
setOrderBy: (orderBy: OrderByPayload) => void;
setOrderBy: (orderBy: Querybuildertypesv5OrderByDTO) => void;
totalCount: number;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
queryFilters: TagFilter;
queryFilterExpression: Filter;
onFilterChange: (expression: string) => void;
}
export interface MetricsSearchProps {
query: IBuilderQuery;
onChange: (value: TagFilter) => void;
onChange: (expression: string) => void;
}
export interface MetricsTreemapProps {
data: MetricsTreeMapResponse | null | undefined;
data: MetricsexplorertypesTreemapResponseDTO | null | undefined;
isLoading: boolean;
isError: boolean;
viewType: TreemapViewType;
viewType: MetricsexplorertypesTreemapModeDTO;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
setHeatmapView: (value: TreemapViewType) => void;
setHeatmapView: (value: MetricsexplorertypesTreemapModeDTO) => void;
}
export interface OrderByPayload {

View File

@@ -3,14 +3,16 @@ import { Color } from '@signozhq/design-tokens';
import { Tooltip, Typography } from 'antd';
import { ColumnType } from 'antd/es/table';
import {
MetricsListItemData,
MetricsexplorertypesStatDTO,
MetricsexplorertypesTreemapEntryDTO,
MetricsexplorertypesTreemapModeDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import {
MetricsListPayload,
MetricType,
} from 'api/metricsExplorer/getMetricsList';
import {
SamplesData,
TimeseriesData,
} from 'api/metricsExplorer/getMetricsTreeMap';
import { Filter } from 'api/v5/v5';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import {
BarChart,
@@ -19,21 +21,23 @@ import {
Diff,
Gauge,
} from 'lucide-react';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { METRIC_TYPE_LABEL_MAP } from './constants';
import { METRIC_TYPE_LABEL_MAP, METRIC_TYPE_LABEL_MAP_V2 } from './constants';
import MetricNameSearch from './MetricNameSearch';
import MetricTypeSearch from './MetricTypeSearch';
import { MetricsListItemRowData, TreemapTile, TreemapViewType } from './types';
import { MetricsListItemRowData, TreemapTile } from './types';
export const getMetricsTableColumns = (
queryFilters: TagFilter,
queryFilterExpression: Filter,
onFilterChange: (expression: string) => void,
): ColumnType<MetricsListItemRowData>[] => [
{
title: (
<div className="metric-name-column-header">
<span className="metric-name-column-header-text">METRIC</span>
<MetricNameSearch queryFilters={queryFilters} />
<MetricNameSearch
queryFilterExpression={queryFilterExpression}
onFilterChange={onFilterChange}
/>
</div>
),
dataIndex: 'metric_name',
@@ -55,7 +59,11 @@ export const getMetricsTableColumns = (
title: (
<div className="metric-type-column-header">
<span className="metric-type-column-header-text">TYPE</span>
<MetricTypeSearch queryFilters={queryFilters} />
{/* TODO: @amlannandy: Re-enable once API supports metric type filtering */}
{/* <MetricTypeSearch
queryFilters={queryFilters}
onFilterChange={onFilterChange}
/> */}
</div>
),
dataIndex: 'metric_type',
@@ -69,13 +77,13 @@ export const getMetricsTableColumns = (
},
{
title: 'SAMPLES',
dataIndex: TreemapViewType.SAMPLES,
dataIndex: MetricsexplorertypesTreemapModeDTO.samples,
width: 150,
sorter: true,
},
{
title: 'TIME SERIES',
dataIndex: TreemapViewType.TIMESERIES,
dataIndex: MetricsexplorertypesTreemapModeDTO.timeseries,
width: 150,
sorter: true,
},
@@ -143,6 +151,60 @@ export function MetricTypeRenderer({
);
}
export function MetricTypeRendererV2({
type,
}: {
type: MetrictypesTypeDTO;
}): JSX.Element {
const [icon, color] = useMemo(() => {
switch (type) {
case MetrictypesTypeDTO.sum:
return [
<Diff key={type} size={12} color={Color.BG_ROBIN_500} />,
Color.BG_ROBIN_500,
];
case MetrictypesTypeDTO.gauge:
return [
<Gauge key={type} size={12} color={Color.BG_SAKURA_500} />,
Color.BG_SAKURA_500,
];
case MetrictypesTypeDTO.histogram:
return [
<BarChart2 key={type} size={12} color={Color.BG_SIENNA_500} />,
Color.BG_SIENNA_500,
];
case MetrictypesTypeDTO.summary:
return [
<BarChartHorizontal key={type} size={12} color={Color.BG_FOREST_500} />,
Color.BG_FOREST_500,
];
case MetrictypesTypeDTO.exponentialhistogram:
return [
<BarChart key={type} size={12} color={Color.BG_AQUA_500} />,
Color.BG_AQUA_500,
];
default:
return [null, ''];
}
}, [type]);
return (
<div
className="metric-type-renderer"
style={{
backgroundColor: `${color}33`,
border: `1px solid ${color}`,
color,
}}
>
{icon}
<Typography.Text style={{ color, fontSize: 12 }}>
{METRIC_TYPE_LABEL_MAP_V2[type]}
</Typography.Text>
</div>
);
}
function ValidateRowValueWrapper({
value,
children,
@@ -182,13 +244,13 @@ export const formatNumberIntoHumanReadableFormat = (
};
export const formatDataForMetricsTable = (
data: MetricsListItemData[],
data: MetricsexplorertypesStatDTO[],
): MetricsListItemRowData[] =>
data.map((metric) => ({
key: metric.metric_name,
key: metric.metricName,
metric_name: (
<ValidateRowValueWrapper value={metric.metric_name}>
<Tooltip title={metric.metric_name}>{metric.metric_name}</Tooltip>
<ValidateRowValueWrapper value={metric.metricName}>
<Tooltip title={metric.metricName}>{metric.metricName}</Tooltip>
</ValidateRowValueWrapper>
),
description: (
@@ -198,39 +260,54 @@ export const formatDataForMetricsTable = (
</Tooltip>
</ValidateRowValueWrapper>
),
metric_type: <MetricTypeRenderer type={metric.type} />,
metric_type: <MetricTypeRendererV2 type={metric.type} />,
unit: (
<ValidateRowValueWrapper value={getUniversalNameFromMetricUnit(metric.unit)}>
{getUniversalNameFromMetricUnit(metric.unit)}
</ValidateRowValueWrapper>
),
[TreemapViewType.SAMPLES]: (
<ValidateRowValueWrapper value={metric[TreemapViewType.SAMPLES]}>
<Tooltip title={metric[TreemapViewType.SAMPLES].toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric[TreemapViewType.SAMPLES])}
[MetricsexplorertypesTreemapModeDTO.samples]: (
<ValidateRowValueWrapper
value={metric[MetricsexplorertypesTreemapModeDTO.samples]}
>
<Tooltip
title={metric[MetricsexplorertypesTreemapModeDTO.samples].toLocaleString()}
>
{formatNumberIntoHumanReadableFormat(
metric[MetricsexplorertypesTreemapModeDTO.samples],
)}
</Tooltip>
</ValidateRowValueWrapper>
),
[TreemapViewType.TIMESERIES]: (
<ValidateRowValueWrapper value={metric[TreemapViewType.TIMESERIES]}>
<Tooltip title={metric[TreemapViewType.TIMESERIES].toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric[TreemapViewType.TIMESERIES])}
[MetricsexplorertypesTreemapModeDTO.timeseries]: (
<ValidateRowValueWrapper
value={metric[MetricsexplorertypesTreemapModeDTO.timeseries]}
>
<Tooltip
title={metric[
MetricsexplorertypesTreemapModeDTO.timeseries
].toLocaleString()}
>
{formatNumberIntoHumanReadableFormat(
metric[MetricsexplorertypesTreemapModeDTO.timeseries],
)}
</Tooltip>
</ValidateRowValueWrapper>
),
}));
export const transformTreemapData = (
data: TimeseriesData[] | SamplesData[],
viewType: TreemapViewType,
data: MetricsexplorertypesTreemapEntryDTO[],
viewType: MetricsexplorertypesTreemapModeDTO,
): TreemapTile[] => {
const totalSize = (data as (TimeseriesData | SamplesData)[]).reduce(
(acc: number, item: TimeseriesData | SamplesData) => acc + item.percentage,
const totalSize = data.reduce(
(acc: number, item: MetricsexplorertypesTreemapEntryDTO) =>
acc + item.percentage,
0,
);
const children = data.map((item) => ({
id: item.metric_name,
id: item.metricName,
size: totalSize > 0 ? Number((item.percentage / totalSize).toFixed(2)) : 0,
displayValue: Number(item.percentage).toFixed(2),
parent: viewType,

View File

@@ -1,59 +0,0 @@
import { useCallback, useState } from 'react';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import type { UseActiveLog } from 'hooks/logs/types';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { ILog } from 'types/api/logs/log';
type SelectedTab = typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined;
type UseLogDetailHandlersParams = {
defaultTab?: SelectedTab;
};
type UseLogDetailHandlersResult = {
activeLog: UseActiveLog['activeLog'];
onAddToQuery: UseActiveLog['onAddToQuery'];
selectedTab: SelectedTab;
handleSetActiveLog: (log: ILog, selectedTab?: SelectedTab) => void;
handleCloseLogDetail: () => void;
};
function useLogDetailHandlers({
defaultTab = VIEW_TYPES.OVERVIEW,
}: UseLogDetailHandlersParams = {}): UseLogDetailHandlersResult {
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
} = useActiveLog();
const [selectedTab, setSelectedTab] = useState<SelectedTab>(defaultTab);
const handleSetActiveLog = useCallback(
(log: ILog, nextTab: SelectedTab = defaultTab): void => {
if (activeLog?.id === log.id) {
onClearActiveLog();
setSelectedTab(undefined);
return;
}
onSetActiveLog(log);
setSelectedTab(nextTab ?? defaultTab);
},
[activeLog?.id, defaultTab, onClearActiveLog, onSetActiveLog],
);
const handleCloseLogDetail = useCallback((): void => {
onClearActiveLog();
setSelectedTab(undefined);
}, [onClearActiveLog]);
return {
activeLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
};
}
export default useLogDetailHandlers;

View File

@@ -1,28 +0,0 @@
import { useCallback } from 'react';
import type { VirtuosoHandle } from 'react-virtuoso';
type UseScrollToLogParams = {
logs: Array<{ id: string }>;
virtuosoRef: React.RefObject<VirtuosoHandle | null>;
};
function useScrollToLog({
logs,
virtuosoRef,
}: UseScrollToLogParams): (logId: string) => void {
return useCallback(
(logId: string): void => {
const logIndex = logs.findIndex(({ id }) => id === logId);
if (logIndex !== -1 && virtuosoRef.current) {
virtuosoRef.current.scrollToIndex({
index: logIndex,
align: 'center',
behavior: 'smooth',
});
}
},
[logs, virtuosoRef],
);
}
export default useScrollToLog;

View File

@@ -7,16 +7,11 @@ import { merge } from 'lodash-es';
import noop from 'lodash-es/noop';
import uPlot, { Cursor, Hooks, Options } from 'uplot';
import {
DEFAULT_CURSOR_CONFIG,
DEFAULT_HOVER_PROXIMITY_VALUE,
DEFAULT_PLOT_CONFIG,
STEP_INTERVAL_MULTIPLIER,
} from '../constants';
import { calculateWidthBasedOnStepInterval } from '../utils';
import {
ConfigBuilder,
ConfigBuilderProps,
DEFAULT_CURSOR_CONFIG,
DEFAULT_PLOT_CONFIG,
LegendItem,
SelectionPreferencesSource,
} from './types';
@@ -50,8 +45,6 @@ export class UPlotConfigBuilder extends ConfigBuilder<
private axes: Record<string, UPlotAxisBuilder> = {};
private stepInterval: number | undefined;
readonly scales: UPlotScaleBuilder[] = [];
private bands: uPlot.Band[] = [];
@@ -86,7 +79,6 @@ export class UPlotConfigBuilder extends ConfigBuilder<
tzDate,
selectionPreferencesSource,
shouldSaveSelectionPreference,
stepInterval,
} = args ?? {};
if (widgetId) {
this.widgetId = widgetId;
@@ -104,10 +96,6 @@ export class UPlotConfigBuilder extends ConfigBuilder<
this.shouldSaveSelectionPreference = shouldSaveSelectionPreference;
}
if (stepInterval) {
this.stepInterval = stepInterval;
}
this.onDragSelect = noop;
if (onDragSelect) {
this.onDragSelect = onDragSelect;
@@ -384,31 +372,6 @@ export class UPlotConfigBuilder extends ConfigBuilder<
return this.widgetId;
}
/**
* Get cursor configuration
*/
getCursorConfig(): Cursor {
if (this.stepInterval) {
const cursorConfig = {
...DEFAULT_CURSOR_CONFIG,
hover: {
...DEFAULT_CURSOR_CONFIG.hover,
prox: this.stepInterval
? (uPlotInstance: uPlot): number => {
const width = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: this.stepInterval ?? 0,
});
return width * STEP_INTERVAL_MULTIPLIER;
}
: DEFAULT_HOVER_PROXIMITY_VALUE,
},
};
return merge({}, DEFAULT_CURSOR_CONFIG, cursorConfig, this.cursor);
}
return merge({}, DEFAULT_CURSOR_CONFIG, this.cursor);
}
/**
* Build the final uPlot.Options configuration
*/
@@ -450,7 +413,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
config.hooks = this.hooks;
config.select = this.select;
config.cursor = this.getCursorConfig();
config.cursor = merge({}, DEFAULT_CURSOR_CONFIG, this.cursor);
config.tzDate = this.tzDate;
config.plugins = this.plugins.length > 0 ? this.plugins : undefined;
config.bands = this.bands.length > 0 ? this.bands : undefined;

View File

@@ -1,7 +1,6 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils';
import uPlot, { Series } from 'uplot';
import {
@@ -292,16 +291,21 @@ function getBarPathBuilder({
idx1: number,
): Series.Paths | null => {
let effectiveBarMaxWidth = barMaxWidth;
const widthBasedOnStepInterval = calculateWidthBasedOnStepInterval({
uPlotInstance: self,
stepInterval,
});
if (widthBasedOnStepInterval > 0) {
effectiveBarMaxWidth = Math.min(
effectiveBarMaxWidth,
widthBasedOnStepInterval,
);
const xScale = self.scales.x as uPlot.Scale | undefined;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = self.valToPos(start, 'x');
const endPx = self.valToPos(end, 'x');
const intervalPx = Math.abs(endPx - startPx);
if (intervalPx > 0) {
effectiveBarMaxWidth =
typeof barMaxWidth === 'number'
? Math.min(barMaxWidth, intervalPx)
: intervalPx;
}
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;

View File
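The inlined logic above caps the bar width at the pixel span of one step interval, using uPlot's `valToPos` to convert x-scale values into pixels. The same measurement as a standalone helper, equivalent to the removed `calculateWidthBasedOnStepInterval` utility:

```ts
import type uPlot from 'uplot';

// Pixel width of one step interval: distance between x(min) and x(min + step).
function stepIntervalWidthPx(u: uPlot, stepInterval: number): number {
	const xScale = u.scales.x;
	if (!xScale || typeof xScale.min !== 'number') {
		return 0;
	}
	return Math.abs(
		u.valToPos(xScale.min + stepInterval, 'x') - u.valToPos(xScale.min, 'x'),
	);
}
```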

@@ -1,10 +1,6 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import uPlot from 'uplot';
import {
DEFAULT_HOVER_PROXIMITY_VALUE,
STEP_INTERVAL_MULTIPLIER,
} from '../../constants';
import type { SeriesProps } from '../types';
import { DrawStyle, SelectionPreferencesSource } from '../types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder';
@@ -17,14 +13,6 @@ jest.mock(
}),
);
jest.mock('lib/uPlotV2/utils', () => ({
calculateWidthBasedOnStepInterval: jest.fn(),
}));
const calculateWidthBasedOnStepIntervalMock = jest.requireMock(
'lib/uPlotV2/utils',
).calculateWidthBasedOnStepInterval as jest.Mock;
const getStoredSeriesVisibilityMock = jest.requireMock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
) as {
@@ -396,52 +384,6 @@ describe('UPlotConfigBuilder', () => {
expect(config.cursor?.points).toBeDefined();
});
describe('getCursorConfig', () => {
it('returns default cursor merged with custom cursor when no stepInterval', () => {
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
});
const cursorConfig = builder.getCursorConfig();
expect(cursorConfig.drag?.setScale).toBe(false);
expect(cursorConfig.hover?.prox).toBe(DEFAULT_HOVER_PROXIMITY_VALUE);
expect(cursorConfig.points).toBeDefined();
});
it('returns hover prox as DEFAULT_HOVER_PROXIMITY_VALUE when stepInterval is not set', () => {
const builder = new UPlotConfigBuilder();
const cursorConfig = builder.getCursorConfig();
expect(cursorConfig.hover?.prox).toBe(DEFAULT_HOVER_PROXIMITY_VALUE);
});
it('returns hover prox as function when stepInterval is set, computing width * multiplier', () => {
const stepInterval = 60;
const mockWidth = 100;
calculateWidthBasedOnStepIntervalMock.mockReturnValue(mockWidth);
const builder = new UPlotConfigBuilder({ stepInterval });
const cursorConfig = builder.getCursorConfig();
expect(typeof cursorConfig.hover?.prox).toBe('function');
const uPlotInstance = {} as uPlot;
const proxResult = (cursorConfig.hover!.prox as (u: uPlot) => number)(
uPlotInstance,
);
expect(calculateWidthBasedOnStepIntervalMock).toHaveBeenCalledWith({
uPlotInstance,
stepInterval,
});
expect(proxResult).toBe(mockWidth * STEP_INTERVAL_MULTIPLIER);
});
});
it('adds plugins and includes them in config', () => {
const builder = new UPlotConfigBuilder();
const plugin: uPlot.Plugin = {

View File

@@ -1,6 +1,6 @@
import { PrecisionOption } from 'components/Graph/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import uPlot, { Series } from 'uplot';
import uPlot, { Cursor, Options, Series } from 'uplot';
import { ThresholdsDrawHookOptions } from '../hooks/types';
@@ -39,7 +39,6 @@ export interface ConfigBuilderProps {
tzDate?: uPlot.LocalDateFromUnix;
selectionPreferencesSource?: SelectionPreferencesSource;
shouldSaveSelectionPreference?: boolean;
stepInterval?: number;
}
/**
@@ -187,3 +186,47 @@ export interface LegendItem {
color: uPlot.Series['stroke'];
show: boolean;
}
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
focus: {
alpha: 0.3,
},
cursor: {
focus: {
prox: 30,
},
},
legend: {
show: false,
},
padding: [16, 16, 8, 8],
series: [],
hooks: {},
};
const POINTS_FILL_COLOR = '#FFFFFF';
export const DEFAULT_CURSOR_CONFIG: Cursor = {
drag: { setScale: true },
points: {
one: true,
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
width: (_u, _seriesIdx, size) => size / 4,
stroke: (u, seriesIdx): string => {
const points = u.series[seriesIdx]?.points;
const strokeFn =
typeof points?.stroke === 'function' ? points.stroke : undefined;
const strokeValue =
strokeFn !== undefined
? strokeFn(u, seriesIdx)
: typeof points?.stroke === 'string'
? points.stroke
: '';
return `${strokeValue}90`;
},
fill: (): string => POINTS_FILL_COLOR,
},
focus: {
prox: 30,
},
};

View File

@@ -1,48 +0,0 @@
import { Cursor, Options } from 'uplot';
const POINTS_FILL_COLOR = '#FFFFFF';
export const DEFAULT_HOVER_PROXIMITY_VALUE = 30; // only snap if within 30px horizontally
export const DEFAULT_FOCUS_PROXIMITY_VALUE = 1e6;
export const STEP_INTERVAL_MULTIPLIER = 3; // multiply the width computed from the step interval by STEP_INTERVAL_MULTIPLIER to get the hover prox value
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
focus: {
alpha: 0.3,
},
legend: {
show: false,
},
padding: [16, 16, 8, 8],
series: [],
hooks: {},
};
export const DEFAULT_CURSOR_CONFIG: Cursor = {
drag: { setScale: true },
points: {
one: true,
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
width: (_u, _seriesIdx, size) => size / 4,
stroke: (u, seriesIdx): string => {
const points = u.series[seriesIdx]?.points;
const strokeFn =
typeof points?.stroke === 'function' ? points.stroke : undefined;
const strokeValue =
strokeFn !== undefined
? strokeFn(u, seriesIdx)
: typeof points?.stroke === 'string'
? points.stroke
: '';
return `${strokeValue}90`;
},
fill: (): string => POINTS_FILL_COLOR,
},
focus: {
prox: DEFAULT_FOCUS_PROXIMITY_VALUE,
},
hover: {
prox: DEFAULT_HOVER_PROXIMITY_VALUE,
bias: 0,
},
};

View File

@@ -87,7 +87,7 @@ export function shouldShowTooltipForSync(
export function shouldShowTooltipForInteraction(
controller: TooltipControllerState,
): boolean {
return controller.focusedSeriesIndex != null;
return controller.focusedSeriesIndex != null || controller.isAnySeriesActive;
}
export function updateHoverState(

View File

@@ -1,70 +0,0 @@
import uPlot from 'uplot';
import { calculateWidthBasedOnStepInterval } from '../index';
describe('calculateWidthBasedOnStepInterval', () => {
it('returns pixel width between start and start+stepInterval when xScale exists with numeric min', () => {
const valToPos = jest
.fn()
.mockReturnValueOnce(100) // startPx for start
.mockReturnValueOnce(250); // endPx for start + stepInterval
const uPlotInstance = ({
scales: { x: { min: 1000 } },
valToPos,
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(valToPos).toHaveBeenCalledWith(1000, 'x');
expect(valToPos).toHaveBeenCalledWith(1060, 'x');
expect(result).toBe(150); // Math.abs(250 - 100)
});
it('returns absolute pixel width when endPx is less than startPx', () => {
const valToPos = jest.fn().mockReturnValueOnce(250).mockReturnValueOnce(100);
const uPlotInstance = ({
scales: { x: { min: 0 } },
valToPos,
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(result).toBe(150); // Math.abs(100 - 250)
});
it('returns 0 when xScale is undefined', () => {
const uPlotInstance = ({
scales: { x: undefined },
valToPos: jest.fn(),
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(result).toBe(0);
});
it('returns 0 when xScale.min is not a number', () => {
const uPlotInstance = ({
scales: { x: { min: undefined } },
valToPos: jest.fn(),
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(result).toBe(0);
});
});

View File

@@ -1,17 +0,0 @@
export function calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval,
}: {
uPlotInstance: uPlot;
stepInterval: number;
}): number {
const xScale = uPlotInstance.scales.x;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = uPlotInstance.valToPos(start, 'x');
const endPx = uPlotInstance.valToPos(end, 'x');
return Math.abs(endPx - startPx);
}
return 0;
}

View File

@@ -567,15 +567,6 @@ body {
border: 1px solid var(--bg-vanilla-300);
}
.ant-tooltip {
--antd-arrow-background-color: var(--bg-vanilla-100);
.ant-tooltip-inner {
background-color: var(--bg-vanilla-100);
color: var(---bg-ink-500);
}
}
.ant-dropdown-menu {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);

View File

@@ -9,9 +9,10 @@ export const getDefaultLogBackground = (
if (isReadOnly) {
return '';
}
// TODO handle the light mode here
return `&:hover {
background-color: ${
isDarkMode ? 'rgba(171, 189, 255, 0.04)' : 'rgba(0, 0, 0, 0.04)'
isDarkMode ? 'rgba(171, 189, 255, 0.04)' : 'var(--bg-vanilla-200)'
};
}`;
};
@@ -27,38 +28,22 @@ export const getActiveLogBackground = (
if (isDarkMode) {
switch (logType) {
case LogType.INFO:
return `background-color: ${Color.BG_ROBIN_500}40 !important;`;
return `background-color: ${Color.BG_ROBIN_500}10 !important;`;
case LogType.WARN:
return `background-color: ${Color.BG_AMBER_500}40 !important;`;
return `background-color: ${Color.BG_AMBER_500}10 !important;`;
case LogType.ERROR:
return `background-color: ${Color.BG_CHERRY_500}40 !important;`;
return `background-color: ${Color.BG_CHERRY_500}10 !important;`;
case LogType.TRACE:
return `background-color: ${Color.BG_FOREST_400}40 !important;`;
return `background-color: ${Color.BG_FOREST_400}10 !important;`;
case LogType.DEBUG:
return `background-color: ${Color.BG_AQUA_500}40 !important;`;
return `background-color: ${Color.BG_AQUA_500}10 !important;`;
case LogType.FATAL:
return `background-color: ${Color.BG_SAKURA_500}40 !important;`;
return `background-color: ${Color.BG_SAKURA_500}10 !important;`;
default:
return `background-color: ${Color.BG_ROBIN_500}40 !important;`;
return `background-color: ${Color.BG_SLATE_200} !important;`;
}
}
// Light mode - use lighter background colors
switch (logType) {
case LogType.INFO:
return `background-color: ${Color.BG_ROBIN_100} !important;`;
case LogType.WARN:
return `background-color: ${Color.BG_AMBER_100} !important;`;
case LogType.ERROR:
return `background-color: ${Color.BG_CHERRY_100} !important;`;
case LogType.TRACE:
return `background-color: ${Color.BG_FOREST_200} !important;`;
case LogType.DEBUG:
return `background-color: ${Color.BG_AQUA_100} !important;`;
case LogType.FATAL:
return `background-color: ${Color.BG_SAKURA_100} !important;`;
default:
return `background-color: ${Color.BG_VANILLA_300} !important;`;
}
return `background-color: ${Color.BG_VANILLA_400}!important; color: ${Color.TEXT_SLATE_400} !important;`;
};
export const getHightLightedLogBackground = (

View File

@@ -10,26 +10,6 @@ import (
)
func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
if err := router.Handle("/api/v2/metrics", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.ListMetrics),
handler.OpenAPIDef{
ID: "ListMetrics",
Tags: []string{"metrics"},
Summary: "List metric names",
Description: "This endpoint returns a list of distinct metric names within the specified time range",
Request: nil,
RequestQuery: new(metricsexplorertypes.ListMetricsParams),
RequestContentType: "",
Response: new(metricsexplorertypes.ListMetricsResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/metrics/stats", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetStats),
handler.OpenAPIDef{
@@ -68,27 +48,26 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metrics/{metric_name}/attributes", handler.New(
if err := router.Handle("/api/v2/metrics/attributes", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricAttributes),
handler.OpenAPIDef{
ID: "GetMetricAttributes",
Tags: []string{"metrics"},
Summary: "Get metric attributes",
Description: "This endpoint returns attribute keys and their unique values for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricAttributesRequest),
RequestContentType: "",
Request: new(metricsexplorertypes.MetricAttributesRequest),
RequestContentType: "application/json",
Response: new(metricsexplorertypes.MetricAttributesResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/metrics/{metric_name}/metadata", handler.New(
if err := router.Handle("/api/v2/metrics/metadata", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricMetadata),
handler.OpenAPIDef{
ID: "GetMetricMetadata",
@@ -96,6 +75,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric metadata",
Description: "This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricMetadata),
ResponseContentType: "application/json",
@@ -126,7 +106,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metrics/{metric_name}/highlights", handler.New(
if err := router.Handle("/api/v2/metric/highlights", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricHighlights),
handler.OpenAPIDef{
ID: "GetMetricHighlights",
@@ -134,6 +114,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric highlights",
Description: "This endpoint returns highlights like number of datapoints, totaltimeseries, active time series, last received time for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricHighlightsResponse),
ResponseContentType: "application/json",
@@ -145,7 +126,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metrics/{metric_name}/alerts", handler.New(
if err := router.Handle("/api/v2/metric/alerts", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricAlerts),
handler.OpenAPIDef{
ID: "GetMetricAlerts",
@@ -153,6 +134,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric alerts",
Description: "This endpoint returns associated alerts for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricAlertsResponse),
ResponseContentType: "application/json",
@@ -164,7 +146,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metrics/{metric_name}/dashboards", handler.New(
if err := router.Handle("/api/v2/metric/dashboards", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricDashboards),
handler.OpenAPIDef{
ID: "GetMetricDashboards",
@@ -172,6 +154,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric dashboards",
Description: "This endpoint returns associated dashboards for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricDashboardsResponse),
ResponseContentType: "application/json",
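For reference, the route changes above move the metric name out of the URL path: metadata, highlights, alerts, and dashboards now read it from a metricName query parameter, while attributes becomes a POST carrying the metric name in a JSON body. A minimal client-side sketch of the new request shapes, assuming a local host and a made-up metric name; the JSON field name is an assumption based on the handler's error messages, not the actual struct tag:

package main

import (
	"bytes"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	base := "http://localhost:8080" // placeholder host, not from the diff

	// Metadata: the metric name moves from a path segment to a query parameter.
	q := url.Values{"metricName": []string{"http.server.duration.count"}}
	resp, err := http.Get(fmt.Sprintf("%s/api/v2/metrics/metadata?%s", base, q.Encode()))
	if err == nil {
		resp.Body.Close()
	}

	// Attributes: now a POST with the metric name carried in the JSON body
	// (field name assumed; check MetricAttributesRequest for the actual tag).
	body := bytes.NewBufferString(`{"metricName": "http.server.duration.count"}`)
	resp, err = http.Post(base+"/api/v2/metrics/attributes", "application/json", body)
	if err == nil {
		resp.Body.Close()
	}
}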

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
)
type ServeOpenAPIFunc func(openapi.OperationContext)
@@ -60,39 +59,7 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
}
// Add request structure
reqOpts := []openapi.ContentOption{openapi.WithContentType(handler.openAPIDef.RequestContentType)}
if len(handler.openAPIDef.RequestExamples) > 0 {
reqOpts = append(reqOpts, openapi.WithCustomize(func(cor openapi.ContentOrReference) {
rbOrRef, ok := cor.(*openapi3.RequestBodyOrRef)
if !ok || rbOrRef.RequestBody == nil {
return
}
ct := handler.openAPIDef.RequestContentType
if ct == "" {
ct = "application/json"
}
mt, exists := rbOrRef.RequestBody.Content[ct]
if !exists {
return
}
if mt.Examples == nil {
mt.Examples = make(map[string]openapi3.ExampleOrRef)
}
for _, ex := range handler.openAPIDef.RequestExamples {
val := ex.Value
oaExample := openapi3.Example{Value: &val}
if ex.Summary != "" {
oaExample.WithSummary(ex.Summary)
}
if ex.Description != "" {
oaExample.WithDescription(ex.Description)
}
mt.Examples[ex.Name] = openapi3.ExampleOrRef{Example: &oaExample}
}
rbOrRef.RequestBody.Content[ct] = mt
}))
}
opCtx.AddReqStructure(handler.openAPIDef.Request, reqOpts...)
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
// Add request query structure
opCtx.AddReqStructure(handler.openAPIDef.RequestQuery)

View File

@@ -9,14 +9,6 @@ import (
"github.com/swaggest/rest/openapi"
)
// OpenAPIExample is a named example for an OpenAPI operation.
type OpenAPIExample struct {
Name string
Summary string
Description string
Value any
}
// Def is the definition of an OpenAPI operation
type OpenAPIDef struct {
ID string
@@ -26,7 +18,6 @@ type OpenAPIDef struct {
Request any
RequestQuery any
RequestContentType string
RequestExamples []OpenAPIExample
Response any
ResponseContentType string
SuccessStatusCode int

View File

@@ -13,14 +13,6 @@ import (
"github.com/gorilla/mux"
)
func extractMetricName(req *http.Request) (string, error) {
metricName := mux.Vars(req)["metric_name"]
if metricName == "" {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required in URL path")
}
return metricName, nil
}
type handler struct {
module metricsexplorer.Module
}
@@ -32,34 +24,6 @@ func NewHandler(m metricsexplorer.Module) metricsexplorer.Handler {
}
}
func (h *handler) ListMetrics(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
var params metricsexplorertypes.ListMetricsParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if err := params.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.ListMetrics(req.Context(), orgID, &params)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
func (h *handler) GetStats(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
@@ -150,23 +114,28 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
return
}
metricName, err := extractMetricName(req)
if err != nil {
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
metadataMap, err := h.module.GetMetricMetadataMulti(req.Context(), orgID, []string{metricName})
metadataMap, err := h.module.GetMetricMetadataMulti(req.Context(), orgID, []string{params.MetricName})
if err != nil {
render.Error(rw, err)
return
}
metadata, ok := metadataMap[metricName]
metadata, ok := metadataMap[params.MetricName]
if !ok || metadata == nil {
render.Error(rw, errors.NewNotFoundf(errors.CodeNotFound, "metadata not found for metric %q", metricName))
render.Error(rw, errors.NewNotFoundf(errors.CodeNotFound, "metadata not found for metric %q", params.MetricName))
return
}
@@ -180,14 +149,19 @@ func (h *handler) GetMetricAlerts(rw http.ResponseWriter, req *http.Request) {
return
}
metricName, err := extractMetricName(req)
if err != nil {
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricAlerts(req.Context(), orgID, metricName)
out, err := h.module.GetMetricAlerts(req.Context(), orgID, params.MetricName)
if err != nil {
render.Error(rw, err)
return
@@ -202,14 +176,19 @@ func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request)
return
}
metricName, err := extractMetricName(req)
if err != nil {
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricDashboards(req.Context(), orgID, metricName)
out, err := h.module.GetMetricDashboards(req.Context(), orgID, params.MetricName)
if err != nil {
render.Error(rw, err)
return
@@ -224,14 +203,19 @@ func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request)
return
}
metricName, err := extractMetricName(req)
if err != nil {
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
highlights, err := h.module.GetMetricHighlights(req.Context(), orgID, metricName)
highlights, err := h.module.GetMetricHighlights(req.Context(), orgID, params.MetricName)
if err != nil {
render.Error(rw, err)
return
@@ -246,21 +230,8 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)
return
}
metricName, err := extractMetricName(req)
if err != nil {
render.Error(rw, err)
return
}
var in metricsexplorertypes.MetricAttributesRequest
if err := binding.Query.BindQuery(req.URL.Query(), &in); err != nil {
render.Error(rw, err)
return
}
in.MetricName = metricName
if err := in.Validate(); err != nil {
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}

View File

@@ -56,88 +56,6 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
}
}
func (m *module) ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
if err := params.Validate(); err != nil {
return nil, err
}
sb := sqlbuilder.NewSelectBuilder()
sb.Select("DISTINCT metric_name")
if params.Start != nil && params.End != nil {
start, end, distributedTsTable, _ := telemetrymetrics.WhichTSTableToUse(uint64(*params.Start), uint64(*params.End), nil)
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
sb.Where(sb.Between("unix_milli", start, end))
} else {
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV41weekTableName))
}
sb.Where(sb.E("__normalized", false))
if params.Search != "" {
searchLower := strings.ToLower(params.Search)
searchLower = strings.ReplaceAll(searchLower, "%", "\\%")
searchLower = strings.ReplaceAll(searchLower, "_", "\\_")
sb.Where(sb.Like("lower(metric_name)", fmt.Sprintf("%%%s%%", searchLower)))
}
sb.OrderBy("metric_name ASC")
sb.Limit(params.Limit)
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(valueCtx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list metrics")
}
defer rows.Close()
metricNames := make([]string, 0)
for rows.Next() {
var name string
if err := rows.Scan(&name); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metric name")
}
metricNames = append(metricNames, name)
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metric names")
}
if len(metricNames) == 0 {
return &metricsexplorertypes.ListMetricsResponse{
Metrics: []metricsexplorertypes.ListMetric{},
}, nil
}
metadata, err := m.GetMetricMetadataMulti(ctx, orgID, metricNames)
if err != nil {
return nil, err
}
metrics := make([]metricsexplorertypes.ListMetric, 0, len(metricNames))
for _, name := range metricNames {
metric := metricsexplorertypes.ListMetric{
MetricName: name,
}
if meta, ok := metadata[name]; ok && meta != nil {
metric.Description = meta.Description
metric.MetricType = meta.MetricType
metric.MetricUnit = meta.MetricUnit
metric.Temporality = meta.Temporality
metric.IsMonotonic = meta.IsMonotonic
}
metrics = append(metrics, metric)
}
return &metricsexplorertypes.ListMetricsResponse{
Metrics: metrics,
}, nil
}
func (m *module) GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error) {
if err := req.Validate(); err != nil {
return nil, err

View File

@@ -10,7 +10,6 @@ import (
// Handler exposes HTTP handlers for the metrics module.
type Handler interface {
ListMetrics(http.ResponseWriter, *http.Request)
GetStats(http.ResponseWriter, *http.Request)
GetTreemap(http.ResponseWriter, *http.Request)
GetMetricMetadata(http.ResponseWriter, *http.Request)
@@ -23,7 +22,6 @@ type Handler interface {
// Module represents the metrics module interface.
type Module interface {
ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error)
GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error)
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)

View File

@@ -12,30 +12,60 @@ import (
)
const (
derivedKeyHTTPURL = "http_url" // https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url
derivedKeyHTTPHost = "http_host"
urlPathKeyLegacy = "http.url"
serverAddressKeyLegacy = "net.peer.name"
urlPathKey = "url.full"
serverAddressKey = "server.address"
)
var defaultStepInterval = 60 * time.Second
var (
groupByKeyHTTPHost = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPHost,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
type SemconvFieldMapping struct {
LegacyField string
CurrentField string
FieldType telemetrytypes.FieldDataType
Context telemetrytypes.FieldContext
}
var dualSemconvGroupByKeys = map[string][]qbtypes.GroupByKey{
"server": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
}
groupByKeyHTTPURL = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPURL,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
}
)
},
"url": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
},
}
func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
@@ -84,6 +114,103 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
return result
}
func MergeSemconvColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
return result
}
for _, res := range result.Data.Results {
scalarData, ok := res.(*qbtypes.ScalarData)
if !ok {
continue
}
serverAddressKeyIdx := -1
serverAddressKeyLegacyIdx := -1
for i, col := range scalarData.Columns {
if col.Name == serverAddressKey {
serverAddressKeyIdx = i
} else if col.Name == serverAddressKeyLegacy {
serverAddressKeyLegacyIdx = i
}
}
if serverAddressKeyIdx == -1 || serverAddressKeyLegacyIdx == -1 {
continue
}
var newRows [][]any
for _, row := range scalarData.Data {
if len(row) <= serverAddressKeyIdx || len(row) <= serverAddressKeyLegacyIdx {
continue
}
var serverName any
if isValidValue(row[serverAddressKeyIdx]) {
serverName = row[serverAddressKeyIdx]
} else if isValidValue(row[serverAddressKeyLegacyIdx]) {
serverName = row[serverAddressKeyLegacyIdx]
}
if serverName != nil {
newRow := make([]any, len(row)-1)
newRow[0] = serverName
targetIdx := 1
for i, val := range row {
if i != serverAddressKeyLegacyIdx && i != serverAddressKeyIdx {
if targetIdx < len(newRow) {
newRow[targetIdx] = val
targetIdx++
}
}
}
newRows = append(newRows, newRow)
}
}
newColumns := make([]*qbtypes.ColumnDescriptor, len(scalarData.Columns)-1)
targetIdx := 0
for i, col := range scalarData.Columns {
if i == serverAddressKeyIdx {
newCol := &qbtypes.ColumnDescriptor{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: col.FieldDataType,
FieldContext: col.FieldContext,
Signal: col.Signal,
},
QueryName: col.QueryName,
AggregationIndex: col.AggregationIndex,
Meta: col.Meta,
Type: col.Type,
}
newColumns[targetIdx] = newCol
targetIdx++
} else if i != serverAddressKeyLegacyIdx {
newColumns[targetIdx] = col
targetIdx++
}
}
scalarData.Columns = newColumns
scalarData.Data = newRows
}
return result
}
func isValidValue(val any) bool {
if val == nil {
return false
}
if str, ok := val.(string); ok {
return str != "" && str != "n/a"
}
return true
}
func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRangeResponse {
filteredResults := make([]*qbtypes.QueryRangeResponse, 0, len(results))
@@ -134,7 +261,7 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
for _, label := range series.Labels {
if label.Key.Name == derivedKeyHTTPHost {
if label.Key.Name == serverAddressKeyLegacy || label.Key.Name == serverAddressKey {
if strVal, ok := label.Value.(string); ok {
if net.ParseIP(strVal) != nil {
return false
@@ -147,10 +274,12 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
func shouldIncludeRow(row *qbtypes.RawRow) bool {
if row.Data != nil {
if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
for _, key := range []string{serverAddressKeyLegacy, serverAddressKey} {
if domainVal, ok := row.Data[key]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
}
}
}
@@ -158,8 +287,8 @@ func shouldIncludeRow(row *qbtypes.RawRow) bool {
return true
}
func mergeGroupBy(base qbtypes.GroupByKey, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append([]qbtypes.GroupByKey{base}, additional...)
func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append(base, additional...)
}
func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
@@ -225,10 +354,10 @@ func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: fmt.Sprintf("count_distinct(%s)", derivedKeyHTTPURL)},
{Expression: "count_distinct(http.url)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
}
@@ -244,7 +373,7 @@ func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Qu
{Expression: "max(timestamp)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
}
@@ -260,7 +389,7 @@ func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "rate()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
}
@@ -278,7 +407,7 @@ func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Query
{Expression: "count()"},
},
Filter: filter,
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
}
@@ -294,7 +423,7 @@ func buildTotalSpanQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
{Expression: "count()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
}
@@ -310,7 +439,7 @@ func buildP99Query(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "p99(duration_nano)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
}
@@ -333,10 +462,10 @@ func buildEndpointsInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtyp
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: fmt.Sprintf("rate(%s)", derivedKeyHTTPURL)},
{Expression: "rate(http.url)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(groupByKeyHTTPURL, req.GroupBy),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["url"], req.GroupBy),
},
}
}
@@ -390,7 +519,8 @@ func buildLastSeenInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtype
}
func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
baseExpression := fmt.Sprintf("%s EXISTS AND kind_string = 'Client'", derivedKeyHTTPURL)
baseExpression := fmt.Sprintf("(%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
urlPathKeyLegacy, urlPathKey)
if additionalFilter != nil && additionalFilter.Expression != "" {
// even if it already contains kind_string, we add ours with an AND, so it doesn't matter if the user overrides it.
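The queries above now group by both the current and legacy semantic-convention keys (server.address alongside net.peer.name, url.full alongside http.url), and MergeSemconvColumns later collapses the two server columns into one per row. A standalone sketch of that per-row precedence with made-up values; it mirrors the isValidValue check but is not the actual response handling:

package main

import "fmt"

// isUsable mirrors the spirit of the isValidValue helper above: nil, empty
// strings and "n/a" are treated as missing so the other column can win.
func isUsable(v any) bool {
	if v == nil {
		return false
	}
	if s, ok := v.(string); ok {
		return s != "" && s != "n/a"
	}
	return true
}

func main() {
	// One scalar row carrying both the new and the legacy server columns.
	// Values are made up for illustration only.
	row := map[string]any{
		"server.address": "api.stripe.com",
		"net.peer.name":  "",
		"A":              42.0,
	}

	var domain any
	if isUsable(row["server.address"]) {
		domain = row["server.address"]
	} else if isUsable(row["net.peer.name"]) {
		domain = row["net.peer.name"]
	}

	// The merged result keeps a single domain column per row.
	fmt.Println(domain)
}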

View File

@@ -1,9 +1,8 @@
package thirdpartyapi
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -29,7 +28,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Value: "192.168.1.1",
},
},
@@ -37,7 +36,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Value: "example.com",
},
},
@@ -61,7 +60,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Value: "example.com",
},
},
@@ -85,12 +84,12 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
derivedKeyHTTPHost: "192.168.1.1",
"net.peer.name": "192.168.1.1",
},
},
{
Data: map[string]any{
derivedKeyHTTPHost: "example.com",
"net.peer.name": "example.com",
},
},
},
@@ -107,7 +106,7 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
derivedKeyHTTPHost: "example.com",
"net.peer.name": "example.com",
},
},
},

View File

@@ -80,12 +80,11 @@ func (q *builderQuery[T]) Fingerprint() string {
case qbtypes.LogAggregation:
aggParts = append(aggParts, a.Expression)
case qbtypes.MetricAggregation:
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
a.MetricName,
a.Temporality.StringValue(),
a.TimeAggregation.StringValue(),
a.SpaceAggregation.StringValue(),
a.SpaceAggregationParam.StringValue(),
))
}
}

View File

@@ -1,454 +0,0 @@
package querier
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)
// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}
var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}

View File

@@ -5001,6 +5001,7 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false
@@ -5057,6 +5058,7 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false

View File

@@ -1,10 +1,7 @@
package signoz
import (
"bytes"
"context"
"encoding/json"
"net/http"
"os"
"reflect"
@@ -25,13 +22,10 @@ import (
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
"gopkg.in/yaml.v2"
)
type OpenAPI struct {
@@ -63,10 +57,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -100,67 +90,10 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
return err
}
// The library's MarshalYAML does a JSON round-trip that converts all numbers
// to float64, causing large integers (e.g. epoch millisecond timestamps) to
// render in scientific notation (1.6409952e+12).
jsonData, err := openapi.reflector.Spec.MarshalJSON()
if err != nil {
return err
}
dec := json.NewDecoder(bytes.NewReader(jsonData))
dec.UseNumber()
var v any
if err := dec.Decode(&v); err != nil {
return err
}
convertJSONNumbers(v)
spec, err := yaml.Marshal(v)
spec, err := openapi.reflector.Spec.MarshalYAML()
if err != nil {
return err
}
return os.WriteFile(path, spec, 0o600)
}
// convertJSONNumbers recursively walks a decoded JSON structure and converts
// json.Number values to int64 (preferred) or float64 so that YAML marshaling
// renders them as plain numbers instead of quoted strings.
func convertJSONNumbers(v interface{}) {
switch val := v.(type) {
case map[string]interface{}:
for k, elem := range val {
if n, ok := elem.(json.Number); ok {
if i, err := n.Int64(); err == nil {
val[k] = i
} else if f, err := n.Float64(); err == nil {
val[k] = f
}
} else {
convertJSONNumbers(elem)
}
}
case []interface{}:
for i, elem := range val {
if n, ok := elem.(json.Number); ok {
if i64, err := n.Int64(); err == nil {
val[i] = i64
} else if f, err := n.Float64(); err == nil {
val[i] = f
}
} else {
convertJSONNumbers(elem)
}
}
}
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
querier.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
}

View File

@@ -61,11 +61,13 @@ var (
}
)
type fieldMapper struct {}
type fieldMapper struct {
}
func NewFieldMapper() qbtypes.FieldMapper {
return &fieldMapper{}
}
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
@@ -252,27 +254,12 @@ func (m *fieldMapper) buildFieldForJSON(key *telemetrytypes.TelemetryFieldKey) (
"plan length is less than 2 for promoted path: %s", key.Name)
}
node := plan[1]
promotedExpr := fmt.Sprintf(
"dynamicElement(%s, '%s')",
node.FieldPath(),
node.TerminalConfig.ElemType.StringValue(),
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)",
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
expr,
)
// dynamicElement returns NULL for scalar types or an empty array for array types.
if node.TerminalConfig.ElemType.IsArray {
expr = fmt.Sprintf(
"if(length(%s) > 0, %s, %s)",
promotedExpr,
promotedExpr,
expr,
)
} else {
// promoted column first then body_json column
// TODO(Piyush): Change this in future for better performance
expr = fmt.Sprintf("coalesce(%s, %s)", promotedExpr, expr)
}
}
return expr, nil
@@ -294,7 +281,8 @@ func (m *fieldMapper) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (stri
}
// Build arrayMap expressions for ALL available branches at the root level.
// Iterate branches in deterministic order (JSON then Dynamic)
// Iterate branches in deterministic order (JSON then Dynamic) so generated SQL
// is stable across environments; map iteration order is random in Go.
var arrayMapExpressions []string
for _, node := range plan {
for _, branchType := range node.BranchesInOrder() {
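In the coalesce form shown above, the promoted column is read first and the body_json expression is the fallback. A rough sketch of the resulting SQL expression shape, with placeholder paths and element types standing in for what the JSON access plan provides:

package main

import "fmt"

func main() {
	// Placeholder path and element type; in the statement builder these come
	// from the JSON access plan (FieldPath and TerminalConfig.ElemType).
	promoted := fmt.Sprintf("dynamicElement(%s, '%s')",
		"body_json_promoted.`education`.`score`", "Float64")
	bodyExpr := "dynamicElement(body_json.`education`.`score`, 'Float64')"

	// Promoted column first, body_json as the fallback.
	expr := fmt.Sprintf("coalesce(%s, %s)", promoted, bodyExpr)
	fmt.Println(expr)
}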

View File

@@ -32,6 +32,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
conditions := []string{}
for _, node := range c.key.JSONPlan {
condition, err := c.emitPlannedCondition(node, operator, value, sb)
@@ -72,9 +73,9 @@ func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONA
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains:
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains:
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
operator = qbtypes.FilterOperatorNotEqual
}
}
@@ -190,14 +191,13 @@ func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytype
arrayExpr = typedArrayExpr()
}
key := "x"
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, "x", operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
// recurseArrayHops recursively builds array traversal conditions
@@ -279,31 +279,27 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
values, ok := value.([]any)
if !ok {
values = []any{value}
}
if operator == qbtypes.FilterOperatorIn {
return sb.In(fieldExpr, values...), nil
conds := []string{}
for _, v := range values {
if operator == qbtypes.FilterOperatorIn {
conds = append(conds, sb.E(fieldExpr, v))
} else {
conds = append(conds, sb.NE(fieldExpr, v))
}
}
return sb.NotIn(fieldExpr, values...), nil
if operator == qbtypes.FilterOperatorIn {
return sb.Or(conds...), nil
}
return sb.And(conds...), nil
case qbtypes.FilterOperatorExists:
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
// between and not between
case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrBetweenValues
}
if len(values) != 2 {
return "", qbtypes.ErrBetweenValues
}
if operator == qbtypes.FilterOperatorBetween {
return sb.Between(fieldExpr, values[0], values[1]), nil
}
return sb.NotBetween(fieldExpr, values[0], values[1]), nil
default:
return "", qbtypes.ErrUnsupportedOperator
}
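The expanded IN/NOT IN handling in the hunk above rewrites a value list as an OR of per-value equality checks (and NOT IN as an AND of inequalities), so every value flows through the same collision-handled field expression as a plain equals. A standalone sketch of that expansion with the same sqlbuilder helpers; the import path is assumed to be github.com/huandu/go-sqlbuilder and the field expression is a placeholder:

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("count()").From("logs")

	// Placeholder field expression; the real one is produced by
	// DataTypeCollisionHandledFieldName for the JSON path being filtered.
	fieldExpr := "dynamicElement(body_json.`status`, 'String')"
	values := []any{"active", "pending"}

	// IN becomes an OR of per-value equality checks; NOT IN would use
	// sb.NE and sb.And instead.
	conds := []string{}
	for _, v := range values {
		conds = append(conds, sb.E(fieldExpr, v))
	}
	sb.Where(sb.Or(conds...))

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query)
	fmt.Println(args)
}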

View File

@@ -316,7 +316,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -345,7 +345,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -360,55 +360,12 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
{
name: "Dynamic array IN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body.education[].parameters IN [1.65, 1.99]"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> toFloat64(x) IN (?, ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> x IN (?, ?), arrayMap(x->dynamicElement(x, 'Array(Nullable(Float64))'), arrayFilter(x->(dynamicType(x) = 'Array(Nullable(Float64))'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), 1.65, 1.99, 1.65, 1.99, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
{
name: "Integer BETWEEN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester BETWEEN 2 AND 4"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) BETWEEN ? AND ?, arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Integer IN Operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "education[].awards[].semester IN [2, 4]"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (arrayExists(`body_json.education`-> (arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), dynamicElement(`body_json.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')) OR arrayExists(`body_json.education[].awards`-> toFloat64(dynamicElement(`body_json.education[].awards`.`semester`, 'Int64')) IN (?, ?), arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_json.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{float64(2), float64(4), float64(2), float64(4), uint64(1747945619), uint64(1747983448), float64(2), float64(4), float64(2), float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Equals to 'sports' inside array of awards",
requestType: qbtypes.RequestTypeRaw,
@@ -432,7 +389,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(toFloat64OrNull(x) -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? 
AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%4%", float64(4), "%4%", float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -447,7 +404,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%Good%", "Good", "%Good%", "Good", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -535,7 +492,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
disableBodyJSONQuery(t)
}()
statementBuilder := buildJSONTestStatementBuilder(t, "education", "tags")
statementBuilder := buildJSONTestStatementBuilder(t, "education")
cases := []struct {
name string
requestType qbtypes.RequestType
@@ -543,20 +500,6 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected qbtypes.Statement
expectedErr error
}{
{
name: "Has Array promoted uses body fallback",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "has(body.tags, 'production')"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(if(length(dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))')) > 0, dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))'), dynamicElement(body_json.`tags`, 'Array(Nullable(String))')), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "production", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "Key inside Array(JSON) exists",
requestType: qbtypes.RequestTypeRaw,
@@ -608,7 +551,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -637,7 +580,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -652,7 +595,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},

View File

@@ -3,12 +3,12 @@ package telemetrylogs
import (
"context"
"fmt"
"reflect"
"strconv"
"strings"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
@@ -41,55 +41,31 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
}
func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
if operator.IsArrayOperator() && reflect.ValueOf(value).Kind() != reflect.Slice {
value = []any{value}
}
// closure to calculate the data type of the value
var closure func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any)
closure = func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
// check if the value is a int, float, string, bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
var scalerType telemetrytypes.FieldDataType
if len(v) > 0 {
// Note: [[...]] Slices inside Slices are not handled yet
if reflect.ValueOf(v[0]).Kind() == reflect.Slice {
return telemetrytypes.FieldDataTypeUnspecified, value
}
scalerType, _ = closure(v[0], key)
}
arrayType := telemetrytypes.ScalerFieldTypeToArrayFieldType[scalerType]
switch {
// decide on the field data type based on the key
case key.FieldDataType.IsArray():
return arrayType, v
default:
// TODO(Piyush): backward compatibility for the old String based JSON QB queries
if strings.HasSuffix(key.Name, telemetrytypes.ArrayAnyIndexSuffix) {
return arrayType, v
}
return scalerType, v
}
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
// check if the value is a int, float, string, bool
valueType := telemetrytypes.FieldDataTypeUnspecified
switch v := value.(type) {
case []any:
// take the first element and infer the type
if len(v) > 0 {
valueType, _ = InferDataType(v[0], operator, key)
}
return valueType, value
return valueType, v
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
valueType = telemetrytypes.FieldDataTypeFloat64
case string:
valueType, value = parseStrValue(v, operator)
case bool:
valueType = telemetrytypes.FieldDataTypeBool
}
// calculate the data type of the value
return closure(value, key)
// check if it is array
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}
return valueType, value
}
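
A minimal usage sketch of `InferDataType` follows, for orientation only. The `FilterOperatorEqual`/`FilterOperatorIn` constants and the `telemetrylogs` import path are assumptions made for illustration and are not shown in this diff.

```go
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/telemetrylogs" // assumed import path for InferDataType
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	// Key whose name carries the "[]" suffix, so an inferred scalar type is
	// expected to be reported as its array counterpart.
	key := &telemetrytypes.TelemetryFieldKey{Name: "education[].parameters"}

	// Scalar value: the Go type of the value drives the inference.
	dt, v := telemetrylogs.InferDataType(1.65, qbtypes.FilterOperatorEqual, key) // operator constant assumed
	fmt.Println(dt, v)

	// Slice value: the type of the first element decides the element type.
	dt, v = telemetrylogs.InferDataType([]any{2, 4}, qbtypes.FilterOperatorIn, key) // operator constant assumed
	fmt.Println(dt, v)
}
```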
func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {

View File

@@ -421,38 +421,6 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
},
expectedErr: nil,
},
{
name: "IN operator with json search",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "body.user_names[*] IN 'john_doe'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "has with json search",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "has(body.user_names[*], 'john_doe')",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
}
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()

View File

@@ -112,8 +112,7 @@ func (t *telemetryMetaStore) buildBodyJSONPaths(ctx context.Context,
}
for _, fieldKey := range fieldKeys {
promotedKey := strings.Split(fieldKey.Name, telemetrytypes.ArraySep)[0]
fieldKey.Materialized = promoted.Contains(promotedKey)
fieldKey.Materialized = promoted.Contains(fieldKey.Name)
fieldKey.Indexes = indexes[fieldKey.Name]
}
@@ -502,8 +501,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
pathConditions := []string{}
for _, path := range paths {
split := strings.Split(path, telemetrytypes.ArraySep)
pathConditions = append(pathConditions, sb.Equal("path", split[0]))
pathConditions = append(pathConditions, sb.Equal("path", path))
}
sb.Where(sb.Or(pathConditions...))

View File

@@ -123,7 +123,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
origTimeAgg := query.Aggregations[0].TimeAggregation
origGroupBy := slices.Clone(query.GroupBy)
if query.Aggregations[0].Type == metrictypes.HistogramType {
if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
query.Aggregations[0].Type != metrictypes.ExpHistogramType {
// add le in the group by if doesn't exist
leExists := false
for _, g := range query.GroupBy {
@@ -153,11 +154,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// make the time aggregation rate and space aggregation sum
if query.Aggregations[0].SpaceAggregation.IsPercentile() {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
} else {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
}
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
}
@@ -527,7 +524,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
sb := sqlbuilder.NewSelectBuilder()
@@ -580,34 +577,6 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
sb.Select("ts")
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
switch query.Aggregations[0].SpaceAggregationParam.(type) {
case metrictypes.ComparisonSpaceAggregationParam:
aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].SpaceAggregationParam.(metrictypes.ComparisonSpaceAggregationParam))
if err != nil {
return nil, err
}
sb.SelectMore(aggQuery)
default:
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
}
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)

View File

@@ -1,7 +1,6 @@
package telemetrymetrics
import (
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
@@ -309,17 +308,3 @@ func AggregationColumnForSamplesTable(
}
return aggregationColumn, nil
}
func AggregationQueryForHistogramCount(params metrictypes.ComparisonSpaceAggregationParam) (string, error) {
histogramCountThreshold := params.Threshold
switch params.Operater {
case "<=":
return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
case ">":
return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) < %f) + (argMinIf(value, toFloat64(le), toFloat64(le) >= %f) - argMaxIf(value, toFloat64(le), toFloat64(le) < %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) < %f)) / (minIf(toFloat64(le), toFloat64(le) >= %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
default:
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
}
}
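
The removed helper encodes, as a ClickHouse expression, a linear interpolation of the cumulative bucket count at an arbitrary threshold: take the count of the last bucket at or below the threshold and add a fraction of the next bucket proportional to where the threshold falls between the two `le` boundaries; the `>` branch subtracts that estimate from the total. A hedged Go sketch of the same arithmetic over in-memory buckets (hypothetical types; the production path runs entirely in SQL):

```go
package main

import (
	"fmt"
	"sort"
)

// bucket is a cumulative histogram bucket: the count of observations with value <= le.
type bucket struct {
	le    float64
	count float64
}

// countAtOrBelow interpolates how many observations fall at or below threshold,
// mirroring the argMaxIf/argMinIf expression in the removed helper:
// c_lo + (c_hi - c_lo) * (t - le_lo) / (le_hi - le_lo).
func countAtOrBelow(buckets []bucket, threshold float64) float64 {
	sort.Slice(buckets, func(i, j int) bool { return buckets[i].le < buckets[j].le })

	var lo, hi *bucket
	for i := range buckets {
		if buckets[i].le <= threshold {
			lo = &buckets[i]
		} else {
			hi = &buckets[i]
			break
		}
	}
	switch {
	case lo == nil: // threshold sits below the first boundary
		return 0
	case hi == nil: // threshold sits above the last boundary
		return lo.count
	default:
		frac := (threshold - lo.le) / (hi.le - lo.le)
		return lo.count + (hi.count-lo.count)*frac
	}
}

func main() {
	buckets := []bucket{{le: 100, count: 40}, {le: 250, count: 70}, {le: 500, count: 100}}
	fmt.Println(countAtOrBelow(buckets, 175))       // 55: halfway between the 100 and 250 buckets
	fmt.Println(100 - countAtOrBelow(buckets, 175)) // 45: the ">" variant, total minus the estimate
}
```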

View File

@@ -260,11 +260,11 @@ type MetricHighlightsResponse struct {
ActiveTimeSeries uint64 `json:"activeTimeSeries" required:"true"`
}
// MetricAttributesRequest represents the query parameters for the metric attributes endpoint.
// MetricAttributesRequest represents the payload for the metric attributes endpoint.
type MetricAttributesRequest struct {
MetricName string `json:"-"`
Start *int64 `query:"start"`
End *int64 `query:"end"`
MetricName string `json:"metricName" required:"true"`
Start *int64 `json:"start,omitempty"`
End *int64 `json:"end,omitempty"`
}
// Validate ensures MetricAttributesRequest contains acceptable values.
@@ -273,6 +273,10 @@ func (req *MetricAttributesRequest) Validate() error {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
}
if req.MetricName == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required")
}
if req.Start != nil && req.End != nil {
if *req.Start >= *req.End {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start (%d) must be less than end (%d)", *req.Start, *req.End)
@@ -282,6 +286,17 @@ func (req *MetricAttributesRequest) Validate() error {
return nil
}
// UnmarshalJSON validates input immediately after decoding.
func (req *MetricAttributesRequest) UnmarshalJSON(data []byte) error {
type raw MetricAttributesRequest
var decoded raw
if err := json.Unmarshal(data, &decoded); err != nil {
return err
}
*req = MetricAttributesRequest(decoded)
return req.Validate()
}
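
The alias type (`type raw MetricAttributesRequest`) avoids infinite recursion into the custom unmarshaler, and validating inside UnmarshalJSON means every caller that decodes the body gets validation for free. A standalone illustration of the same pattern with a toy struct (not the SigNoz type):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

type attributesRequest struct {
	MetricName string `json:"metricName"`
	Start      *int64 `json:"start,omitempty"`
	End        *int64 `json:"end,omitempty"`
}

func (r *attributesRequest) validate() error {
	if r.MetricName == "" {
		return errors.New("metricName is required")
	}
	if r.Start != nil && r.End != nil && *r.Start >= *r.End {
		return fmt.Errorf("start (%d) must be less than end (%d)", *r.Start, *r.End)
	}
	return nil
}

// UnmarshalJSON decodes into an alias type (which has no custom unmarshaler,
// so there is no recursion) and then validates immediately.
func (r *attributesRequest) UnmarshalJSON(data []byte) error {
	type raw attributesRequest
	var decoded raw
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	*r = attributesRequest(decoded)
	return r.validate()
}

func main() {
	var req attributesRequest
	err := json.Unmarshal([]byte(`{"metricName": "http.server.duration", "start": 10, "end": 5}`), &req)
	fmt.Println(err) // start (10) must be less than end (5)
}
```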
// MetricAttribute represents a single attribute with its values and count.
type MetricAttribute struct {
Key string `json:"key" required:"true"`
@@ -295,48 +310,6 @@ type MetricAttributesResponse struct {
TotalKeys int64 `json:"totalKeys" required:"true"`
}
// ListMetricsParams represents the query parameters for the list metrics endpoint.
type ListMetricsParams struct {
Start *int64 `query:"start"`
End *int64 `query:"end"`
Limit int `query:"limit"`
Search string `query:"searchText"`
}
// Validate ensures ListMetricsParams contains acceptable values.
func (p *ListMetricsParams) Validate() error {
if p.Start != nil && *p.Start <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start must be greater than 0")
}
if p.End != nil && *p.End <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "end must be greater than 0")
}
if p.Start != nil && p.End != nil && *p.Start >= *p.End {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start (%d) must be less than end (%d)", *p.Start, *p.End)
}
if p.Limit < 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be negative")
}
if p.Limit == 0 {
p.Limit = 100
}
if p.Limit > 5000 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must not exceed 5000")
}
return nil
}
// ListMetric represents a single metric with its metadata in the list metrics response.
type ListMetric struct {
MetricName string `json:"metricName" required:"true"`
Description string `json:"description" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true"`
MetricUnit string `json:"unit" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}
// ListMetricsResponse represents the response for the list metrics endpoint.
type ListMetricsResponse struct {
Metrics []ListMetric `json:"metrics" required:"true" nullable:"true"`
type MetricNameParams struct {
MetricName string `query:"metricName" required:"true"`
}

View File

@@ -2,7 +2,6 @@ package metrictypes
import (
"database/sql/driver"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -173,6 +172,7 @@ var (
func (TimeAggregation) Enum() []any {
return []any{
TimeAggregationUnspecified,
TimeAggregationLatest,
TimeAggregationSum,
TimeAggregationAvg,
@@ -190,22 +190,22 @@ type SpaceAggregation struct {
}
var (
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)
func (SpaceAggregation) Enum() []any {
return []any{
SpaceAggregationUnspecified,
SpaceAggregationSum,
SpaceAggregationAvg,
SpaceAggregationMin,
@@ -216,7 +216,6 @@ func (SpaceAggregation) Enum() []any {
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
SpaceAggregationHistogramCount,
}
}
@@ -259,22 +258,3 @@ type MetricTableHints struct {
type MetricValueFilter struct {
Value float64
}
type SpaceAggregationParam interface {
StringValue() string
}
type NoSpaceAggregationParam struct{}
func (_ NoSpaceAggregationParam) StringValue() string {
return "{}"
}
type ComparisonSpaceAggregationParam struct {
Operater string `json:"operator"`
Threshold float64 `json:"limit"`
}
func (cso ComparisonSpaceAggregationParam) StringValue() string {
return fmt.Sprintf("{\"operator\": \"%s\", \"limit\": \"%f\"}", cso.Operater, cso.Threshold)
}

View File

@@ -10,35 +10,10 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type Step struct{ time.Duration }
var _ jsonschema.Exposer = Step{}
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
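
The removed schema documents that Step accepts either a Go duration string or a bare number of seconds; the partially shown UnmarshalJSON below is what implements that. A self-contained sketch of a type with the same dual behaviour (assumed semantics, not the SigNoz implementation):

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// step accepts either a duration string ("60s", "5m") or a number of seconds (60).
type step struct{ time.Duration }

func (s *step) UnmarshalJSON(b []byte) error {
	// Numeric form first: interpret the value as seconds.
	var secs float64
	if err := json.Unmarshal(b, &secs); err == nil {
		s.Duration = time.Duration(secs * float64(time.Second))
		return nil
	}
	// Otherwise expect a Go duration string.
	var str string
	if err := json.Unmarshal(b, &str); err != nil {
		return err
	}
	d, err := time.ParseDuration(str)
	if err != nil {
		return err
	}
	s.Duration = d
	return nil
}

func main() {
	var a, b step
	_ = json.Unmarshal([]byte(`60`), &a)
	_ = json.Unmarshal([]byte(`"5m"`), &b)
	fmt.Println(a.Duration, b.Duration) // 1m0s 5m0s
}
```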
func (s *Step) UnmarshalJSON(b []byte) error {
if len(b) == 0 {
return nil
@@ -186,17 +161,6 @@ func (f FilterOperator) IsStringSearchOperator() bool {
}
}
// IsArrayOperator returns true if the operator works with array values only
func (f FilterOperator) IsArrayOperator() bool {
switch f {
case FilterOperatorIn, FilterOperatorNotIn,
FilterOperatorBetween, FilterOperatorNotBetween:
return true
default:
return false
}
}
type OrderDirection struct {
valuer.String
}
@@ -206,14 +170,6 @@ var (
OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
)
// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}
var (
OrderDirectionMap = map[string]OrderDirection{
"asc": OrderDirectionAsc,
@@ -236,19 +192,6 @@ var (
ReduceToMedian = ReduceTo{valuer.NewString("median")}
)
// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}
// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
// reduceTo can be one of: last, sum, avg, min, max, count, median
// if reduceTo is not recognized, the function returns the original series
@@ -446,8 +389,6 @@ type MetricAggregation struct {
TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
// space aggregation to apply to the query
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
// param for space aggregation if needed
SpaceAggregationParam metrictypes.SpaceAggregationParam `json:"spaceAggregationParam"`
// table hints to use for the query
TableHints *metrictypes.MetricTableHints `json:"-"`
// value filter to apply to the query
@@ -456,40 +397,6 @@ type MetricAggregation struct {
ReduceTo ReduceTo `json:"reduceTo,omitempty"`
}
func (m *MetricAggregation) UnmarshalJSON(data []byte) error {
type Alias MetricAggregation
aux := &struct {
SpaceAggregationParam json.RawMessage `json:"spaceAggregationParam"`
*Alias
}{
Alias: (*Alias)(m),
}
if err := json.Unmarshal(data, &aux); err != nil {
return err
}
// If no param provided
if len(aux.SpaceAggregationParam) == 0 || string(aux.SpaceAggregationParam) == "null" {
m.SpaceAggregationParam = metrictypes.NoSpaceAggregationParam{}
return nil
}
switch m.SpaceAggregation {
case metrictypes.SpaceAggregationHistogramCount:
var p metrictypes.ComparisonSpaceAggregationParam
if err := json.Unmarshal(aux.SpaceAggregationParam, &p); err != nil {
return err
}
m.SpaceAggregationParam = p
default:
m.SpaceAggregationParam = metrictypes.NoSpaceAggregationParam{}
}
return nil
}
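
Concretely, the removed unmarshaler only gives spaceAggregationParam a concrete type when the space aggregation is histogram_count; every other aggregation falls back to the empty param. A small sketch of that dispatch with stand-in types, including the payload shape it accepted (field names are taken from the structs in this diff; everything else is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for the types shown in the diff above.
type comparisonParam struct {
	Operator  string  `json:"operator"`
	Threshold float64 `json:"limit"`
}

type aggregation struct {
	SpaceAggregation string          `json:"spaceAggregation"`
	RawParam         json.RawMessage `json:"spaceAggregationParam"`
	Param            any             `json:"-"`
}

// decodeParam mirrors the dispatch in the removed UnmarshalJSON: only
// histogram_count gets a typed param; everything else is left untyped.
func (a *aggregation) decodeParam() error {
	if a.SpaceAggregation != "histogram_count" || len(a.RawParam) == 0 {
		return nil
	}
	var p comparisonParam
	if err := json.Unmarshal(a.RawParam, &p); err != nil {
		return err
	}
	a.Param = p
	return nil
}

func main() {
	raw := `{"spaceAggregation":"histogram_count","spaceAggregationParam":{"operator":"<=","limit":250}}`
	var a aggregation
	_ = json.Unmarshal([]byte(raw), &a)
	_ = a.decodeParam()
	fmt.Printf("%+v\n", a.Param) // {Operator:<= Threshold:250}
}
```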
// Copy creates a deep copy of MetricAggregation
func (m MetricAggregation) Copy() MetricAggregation {
c := m

View File

@@ -36,30 +36,6 @@ var (
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)
// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}
// Validate checks if the FunctionName is valid and one of the known types
func (fn FunctionName) Validate() error {
validFunctions := []FunctionName{

View File

@@ -16,17 +16,6 @@ var (
JoinTypeCross = JoinType{valuer.NewString("cross")}
)
// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}
type QueryRef struct {
Name string `json:"name"`
}

View File

@@ -2,8 +2,6 @@ package querybuildertypesv5
import (
"context"
"github.com/swaggest/jsonschema-go"
)
type Query interface {
@@ -31,12 +29,4 @@ type ExecStats struct {
StepIntervals map[string]uint64 `json:"stepIntervals,omitempty"`
}
var _ jsonschema.Preparer = &ExecStats{}
// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}
type TimeRange struct{ From, To uint64 } // ms since epoch

View File

@@ -16,17 +16,3 @@ var (
QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
QueryTypePromQL = QueryType{valuer.NewString("promql")}
)
// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type QueryEnvelope struct {
@@ -19,71 +18,6 @@ type QueryEnvelope struct {
Spec any `json:"spec"`
}
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}
var _ jsonschema.OneOfExposer = QueryEnvelope{}
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}
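
The removed oneOf variants document QueryEnvelope as a discriminated union: the type field decides which spec schema applies. A brief sketch of what that looks like on the wire, decoding the discriminator and leaving the spec raw (the "promql" type string appears elsewhere in this diff; the builder type string and spec contents are assumed for illustration):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// envelope mirrors the shape documented by the removed oneOf variants:
// a "type" discriminator plus a type-specific "spec".
type envelope struct {
	Type string          `json:"type"`
	Spec json.RawMessage `json:"spec"`
}

func main() {
	// Two envelopes from a hypothetical composite query; the spec bodies are illustrative only.
	raw := `[
	  {"type": "promql", "spec": {"query": "sum(rate(http_requests_total[5m]))"}},
	  {"type": "builder_query", "spec": {"signal": "metrics", "aggregations": [{"metricName": "http.server.duration"}]}}
	]`

	var queries []envelope
	if err := json.Unmarshal([]byte(raw), &queries); err != nil {
		panic(err)
	}
	for _, q := range queries {
		// The discriminator picks the concrete spec type to decode into;
		// this is exactly what the removed JSONSchemaOneOf variants documented.
		fmt.Printf("%s -> %d bytes of spec\n", q.Type, len(q.Spec))
	}
}
```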
// UnmarshalJSON implements a custom JSON unmarshaler for QueryEnvelope.
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
var shadow struct {
@@ -196,12 +130,6 @@ type CompositeQuery struct {
Queries []QueryEnvelope `json:"queries"`
}
// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}
// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
type Alias CompositeQuery
@@ -264,16 +192,6 @@ var (
TextBoxVariableType = VariableType{valuer.NewString("text")}
)
// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}
type VariableItem struct {
Type VariableType `json:"type"`
Value any `json:"value"`
@@ -299,12 +217,6 @@ type QueryRangeRequest struct {
FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
}
// PrepareJSONSchema adds description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
return nil
}
func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
stepsMap := make(map[string]int64)
for _, query := range r.CompositeQuery.Queries {

View File

@@ -30,15 +30,3 @@ var (
func (r RequestType) IsAggregation() bool {
return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}
// Enum implements jsonschema.Enum; returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type QBEvent struct {
@@ -43,17 +42,6 @@ type QueryData struct {
Results []any `json:"results"`
}
var _ jsonschema.OneOfExposer = QueryData{}
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}
type QueryRangeResponse struct {
Type RequestType `json:"type"`
Data QueryData `json:"data"`
@@ -64,14 +52,6 @@ type QueryRangeResponse struct {
QBEvent *QBEvent `json:"-"`
}
var _ jsonschema.Preparer = &QueryRangeResponse{}
// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}
type TimeSeriesData struct {
QueryName string `json:"queryName"`
Aggregations []*AggregationBucket `json:"aggregations"`
@@ -179,14 +159,6 @@ var (
ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
)
// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}
type ColumnDescriptor struct {
telemetrytypes.TelemetryFieldKey
QueryName string `json:"queryName"`

View File

@@ -23,10 +23,8 @@ const (
// e.g., "body.status" where "body." is the prefix
BodyJSONStringSearchPrefix = "body."
ArraySep = jsontypeexporter.ArraySeparator
ArraySepSuffix = "[]"
// TODO(Piyush): Remove once we've migrated to the new array syntax
ArrayAnyIndex = "[*]."
ArrayAnyIndexSuffix = "[*]"
ArrayAnyIndex = "[*]."
)
type TelemetryFieldKey struct {

View File

@@ -172,18 +172,3 @@ func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
}
return true
}
// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
return []any{
FieldContextMetric,
FieldContextLog,
FieldContextSpan,
// FieldContextTrace,
FieldContextResource,
// FieldContextScope,
FieldContextAttribute,
// FieldContextEvent,
FieldContextBody,
}
}

View File

@@ -93,14 +93,6 @@ var (
FieldDataTypeArrayFloat64: "Array(Float64)",
FieldDataTypeArrayBool: "Array(Bool)",
}
ScalerFieldTypeToArrayFieldType = map[FieldDataType]FieldDataType{
FieldDataTypeString: FieldDataTypeArrayString,
FieldDataTypeBool: FieldDataTypeArrayBool,
FieldDataTypeNumber: FieldDataTypeArrayNumber,
FieldDataTypeInt64: FieldDataTypeArrayInt64,
FieldDataTypeFloat64: FieldDataTypeArrayFloat64,
}
)
func (f FieldDataType) CHDataType() string {
@@ -177,19 +169,3 @@ func (f FieldDataType) TagDataType() string {
return "string"
}
}
// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
return []any{
FieldDataTypeString,
FieldDataTypeBool,
FieldDataTypeFloat64,
FieldDataTypeInt64,
FieldDataTypeNumber,
// FieldDataTypeArrayString,
// FieldDataTypeArrayFloat64,
// FieldDataTypeArrayBool,
// FieldDataTypeArrayInt64,
// FieldDataTypeArrayNumber,
}
}

Some files were not shown because too many files have changed in this diff