Compare commits


6 Commits

Author SHA1 Message Date
Vinícius Lourenço
e4b2c5d894 chore: authz helpers 2026-02-19 17:39:05 -03:00
Abhi kumar
9f4d9eef46 fix: added fix for tooltip prox on hover in uplot (#10345)
* fix: added fix for tooltip prox on hover in uplot

* fix: added fix for series highlighting on focus

* fix: added support for stepinterval

* chore: added tests

* chore: pr review comments
2026-02-19 15:47:42 +05:30
Yunus M
d9cddbfb42 fix: hide scrollbar in variable selector as we expand the container horizontally (#10357) 2026-02-19 08:20:41 +00:00
Srikanth Chekuri
0828c13f7c chore: add list metrics (#10352) 2026-02-19 07:45:14 +00:00
Nikhil Soni
2c558b9bf7 fix: fix inconsistent use of http attribute in ext. api (#10169)
* fix: fix inconsistent use of http attribute in ext. api

HTTP attributes like http.url and url.full, along with server.name and net.peer.name,
were used inconsistently, leading to bugs in aggregation queries. They were also
expensive to query, since these attributes are stored as JSON instead of direct
columns. Using columns like http_url optimises these queries, since the column is
populated from all relevant attributes during ingestion itself.

* fix: switch to using http_host instead of external_http_url

external_http_url stores the hostname, but the name
is confusing, so switching to http_host

* fix: use constants defined where possible

* fix: fix old attribute usage in tests
2026-02-19 06:21:32 +00:00
Yunus M
48b588fe48 chore: migrate .cursorrules to .cursor/rules/ format (#10351) 2026-02-19 11:28:02 +05:30
57 changed files with 4042 additions and 4198 deletions

.gitignore vendored
View File

@@ -1,8 +1,11 @@
node_modules
# editor
.vscode
!.vscode/settings.json
.zed
.idea
deploy/docker/environment_tiny/common_test
frontend/node_modules
@@ -31,8 +34,6 @@ frontend/yarn-debug.log*
frontend/yarn-error.log*
frontend/src/constants/env.ts
.idea
**/build
**/storage
**/locust-scripts/__pycache__/
@@ -229,5 +230,3 @@ cython_debug/
pyrightconfig.json
# cursor files
frontend/.cursor/

View File

@@ -17,7 +17,5 @@
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
},
"python-envs.defaultEnvManager": "ms-python.python:system",
"python-envs.pythonProjects": []
}
}

View File

@@ -455,6 +455,38 @@ components:
nullable: true
type: array
type: object
MetricsexplorertypesListMetric:
properties:
description:
type: string
isMonotonic:
type: boolean
metricName:
type: string
temporality:
$ref: '#/components/schemas/MetrictypesTemporality'
type:
$ref: '#/components/schemas/MetrictypesType'
unit:
type: string
required:
- metricName
- description
- type
- unit
- temporality
- isMonotonic
type: object
MetricsexplorertypesListMetricsResponse:
properties:
metrics:
items:
$ref: '#/components/schemas/MetricsexplorertypesListMetric'
nullable: true
type: array
required:
- metrics
type: object
MetricsexplorertypesMetricAlert:
properties:
alertId:
@@ -492,19 +524,6 @@ components:
- values
- valueCount
type: object
MetricsexplorertypesMetricAttributesRequest:
properties:
end:
nullable: true
type: integer
metricName:
type: string
start:
nullable: true
type: integer
required:
- metricName
type: object
MetricsexplorertypesMetricAttributesResponse:
properties:
attributes:
@@ -2521,149 +2540,6 @@ paths:
summary: Update auth domain
tags:
- authdomains
/api/v1/export_raw_data:
get:
deprecated: false
description: This endpoints allows simple query exporting raw data for traces
and logs
operationId: HandleExportRawDataGET
parameters:
- in: query
name: format
schema:
default: csv
enum:
- csv
- jsonl
type: string
- in: query
name: source
schema:
default: logs
enum:
- logs
- traces
type: string
- in: query
name: start
schema:
minimum: 0
type: integer
- in: query
name: end
schema:
minimum: 0
type: integer
- in: query
name: limit
schema:
default: 10000
maximum: 50000
minimum: 1
type: integer
- in: query
name: filter
schema:
type: string
- in: query
name: columns
schema:
items:
type: string
type: array
- in: query
name: order_by
schema:
type: string
responses:
"200":
content:
application/json:
schema:
type: string
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Export raw data
tags:
- logs
- traces
post:
deprecated: false
description: This endpoints allows complex query exporting raw data for traces
and logs
operationId: HandleExportRawDataPOST
parameters:
- in: query
name: format
schema:
default: csv
enum:
- csv
- jsonl
type: string
requestBody:
content:
application/json:
schema:
description: Request body for the v5 query range endpoint. Supports
builder queries (traces, logs, metrics), formulas, joins, trace operators,
PromQL, and ClickHouse SQL queries.
properties:
compositeQuery:
$ref: '#/components/schemas/Querybuildertypesv5CompositeQuery'
end:
minimum: 0
type: integer
formatOptions:
$ref: '#/components/schemas/Querybuildertypesv5FormatOptions'
noCache:
type: boolean
requestType:
$ref: '#/components/schemas/Querybuildertypesv5RequestType'
schemaVersion:
type: string
start:
minimum: 0
type: integer
variables:
additionalProperties:
$ref: '#/components/schemas/Querybuildertypesv5VariableItem'
type: object
type: object
responses:
"200":
content:
application/json:
schema:
type: string
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Export raw data
tags:
- logs
- traces
/api/v1/fields/keys:
get:
deprecated: false
@@ -4892,14 +4768,83 @@ paths:
summary: Search ingestion keys for workspace
tags:
- gateway
/api/v2/metric/alerts:
/api/v2/metrics:
get:
deprecated: false
description: This endpoint returns a list of distinct metric names within the
specified time range
operationId: ListMetrics
parameters:
- in: query
name: start
schema:
nullable: true
type: integer
- in: query
name: end
schema:
nullable: true
type: integer
- in: query
name: limit
schema:
type: integer
- in: query
name: searchText
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/MetricsexplorertypesListMetricsResponse'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: List metric names
tags:
- metrics
/api/v2/metrics/{metric_name}/alerts:
get:
deprecated: false
description: This endpoint returns associated alerts for a specified metric
operationId: GetMetricAlerts
parameters:
- in: query
name: metricName
- in: path
name: metric_name
required: true
schema:
type: string
@@ -4947,14 +4892,80 @@ paths:
summary: Get metric alerts
tags:
- metrics
/api/v2/metric/dashboards:
/api/v2/metrics/{metric_name}/attributes:
get:
deprecated: false
description: This endpoint returns attribute keys and their unique values for
a specified metric
operationId: GetMetricAttributes
parameters:
- in: query
name: start
schema:
nullable: true
type: integer
- in: query
name: end
schema:
nullable: true
type: integer
- in: path
name: metric_name
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/MetricsexplorertypesMetricAttributesResponse'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get metric attributes
tags:
- metrics
/api/v2/metrics/{metric_name}/dashboards:
get:
deprecated: false
description: This endpoint returns associated dashboards for a specified metric
operationId: GetMetricDashboards
parameters:
- in: query
name: metricName
- in: path
name: metric_name
required: true
schema:
type: string
@@ -5002,15 +5013,15 @@ paths:
summary: Get metric dashboards
tags:
- metrics
/api/v2/metric/highlights:
/api/v2/metrics/{metric_name}/highlights:
get:
deprecated: false
description: This endpoint returns highlights like number of datapoints, total
time series, active time series, last received time for a specified metric
operationId: GetMetricHighlights
parameters:
- in: query
name: metricName
- in: path
name: metric_name
required: true
schema:
type: string
@@ -5059,6 +5070,67 @@ paths:
tags:
- metrics
/api/v2/metrics/{metric_name}/metadata:
get:
deprecated: false
description: This endpoint returns metadata information like metric description,
unit, type, temporality, monotonicity for a specified metric
operationId: GetMetricMetadata
parameters:
- in: path
name: metric_name
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/MetricsexplorertypesMetricMetadata'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get metric metadata
tags:
- metrics
post:
deprecated: false
description: This endpoint helps to update metadata information like metric
@@ -5114,123 +5186,6 @@ paths:
summary: Update metric metadata
tags:
- metrics
/api/v2/metrics/attributes:
post:
deprecated: false
description: This endpoint returns attribute keys and their unique values for
a specified metric
operationId: GetMetricAttributes
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/MetricsexplorertypesMetricAttributesRequest'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/MetricsexplorertypesMetricAttributesResponse'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get metric attributes
tags:
- metrics
/api/v2/metrics/metadata:
get:
deprecated: false
description: This endpoint returns metadata information like metric description,
unit, type, temporality, monotonicity for a specified metric
operationId: GetMetricMetadata
parameters:
- in: query
name: metricName
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/MetricsexplorertypesMetricMetadata'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get metric metadata
tags:
- metrics
/api/v2/metrics/stats:
post:
deprecated: false

View File

@@ -0,0 +1,74 @@
---
description: Core testing conventions - imports, rendering, MSW, interactions, queries
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Testing Conventions
## Imports
Always import from the test harness, never directly from `@testing-library/react`:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
```
## Router
Use the built-in router in `render`:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on their values.
## MSW
Global MSW server runs automatically. Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) =>
res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large response fixtures in `mocks-server/__mockdata_`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level events not covered by `userEvent` (e.g., scroll, resize). Wrap in `act(...)` if needed.
- Always `await` interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
## Timers
No global fake timers. Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries: `getByRole` > `findByRole` > `getByLabelText` > visible text > `data-testid` (last resort).
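A minimal sketch of the preference order (reusing the `user` from `userEvent.setup` above; the role and test-id names are illustrative):
```ts
// Preferred: role-based queries mirror what assistive technology sees
await user.click(screen.getByRole('button', { name: /save/i }));
// Async variant for elements that appear after a fetch
expect(
  await screen.findByRole('heading', { name: /results/i }),
).toBeInTheDocument();
// Last resort: only when no accessible attribute exists
screen.getByTestId('legend-item');
```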
## Anti-patterns
- Never import from `@testing-library/react` directly
- Never use global fake timers
- Never wrap `render` in `act(...)`
- Never mock infra dependencies locally (router, react-query)
- Limit to 3-5 focused tests per file

View File

@@ -0,0 +1,54 @@
---
description: When to use global vs local mocks in tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Mock Strategy
## Use Global Mocks For
High-frequency dependencies (20+ test files):
- Core infrastructure: react-router-dom, react-query, antd
- Browser APIs: ResizeObserver, matchMedia, localStorage
- Utility libraries: date-fns, lodash
Available global mock files (from jest.config.ts):
- `uplot` -> `__mocks__/uplotMock.ts`
## Use Local Mocks For
- Business logic dependencies (API endpoints, custom hooks, domain components)
- Test-specific behavior (different data per test, error scenarios, loading states)
## Decision Tree
```
Used in 20+ test files?
YES -> Global mock
NO -> Business logic or test-specific?
YES -> Local mock
NO -> Consider global if usage grows
```
## Correct Usage
```ts
// Global mocks are already available - just import
import { useLocation } from 'react-router-dom';
// Local mocks for business logic
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData),
}));
```
## Anti-patterns
```ts
// Never re-mock globally mocked dependencies locally
jest.mock('react-router-dom', () => ({ ... }));
// Never put test-specific data in global mocks
jest.mock('../api/tracesService', () => ({ getTraces: jest.fn(() => specificTestData) }));
```

View File

@@ -0,0 +1,54 @@
---
description: TypeScript type safety requirements for Jest tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# TypeScript Type Safety in Tests
All Jest tests must be fully type-safe. Never use `any`.
## Mock Function Typing
```ts
// Use jest.mocked for module mocks
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
// Use jest.MockedFunction for standalone mocks
const mockFetch = jest.fn() as jest.MockedFunction<(id: number) => Promise<User>>;
```
## Mock Data
Define interfaces for all mock data:
```ts
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
const mockProps: ComponentProps = {
title: 'Test',
data: [mockUser],
onSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
};
```
## Hook Mocking Pattern
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
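A sketch of the helper pattern, assuming `rqSuccess(data)` wraps typed data in a successful React Query result shape and is exported from the test harness (both are assumptions; check the repo's test utils for the real signature):
```ts
import { rqSuccess } from 'tests/test-utils'; // assumed export location
import useFoo from 'hooks/useFoo';

jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);

// Typed success result without hand-building isLoading/isError/etc.
mockUseFoo.mockReturnValue(rqSuccess({ id: 1, name: 'John' }));
```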
## Checklist
- All mock functions use `jest.MockedFunction<T>` or `jest.mocked()`
- All mock data has proper interfaces
- No `any` types in test files
- Component props are typed
- API response types are defined

View File

@@ -1,484 +0,0 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.
# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.
# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.
**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid the `any` type; use proper typing instead
# Unit Testing Focus
Focus on critical functionality (business logic, utility functions, component behavior)
Mock dependencies (API calls, external modules) before imports
Test multiple data scenarios (valid inputs, invalid inputs, edge cases)
Write maintainable tests with descriptive names grouped in describe blocks
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)
**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases
**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`
# Repo-specific Testing Conventions
## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.
## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.
## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */ as any);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```
## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3-5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# TypeScript Type Safety Examples
## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
id: number;
name: string;
email: string;
}
interface ApiResponse<T> {
data: T;
status: number;
message: string;
}
// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;
// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
data: { id: 1, name: 'John Doe', email: 'john@example.com' },
status: 200,
message: 'Success'
});
// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```
## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
title: string;
data: User[];
onUserSelect: (user: User) => void;
isLoading?: boolean;
}
const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
// Component implementation
};
describe('TestComponent', () => {
it('should render with proper props', () => {
// Arrange - Type the props properly
const mockProps: ComponentProps = {
title: 'Test Title',
data: [{ id: 1, name: 'John', email: 'john@example.com' }],
onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
isLoading: false
};
// Act
render(<TestComponent {...mockProps} />);
// Assert
expect(screen.getByText('Test Title')).toBeInTheDocument();
});
});
```
## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
user: User | null;
loading: boolean;
error: string | null;
refetch: () => void;
}
const useUserData = (id: number): UseUserDataReturn => {
// Hook implementation
};
describe('useUserData', () => {
it('should return user data with proper typing', () => {
// Arrange
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
mockFetchUser.mockResolvedValue({
data: mockUser,
status: 200,
message: 'Success'
});
// Act
const { result } = renderHook(() => useUserData(1));
// Assert
expect(result.current.user).toEqual(mockUser);
expect(result.current.loading).toBe(false);
expect(result.current.error).toBeNull();
});
});
```
## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
pathname: '/traces',
search: '',
hash: '',
state: null,
key: 'test-key',
...overrides,
});
// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```
# TypeScript Configuration for Jest
## Required Jest Configuration
```json
// jest.config.ts
{
"preset": "ts-jest/presets/js-with-ts-esm",
"globals": {
"ts-jest": {
"useESM": true,
"isolatedModules": true,
"tsconfig": "<rootDir>/tsconfig.jest.json"
}
},
"extensionsToTreatAsEsm": [".ts", ".tsx"],
"moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```
## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
"extends": "./tsconfig.json",
"compilerOptions": {
"types": ["jest", "@testing-library/jest-dom"],
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node"
},
"include": [
"src/**/*",
"**/*.test.ts",
"**/*.test.tsx",
"__mocks__/**/*"
]
}
```
## Common Type Safety Patterns
### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;
// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```
### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
data: T;
status: number;
}
const mockFetchData = jest.fn() as jest.MockedFunction<
<T>(endpoint: string) => Promise<MockApiResponse<T>>
>;
// Usage
mockFetchData<User>('/users').mockResolvedValue({
data: { id: 1, name: 'John' },
status: 200
});
```
### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';
type TestComponentProps = ComponentProps<typeof TestComponent>;
const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
const defaultProps: TestComponentProps = {
title: 'Test',
data: [],
onSelect: jest.fn(),
...props
};
return render(<TestComponent {...defaultProps} />);
};
```
### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
message: string;
code: number;
details?: Record<string, unknown>;
}
const mockApiError: ApiError = {
message: 'API Error',
code: 500,
details: { endpoint: '/users' }
};
mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```
## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed
# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│ ├─ react-router-dom
│ ├─ react-query
│ ├─ antd components
│ └─ browser APIs
└─ NO → Is it business logic?
├─ YES → Use Local Mock
│ ├─ API endpoints
│ ├─ Custom hooks
│ └─ Domain components
└─ NO → Is it test-specific?
├─ YES → Use Local Mock
│ ├─ Error scenarios
│ ├─ Loading states
│ └─ Specific data
└─ NO → Consider Global Mock
└─ If it becomes frequently used
```
# Common Anti-Patterns to Avoid
❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```
❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => specificTestData)
}));
```
✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```
✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData)
}));
```

View File

@@ -20,17 +20,20 @@ import { useMutation, useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetMetricAlerts200,
GetMetricAlertsParams,
GetMetricAlertsPathParameters,
GetMetricAttributes200,
GetMetricAttributesParams,
GetMetricAttributesPathParameters,
GetMetricDashboards200,
GetMetricDashboardsParams,
GetMetricDashboardsPathParameters,
GetMetricHighlights200,
GetMetricHighlightsParams,
GetMetricHighlightsPathParameters,
GetMetricMetadata200,
GetMetricMetadataParams,
GetMetricMetadataPathParameters,
GetMetricsStats200,
GetMetricsTreemap200,
MetricsexplorertypesMetricAttributesRequestDTO,
ListMetrics200,
ListMetricsParams,
MetricsexplorertypesStatsRequestDTO,
MetricsexplorertypesTreemapRequestDTO,
MetricsexplorertypesUpdateMetricMetadataRequestDTO,
@@ -43,30 +46,128 @@ type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint returns associated alerts for a specified metric
* @summary Get metric alerts
* This endpoint returns a list of distinct metric names within the specified time range
* @summary List metric names
*/
export const getMetricAlerts = (
params: GetMetricAlertsParams,
export const listMetrics = (
params?: ListMetricsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAlerts200>({
url: `/api/v2/metric/alerts`,
return GeneratedAPIInstance<ListMetrics200>({
url: `/api/v2/metrics`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricAlertsQueryKey = (params?: GetMetricAlertsParams) => {
return ['getMetricAlerts', ...(params ? [params] : [])] as const;
export const getListMetricsQueryKey = (params?: ListMetricsParams) => {
return ['listMetrics', ...(params ? [params] : [])] as const;
};
export const getListMetricsQueryOptions = <
TData = Awaited<ReturnType<typeof listMetrics>>,
TError = RenderErrorResponseDTO
>(
params?: ListMetricsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getListMetricsQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof listMetrics>>> = ({
signal,
}) => listMetrics(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ListMetricsQueryResult = NonNullable<
Awaited<ReturnType<typeof listMetrics>>
>;
export type ListMetricsQueryError = RenderErrorResponseDTO;
/**
* @summary List metric names
*/
export function useListMetrics<
TData = Awaited<ReturnType<typeof listMetrics>>,
TError = RenderErrorResponseDTO
>(
params?: ListMetricsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getListMetricsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary List metric names
*/
export const invalidateListMetrics = async (
queryClient: QueryClient,
params?: ListMetricsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getListMetricsQueryKey(params) },
options,
);
return queryClient;
};
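// Usage sketch (illustrative, not part of the generated file): a component can
// pass optional filter params plus react-query options to the generated hook.
// const { data, isLoading } = useListMetrics(
//   { searchText: 'http', limit: 50 },
//   { query: { staleTime: 30_000 } },
// );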
/**
* This endpoint returns associated alerts for a specified metric
* @summary Get metric alerts
*/
export const getMetricAlerts = (
{ metricName }: GetMetricAlertsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAlerts200>({
url: `/api/v2/metrics/${metricName}/alerts`,
method: 'GET',
signal,
});
};
export const getGetMetricAlertsQueryKey = ({
metricName,
}: GetMetricAlertsPathParameters) => {
return ['getMetricAlerts'] as const;
};
export const getGetMetricAlertsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricAlertsParams,
{ metricName }: GetMetricAlertsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -77,13 +178,19 @@ export const getGetMetricAlertsQueryOptions = <
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetMetricAlertsQueryKey(params);
const queryKey =
queryOptions?.queryKey ?? getGetMetricAlertsQueryKey({ metricName });
const queryFn: QueryFunction<Awaited<ReturnType<typeof getMetricAlerts>>> = ({
signal,
}) => getMetricAlerts(params, signal);
}) => getMetricAlerts({ metricName }, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
TError,
TData
@@ -103,7 +210,7 @@ export function useGetMetricAlerts<
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricAlertsParams,
{ metricName }: GetMetricAlertsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -112,7 +219,7 @@ export function useGetMetricAlerts<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricAlertsQueryOptions(params, options);
const queryOptions = getGetMetricAlertsQueryOptions({ metricName }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -128,11 +235,126 @@ export function useGetMetricAlerts<
*/
export const invalidateGetMetricAlerts = async (
queryClient: QueryClient,
params: GetMetricAlertsParams,
{ metricName }: GetMetricAlertsPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricAlertsQueryKey(params) },
{ queryKey: getGetMetricAlertsQueryKey({ metricName }) },
options,
);
return queryClient;
};
/**
* This endpoint returns attribute keys and their unique values for a specified metric
* @summary Get metric attributes
*/
export const getMetricAttributes = (
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAttributes200>({
url: `/api/v2/metrics/${metricName}/attributes`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricAttributesQueryKey = (
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
) => {
return ['getMetricAttributes', ...(params ? [params] : [])] as const;
};
export const getGetMetricAttributesQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getGetMetricAttributesQueryKey({ metricName }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricAttributes>>
> = ({ signal }) => getMetricAttributes({ metricName }, params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricAttributesQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricAttributes>>
>;
export type GetMetricAttributesQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric attributes
*/
export function useGetMetricAttributes<
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricAttributesQueryOptions(
{ metricName },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric attributes
*/
export const invalidateGetMetricAttributes = async (
queryClient: QueryClient,
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricAttributesQueryKey({ metricName }, params) },
options,
);
@@ -144,28 +366,27 @@ export const invalidateGetMetricAlerts = async (
* @summary Get metric dashboards
*/
export const getMetricDashboards = (
params: GetMetricDashboardsParams,
{ metricName }: GetMetricDashboardsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricDashboards200>({
url: `/api/v2/metric/dashboards`,
url: `/api/v2/metrics/${metricName}/dashboards`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricDashboardsQueryKey = (
params?: GetMetricDashboardsParams,
) => {
return ['getMetricDashboards', ...(params ? [params] : [])] as const;
export const getGetMetricDashboardsQueryKey = ({
metricName,
}: GetMetricDashboardsPathParameters) => {
return ['getMetricDashboards'] as const;
};
export const getGetMetricDashboardsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricDashboardsParams,
{ metricName }: GetMetricDashboardsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -177,13 +398,18 @@ export const getGetMetricDashboardsQueryOptions = <
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey(params);
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey({ metricName });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricDashboards>>
> = ({ signal }) => getMetricDashboards(params, signal);
> = ({ signal }) => getMetricDashboards({ metricName }, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
TError,
TData
@@ -203,7 +429,7 @@ export function useGetMetricDashboards<
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricDashboardsParams,
{ metricName }: GetMetricDashboardsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -212,7 +438,10 @@ export function useGetMetricDashboards<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricDashboardsQueryOptions(params, options);
const queryOptions = getGetMetricDashboardsQueryOptions(
{ metricName },
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -228,11 +457,11 @@ export function useGetMetricDashboards<
*/
export const invalidateGetMetricDashboards = async (
queryClient: QueryClient,
params: GetMetricDashboardsParams,
{ metricName }: GetMetricDashboardsPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricDashboardsQueryKey(params) },
{ queryKey: getGetMetricDashboardsQueryKey({ metricName }) },
options,
);
@@ -244,28 +473,27 @@ export const invalidateGetMetricDashboards = async (
* @summary Get metric highlights
*/
export const getMetricHighlights = (
params: GetMetricHighlightsParams,
{ metricName }: GetMetricHighlightsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricHighlights200>({
url: `/api/v2/metric/highlights`,
url: `/api/v2/metrics/${metricName}/highlights`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricHighlightsQueryKey = (
params?: GetMetricHighlightsParams,
) => {
return ['getMetricHighlights', ...(params ? [params] : [])] as const;
export const getGetMetricHighlightsQueryKey = ({
metricName,
}: GetMetricHighlightsPathParameters) => {
return ['getMetricHighlights'] as const;
};
export const getGetMetricHighlightsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricHighlightsParams,
{ metricName }: GetMetricHighlightsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -277,13 +505,18 @@ export const getGetMetricHighlightsQueryOptions = <
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey(params);
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey({ metricName });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricHighlights>>
> = ({ signal }) => getMetricHighlights(params, signal);
> = ({ signal }) => getMetricHighlights({ metricName }, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
TError,
TData
@@ -303,7 +536,7 @@ export function useGetMetricHighlights<
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricHighlightsParams,
{ metricName }: GetMetricHighlightsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -312,7 +545,10 @@ export function useGetMetricHighlights<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricHighlightsQueryOptions(params, options);
const queryOptions = getGetMetricHighlightsQueryOptions(
{ metricName },
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -328,11 +564,115 @@ export function useGetMetricHighlights<
*/
export const invalidateGetMetricHighlights = async (
queryClient: QueryClient,
params: GetMetricHighlightsParams,
{ metricName }: GetMetricHighlightsPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricHighlightsQueryKey(params) },
{ queryKey: getGetMetricHighlightsQueryKey({ metricName }) },
options,
);
return queryClient;
};
/**
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
* @summary Get metric metadata
*/
export const getMetricMetadata = (
{ metricName }: GetMetricMetadataPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
url: `/api/v2/metrics/${metricName}/metadata`,
method: 'GET',
signal,
});
};
export const getGetMetricMetadataQueryKey = ({
metricName,
}: GetMetricMetadataPathParameters) => {
return ['getMetricMetadata'] as const;
};
export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricMetadataPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey({ metricName });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricMetadata>>
> = ({ signal }) => getMetricMetadata({ metricName }, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricMetadataQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricMetadata>>
>;
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric metadata
*/
export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricMetadataPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricMetadataQueryOptions({ metricName }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric metadata
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
{ metricName }: GetMetricMetadataPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricMetadataQueryKey({ metricName }) },
options,
);
@@ -438,189 +778,6 @@ export const useUpdateMetricMetadata = <
return useMutation(mutationOptions);
};
/**
* This endpoint returns attribute keys and their unique values for a specified metric
* @summary Get metric attributes
*/
export const getMetricAttributes = (
metricsexplorertypesMetricAttributesRequestDTO: MetricsexplorertypesMetricAttributesRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAttributes200>({
url: `/api/v2/metrics/attributes`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: metricsexplorertypesMetricAttributesRequestDTO,
signal,
});
};
export const getGetMetricAttributesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
> => {
const mutationKey = ['getMetricAttributes'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof getMetricAttributes>>,
{ data: MetricsexplorertypesMetricAttributesRequestDTO }
> = (props) => {
const { data } = props ?? {};
return getMetricAttributes(data);
};
return { mutationFn, ...mutationOptions };
};
export type GetMetricAttributesMutationResult = NonNullable<
Awaited<ReturnType<typeof getMetricAttributes>>
>;
export type GetMetricAttributesMutationBody = MetricsexplorertypesMetricAttributesRequestDTO;
export type GetMetricAttributesMutationError = RenderErrorResponseDTO;
/**
* @summary Get metric attributes
*/
export const useGetMetricAttributes = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
> => {
const mutationOptions = getGetMetricAttributesMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
* @summary Get metric metadata
*/
export const getMetricMetadata = (
params: GetMetricMetadataParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
url: `/api/v2/metrics/metadata`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricMetadataQueryKey = (
params?: GetMetricMetadataParams,
) => {
return ['getMetricMetadata', ...(params ? [params] : [])] as const;
};
export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricMetadata>>
> = ({ signal }) => getMetricMetadata(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricMetadataQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricMetadata>>
>;
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric metadata
*/
export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricMetadataQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric metadata
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
params: GetMetricMetadataParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricMetadataQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint provides list of metrics with their number of samples and timeseries for the given time range
* @summary Get metrics statistics

View File

@@ -646,6 +646,35 @@ export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
tags?: string[] | null;
}
export interface MetricsexplorertypesListMetricDTO {
/**
* @type string
*/
description: string;
/**
* @type boolean
*/
isMonotonic: boolean;
/**
* @type string
*/
metricName: string;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @type string
*/
unit: string;
}
export interface MetricsexplorertypesListMetricsResponseDTO {
/**
* @type array
* @nullable true
*/
metrics: MetricsexplorertypesListMetricDTO[] | null;
}
export interface MetricsexplorertypesMetricAlertDTO {
/**
* @type string
@@ -682,23 +711,6 @@ export interface MetricsexplorertypesMetricAttributeDTO {
values: string[] | null;
}
export interface MetricsexplorertypesMetricAttributesRequestDTO {
/**
* @type integer
* @nullable true
*/
end?: number | null;
/**
* @type string
*/
metricName: string;
/**
* @type integer
* @nullable true
*/
start?: number | null;
}
export interface MetricsexplorertypesMetricAttributesResponseDTO {
/**
* @type array
@@ -2963,14 +2975,42 @@ export type SearchIngestionKeys200 = {
status?: string;
};
export type GetMetricAlertsParams = {
export type ListMetricsParams = {
/**
* @type integer
* @nullable true
* @description undefined
*/
start?: number | null;
/**
* @type integer
* @nullable true
* @description undefined
*/
end?: number | null;
/**
* @type integer
* @description undefined
*/
limit?: number;
/**
* @type string
* @description undefined
*/
metricName: string;
searchText?: string;
};
export type ListMetrics200 = {
data?: MetricsexplorertypesListMetricsResponseDTO;
/**
* @type string
*/
status?: string;
};
export type GetMetricAlertsPathParameters = {
metricName: string;
};
export type GetMetricAlerts200 = {
data?: MetricsexplorertypesMetricAlertsResponseDTO;
/**
@@ -2979,14 +3019,35 @@ export type GetMetricAlerts200 = {
status?: string;
};
export type GetMetricDashboardsParams = {
/**
* @type string
* @description undefined
*/
export type GetMetricAttributesPathParameters = {
metricName: string;
};
export type GetMetricAttributesParams = {
/**
* @type integer
* @nullable true
* @description undefined
*/
start?: number | null;
/**
* @type integer
* @nullable true
* @description undefined
*/
end?: number | null;
};
export type GetMetricAttributes200 = {
data?: MetricsexplorertypesMetricAttributesResponseDTO;
/**
* @type string
*/
status?: string;
};
export type GetMetricDashboardsPathParameters = {
metricName: string;
};
export type GetMetricDashboards200 = {
data?: MetricsexplorertypesMetricDashboardsResponseDTO;
/**
@@ -2995,14 +3056,9 @@ export type GetMetricDashboards200 = {
status?: string;
};
export type GetMetricHighlightsParams = {
/**
* @type string
* @description undefined
*/
export type GetMetricHighlightsPathParameters = {
metricName: string;
};
export type GetMetricHighlights200 = {
data?: MetricsexplorertypesMetricHighlightsResponseDTO;
/**
@@ -3011,33 +3067,20 @@ export type GetMetricHighlights200 = {
status?: string;
};
export type GetMetricMetadataPathParameters = {
metricName: string;
};
export type GetMetricMetadata200 = {
data?: MetricsexplorertypesMetricMetadataDTO;
/**
* @type string
*/
status?: string;
};
export type UpdateMetricMetadataPathParameters = {
metricName: string;
};
export type GetMetricAttributes200 = {
data?: MetricsexplorertypesMetricAttributesResponseDTO;
/**
* @type string
*/
status?: string;
};
export type GetMetricMetadataParams = {
/**
* @type string
* @description undefined
*/
metricName: string;
};
export type GetMetricMetadata200 = {
data?: MetricsexplorertypesMetricMetadataDTO;
/**
* @type string
*/
status?: string;
};
export type GetMetricsStats200 = {
data?: MetricsexplorertypesStatsResponseDTO;
/**

View File

@@ -11,13 +11,10 @@ export const getMetricMetadata = async (
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
const response = await axios.get(`/metrics/${encodedMetricName}/metadata`, {
signal,
headers,
});
return {
httpStatusCode: response.status,
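The same query-parameter-to-path-parameter migration applies to every metric detail call. A minimal sketch of the resulting URL (the metric name is illustrative, and we assume the axios instance's baseURL supplies the `/api/v2` prefix, as the old call implies):
```ts
// Before: GET /api/v2/metrics/metadata?metricName=<name>
// After:  GET /api/v2/metrics/<name>/metadata
const encodedMetricName = encodeURIComponent('http_server_duration'); // illustrative
const url = `/metrics/${encodedMetricName}/metadata`;
```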

View File

@@ -0,0 +1,332 @@
import { ReactElement } from 'react-markdown/lib/react-markdown';
import { ENVIRONMENT } from 'constants/env';
import { BrandedPermission, buildPermission } from 'hooks/useAuthZ/utils';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { render, screen, waitFor } from 'tests/test-utils';
import { GuardAuthZ } from './GuardAuthZ';
const BASE_URL = ENVIRONMENT.baseURL || '';
describe('GuardAuthZ', () => {
const TestChild = (): ReactElement => <div>Protected Content</div>;
const LoadingFallback = (): ReactElement => <div>Loading...</div>;
const ErrorFallback = (error: Error): ReactElement => (
<div>Error occurred: {error.message}</div>
);
const NoPermissionFallback = (_response: {
requiredPermissionName: BrandedPermission;
}): ReactElement => <div>Access denied</div>;
const NoPermissionFallbackWithSuggestions = (response: {
requiredPermissionName: BrandedPermission;
}): ReactElement => (
<div>
Access denied. Required permission: {response.requiredPermissionName}
</div>
);
it('should render children when permission is granted', async () => {
const permission = buildPermission('read', 'dashboard:*');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
render(
<GuardAuthZ relation="read" object="dashboard:*">
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(screen.getByText('Protected Content')).toBeInTheDocument();
});
});
it('should render fallbackOnLoading when loading', () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(200), ctx.json({}));
}),
);
render(
<GuardAuthZ
relation="read"
object="dashboard:*"
fallbackOnLoading={<LoadingFallback />}
>
<TestChild />
</GuardAuthZ>,
);
expect(screen.getByText('Loading...')).toBeInTheDocument();
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should render null when loading and no fallbackOnLoading provided', () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(200), ctx.json({}));
}),
);
const { container } = render(
<GuardAuthZ relation="read" object="dashboard:*">
<TestChild />
</GuardAuthZ>,
);
expect(container.firstChild).toBeNull();
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should render fallbackOnError when API error occurs', async () => {
const errorMessage = 'Internal Server Error';
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(500), ctx.json({ error: errorMessage }));
}),
);
render(
<GuardAuthZ
relation="read"
object="dashboard:*"
fallbackOnError={ErrorFallback}
>
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(screen.getByText(/Error occurred:/)).toBeInTheDocument();
});
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should pass error object to fallbackOnError function', async () => {
const errorMessage = 'Network request failed';
let receivedError: Error | null = null;
const errorFallbackWithCapture = (error: Error): ReactElement => {
receivedError = error;
return <div>Captured error: {error.message}</div>;
};
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(500), ctx.json({ error: errorMessage }));
}),
);
render(
<GuardAuthZ
relation="read"
object="dashboard:*"
fallbackOnError={errorFallbackWithCapture}
>
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(receivedError).not.toBeNull();
});
expect(receivedError).toBeInstanceOf(Error);
expect(screen.getByText(/Captured error:/)).toBeInTheDocument();
});
it('should render null when error occurs and no fallbackOnError provided', async () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
}),
);
const { container } = render(
<GuardAuthZ relation="read" object="dashboard:*">
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(container.firstChild).toBeNull();
});
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should render fallbackOnNoPermissions when permission is denied', async () => {
const permission = buildPermission('update', 'dashboard:123');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: false,
}),
);
}),
);
render(
<GuardAuthZ
relation="update"
object="dashboard:123"
fallbackOnNoPermissions={NoPermissionFallback}
>
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(screen.getByText('Access denied')).toBeInTheDocument();
});
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should render null when permission is denied and no fallbackOnNoPermissions provided', async () => {
const permission = buildPermission('update', 'dashboard:123');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: false,
}),
);
}),
);
const { container } = render(
<GuardAuthZ relation="update" object="dashboard:123">
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(container.firstChild).toBeNull();
});
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should render null when permissions object is null', async () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(200), ctx.json(null));
}),
);
const { container } = render(
<GuardAuthZ relation="read" object="dashboard:*">
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(container.firstChild).toBeNull();
});
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should pass requiredPermissionName to fallbackOnNoPermissions', async () => {
const permission = buildPermission('update', 'dashboard:123');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: false,
}),
);
}),
);
render(
<GuardAuthZ
relation="update"
object="dashboard:123"
fallbackOnNoPermissions={NoPermissionFallbackWithSuggestions}
>
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(
screen.getByText(/Access denied. Required permission:/),
).toBeInTheDocument();
});
expect(screen.getByText(new RegExp(permission))).toBeInTheDocument();
expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
});
it('should handle different relation and object combinations', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('delete', 'dashboard:456');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (req, res, ctx) => {
const body = req.body as {
permissions: Array<{ relation: string; object: string }>;
};
const perm = body.permissions[0];
const permKey = `${perm.relation}/${perm.object}`;
if (permKey === permission1) {
return res(
ctx.status(200),
ctx.json({
[permission1]: true,
}),
);
}
return res(
ctx.status(200),
ctx.json({
[permission2]: true,
}),
);
}),
);
const { rerender } = render(
<GuardAuthZ relation="read" object="dashboard:*">
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(screen.getByText('Protected Content')).toBeInTheDocument();
});
rerender(
<GuardAuthZ relation="delete" object="dashboard:456">
<TestChild />
</GuardAuthZ>,
);
await waitFor(() => {
expect(screen.getByText('Protected Content')).toBeInTheDocument();
});
});
});

View File

@@ -0,0 +1,50 @@
import { ReactElement } from 'react';
import { useAuthZ } from 'hooks/useAuthZ/useAuthZ';
import {
AuthZObject,
AuthZRelation,
BrandedPermission,
buildPermission,
} from 'hooks/useAuthZ/utils';
export type GuardAuthZProps<R extends AuthZRelation> = {
children: ReactElement;
relation: R;
object: AuthZObject<R>;
fallbackOnLoading?: JSX.Element;
fallbackOnError?: (error: Error) => JSX.Element;
fallbackOnNoPermissions?: (response: {
requiredPermissionName: BrandedPermission;
}) => JSX.Element;
};
export function GuardAuthZ<R extends AuthZRelation>({
children,
relation,
object,
fallbackOnLoading,
fallbackOnError,
fallbackOnNoPermissions,
}: GuardAuthZProps<R>): JSX.Element | null {
const permission = buildPermission<R>(relation, object);
const { permissions, isLoading, error } = useAuthZ([permission]);
if (isLoading) {
return fallbackOnLoading ?? null;
}
if (error) {
return fallbackOnError?.(error) ?? null;
}
if (!permissions?.[permission]?.isGranted) {
return (
fallbackOnNoPermissions?.({
requiredPermissionName: permission,
}) ?? null
);
}
return children;
}
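A minimal usage sketch for the guard; Spinner and DashboardEditor are placeholder components assumed for illustration:

function DashboardEditorPage(): JSX.Element {
  return (
    <GuardAuthZ
      relation="update"
      object="dashboard:123"
      fallbackOnLoading={<Spinner />}
      fallbackOnNoPermissions={({ requiredPermissionName }): JSX.Element => (
        <div>Missing permission: {requiredPermissionName}</div>
      )}
    >
      <DashboardEditor />
    </GuardAuthZ>
  );
}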

View File

@@ -0,0 +1,46 @@
.guard-authz-error-no-authz {
display: flex;
align-items: center;
justify-content: center;
width: 100%;
height: 100%;
padding: 24px;
.guard-authz-error-no-authz-content {
display: flex;
flex-direction: column;
justify-content: flex-start;
gap: 8px;
max-width: 500px;
}
img {
width: 32px;
height: 32px;
}
h3 {
font-size: 18px;
color: var(--bg-vanilla-100);
line-height: 18px;
}
p {
font-size: 14px;
color: var(--bg-vanilla-400);
line-height: 18px;
pre {
display: flex;
align-items: center;
background-color: var(--bg-slate-400);
cursor: pointer;
svg {
margin-left: 2px;
}
}
}
}

View File

@@ -0,0 +1,489 @@
import { ReactElement } from 'react';
import type { RouteComponentProps } from 'react-router-dom';
import { ENVIRONMENT } from 'constants/env';
import { buildPermission } from 'hooks/useAuthZ/utils';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { render, screen, waitFor } from 'tests/test-utils';
import { createGuardedRoute } from './createGuardedRoute';
const BASE_URL = ENVIRONMENT.baseURL || '';
describe('createGuardedRoute', () => {
const TestComponent = ({ testProp }: { testProp: string }): ReactElement => (
<div>Test Component: {testProp}</div>
);
it('should render component when permission is granted', async () => {
const permission = buildPermission('read', 'dashboard:*');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'read',
'dashboard:*',
);
const mockMatch = {
params: {},
isExact: true,
path: '/dashboard',
url: '/dashboard',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
});
});
it('should substitute route parameters in object string', async () => {
const permission = buildPermission('read', 'dashboard:123');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'read',
'dashboard:{id}',
);
const mockMatch = {
params: { id: '123' },
isExact: true,
path: '/dashboard/:id',
url: '/dashboard/123',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
});
});
it('should handle multiple route parameters', async () => {
const permission = buildPermission('update', 'dashboard:123:456');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'update',
'dashboard:{id}:{version}',
);
const mockMatch = {
params: { id: '123', version: '456' },
isExact: true,
path: '/dashboard/:id/:version',
url: '/dashboard/123/456',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
});
});
it('should keep placeholder when route parameter is missing', async () => {
const permission = buildPermission('read', 'dashboard:{id}');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'read',
'dashboard:{id}',
);
const mockMatch = {
params: {},
isExact: true,
path: '/dashboard',
url: '/dashboard',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
});
});
it('should render loading fallback when loading', () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(200), ctx.json({}));
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'read',
'dashboard:*',
);
const mockMatch = {
params: {},
isExact: true,
path: '/dashboard',
url: '/dashboard',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
expect(screen.getByText('SigNoz')).toBeInTheDocument();
expect(
screen.queryByText('Test Component: test-value'),
).not.toBeInTheDocument();
});
it('should render error fallback when API error occurs', async () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'read',
'dashboard:*',
);
const mockMatch = {
params: {},
isExact: true,
path: '/dashboard',
url: '/dashboard',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument();
});
expect(
screen.queryByText('Test Component: test-value'),
).not.toBeInTheDocument();
});
it('should render no permissions fallback when permission is denied', async () => {
const permission = buildPermission('update', 'dashboard:123');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: false,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'update',
'dashboard:{id}',
);
const mockMatch = {
params: { id: '123' },
isExact: true,
path: '/dashboard/:id',
url: '/dashboard/123',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
const heading = document.querySelector('h3');
expect(heading).toBeInTheDocument();
expect(heading?.textContent).toMatch(/permission to view/i);
});
expect(screen.getByText(permission, { exact: false })).toBeInTheDocument();
expect(
screen.queryByText('Test Component: test-value'),
).not.toBeInTheDocument();
});
it('should pass all props to wrapped component', async () => {
const permission = buildPermission('read', 'dashboard:*');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
const ComponentWithMultipleProps = ({
prop1,
prop2,
prop3,
}: {
prop1: string;
prop2: number;
prop3: boolean;
}): ReactElement => (
<div>
{prop1} - {prop2} - {prop3.toString()}
</div>
);
const GuardedComponent = createGuardedRoute(
ComponentWithMultipleProps,
'read',
'dashboard:*',
);
const mockMatch = {
params: {},
isExact: true,
path: '/dashboard',
url: '/dashboard',
};
const props = {
prop1: 'value1',
prop2: 42,
prop3: true,
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText('value1 - 42 - true')).toBeInTheDocument();
});
});
it('should memoize resolved object based on route params', async () => {
const permission1 = buildPermission('read', 'dashboard:123');
const permission2 = buildPermission('read', 'dashboard:456');
let requestCount = 0;
const requestedPermissions: string[] = [];
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (req, res, ctx) => {
requestCount++;
const body = req.body as {
permissions: Array<{ relation: string; object: string }>;
};
const perm = body.permissions[0];
requestedPermissions.push(`${perm.relation}/${perm.object}`);
if (perm.object === 'dashboard:123') {
return res(
ctx.status(200),
ctx.json({
[permission1]: true,
}),
);
}
return res(
ctx.status(200),
ctx.json({
[permission2]: true,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'read',
'dashboard:{id}',
);
const mockMatch1 = {
params: { id: '123' },
isExact: true,
path: '/dashboard/:id',
url: '/dashboard/123',
};
const props1 = {
testProp: 'test-value-1',
match: mockMatch1,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
const { unmount } = render(<GuardedComponent {...props1} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value-1')).toBeInTheDocument();
});
expect(requestCount).toBe(1);
expect(requestedPermissions).toContain('read/dashboard:123');
unmount();
const mockMatch2 = {
params: { id: '456' },
isExact: true,
path: '/dashboard/:id',
url: '/dashboard/456',
};
const props2 = {
testProp: 'test-value-2',
match: mockMatch2,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props2} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value-2')).toBeInTheDocument();
});
expect(requestCount).toBe(2);
expect(requestedPermissions).toContain('read/dashboard:456');
});
it('should handle different relation types', async () => {
const permission = buildPermission('delete', 'dashboard:789');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
[permission]: true,
}),
);
}),
);
const GuardedComponent = createGuardedRoute(
TestComponent,
'delete',
'dashboard:{id}',
);
const mockMatch = {
params: { id: '789' },
isExact: true,
path: '/dashboard/:id',
url: '/dashboard/789',
};
const props = {
testProp: 'test-value',
match: mockMatch,
location: ({} as unknown) as RouteComponentProps['location'],
history: ({} as unknown) as RouteComponentProps['history'],
};
render(<GuardedComponent {...props} />);
await waitFor(() => {
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
});
});
});

View File

@@ -0,0 +1,79 @@
import { ComponentType, ReactElement, useMemo } from 'react';
import { RouteComponentProps } from 'react-router-dom';
import { toast } from '@signozhq/sonner';
import {
AuthZObject,
AuthZRelation,
BrandedPermission,
} from 'hooks/useAuthZ/utils';
import { Copy } from 'lucide-react';
import { useCopyToClipboard } from '../../hooks/useCopyToClipboard';
import ErrorBoundaryFallback from '../../pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import AppLoading from '../AppLoading/AppLoading';
import { GuardAuthZ } from './GuardAuthZ';
import './createGuardedRoute.styles.scss';
const onErrorFallback = (): JSX.Element => <ErrorBoundaryFallback />;
function OnNoPermissionsFallback(response: {
requiredPermissionName: BrandedPermission;
}): ReactElement {
const { copyToClipboard } = useCopyToClipboard();
const onClickToCopy = (): void => {
copyToClipboard(response.requiredPermissionName, 'required-permission');
toast.success('Permission copied to clipboard');
};
return (
<div className="guard-authz-error-no-authz">
<div className="guard-authz-error-no-authz-content">
<img src="/Icons/no-data.svg" alt="No permission" />
<h3>Uh-oh! You don&apos;t have permission to view this page.</h3>
<p>
You need the following permission to view this page:
<br />
<pre onClick={onClickToCopy}>
{response.requiredPermissionName} <Copy size={12} />
</pre>
Ask your SigNoz administrator to grant access.
</p>
</div>
</div>
);
}
// eslint-disable-next-line @typescript-eslint/ban-types
export function createGuardedRoute<P extends object, R extends AuthZRelation>(
Component: ComponentType<P>,
relation: R,
object: AuthZObject<R>,
): ComponentType<P & RouteComponentProps<Record<string, string>>> {
return function GuardedRouteComponent(
props: P & RouteComponentProps<Record<string, string>>,
): ReactElement {
const resolvedObject = useMemo(() => {
const paramPattern = /\{([^}]+)\}/g;
return object.replace(paramPattern, (match, paramName) => {
const paramValue = props.match?.params?.[paramName];
return paramValue !== undefined ? paramValue : match;
}) as AuthZObject<R>;
}, [props.match?.params]);
return (
<GuardAuthZ
relation={relation}
object={resolvedObject}
fallbackOnLoading={<AppLoading />}
fallbackOnError={onErrorFallback}
fallbackOnNoPermissions={(response): ReactElement => (
<OnNoPermissionsFallback {...response} />
)}
>
<Component {...props} />
</GuardAuthZ>
);
};
}
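A hedged wiring sketch: DashboardDetailsPage is a placeholder page component, and the {dashboardId} token is substituted from the matched route params before the permission check runs:

import { Route } from 'react-router-dom';

const GuardedDashboardDetails = createGuardedRoute(
  DashboardDetailsPage, // assumed component
  'read',
  'dashboard:{dashboardId}',
);

// '/dashboard/123' resolves the object to 'dashboard:123' via match.params.
<Route path="/dashboard/:dashboardId" component={GuardedDashboardDetails} />;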

View File

@@ -0,0 +1,3 @@
export { createGuardedRoute } from './createGuardedRoute';
export type { GuardAuthZProps } from './GuardAuthZ';
export { GuardAuthZ } from './GuardAuthZ';

View File

@@ -14,6 +14,6 @@ export const VIEW_TYPES = {
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'net.peer.name',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
} as const;

View File

@@ -4,6 +4,7 @@ import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
import { getTopErrorsQueryPayload } from '../utils';
@@ -215,7 +216,7 @@ describe('TopErrors', () => {
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'net.peer.name' }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.SERVER_NAME }),
op: '=',
value: 'test-domain',
}),

View File

@@ -638,7 +638,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -685,7 +685,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -733,7 +733,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -780,7 +780,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -1302,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'e8a043b7',
key: {
key: 'net.peer.name',
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: '',
},
@@ -2198,7 +2198,7 @@ export const getEndPointZeroStateQueryPayload = (
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -2793,7 +2793,7 @@ export const getStatusCodeBarChartWidgetData = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,

View File

@@ -50,6 +50,10 @@
}
.variable-select {
.ant-select-selector {
overflow-y: hidden !important;
}
.ant-select-item {
display: flex;
align-items: center;

View File

@@ -1,3 +1,4 @@
import { ExecStats } from 'api/v5/v5';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { getInitialStackedBands } from 'container/DashboardContainer/visualization/charts/utils/stackSeriesUtils';
@@ -54,6 +55,13 @@ export function prepareBarPanelConfig({
minTimeScale?: number;
maxTimeScale?: number;
}): UPlotConfigBuilder {
const stepIntervals: ExecStats['stepIntervals'] = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const stepValues = Object.values(stepIntervals);
// Math.min() over an empty list is Infinity; fall back to undefined so the
// builder keeps its default hover proximity instead.
const minStepInterval = stepValues.length > 0 ? Math.min(...stepValues) : undefined;
const builder = buildBaseConfig({
widget,
isDarkMode,
@@ -65,12 +73,7 @@ export function prepareBarPanelConfig({
panelType: PANEL_TYPES.BAR,
minTimeScale,
maxTimeScale,
});
builder.setCursor({
focus: {
prox: 1e3,
},
stepInterval: minStepInterval,
});
if (widget.stackedBarChart) {
@@ -78,12 +81,6 @@ export function prepareBarPanelConfig({
builder.setBands(getInitialStackedBands(seriesCount));
}
const stepIntervals: Record<string, number> = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const seriesList: QueryData[] = apiResponse?.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(

View File

@@ -1,3 +1,4 @@
import { ExecStats } from 'api/v5/v5';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
@@ -14,6 +15,7 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import get from 'lodash-es/get';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -54,6 +56,13 @@ export const prepareUPlotConfig = ({
minTimeScale?: number;
maxTimeScale?: number;
}): UPlotConfigBuilder => {
const stepIntervals: ExecStats['stepIntervals'] = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const stepValues = Object.values(stepIntervals);
// Math.min() over an empty list is Infinity; fall back to undefined so the
// builder keeps its default hover proximity instead.
const minStepInterval = stepValues.length > 0 ? Math.min(...stepValues) : undefined;
const builder = buildBaseConfig({
widget,
isDarkMode,
@@ -65,9 +74,12 @@ export const prepareUPlotConfig = ({
panelType: PANEL_TYPES.TIME_SERIES,
minTimeScale,
maxTimeScale,
stepInterval: minStepInterval,
});
apiResponse.data?.result?.forEach((series) => {
const seriesList = apiResponse.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query

View File

@@ -0,0 +1,233 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { STEP_INTERVAL_MULTIPLIER } from 'lib/uPlotV2/constants';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { PanelMode } from '../../types';
import { buildBaseConfig } from '../baseConfigBuilder';
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
}),
);
jest.mock('lib/uPlotV2/utils', () => ({
calculateWidthBasedOnStepInterval: jest.fn(),
}));
const calculateWidthBasedOnStepIntervalMock = jest.requireMock(
'lib/uPlotV2/utils',
).calculateWidthBasedOnStepInterval as jest.Mock;
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
__esModule: true,
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
}));
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
({
id: 'widget-1',
yAxisUnit: 'ms',
isLogScale: false,
softMin: undefined,
softMax: undefined,
thresholds: [],
...overrides,
} as Widgets);
const createApiResponse = (
overrides: Partial<MetricRangePayloadProps> = {},
): MetricRangePayloadProps =>
({
data: { result: [], resultType: 'matrix', newResult: null },
...overrides,
} as MetricRangePayloadProps);
const baseProps = {
widget: createWidget(),
apiResponse: createApiResponse(),
isDarkMode: true,
panelMode: PanelMode.DASHBOARD_VIEW,
panelType: PANEL_TYPES.TIME_SERIES,
};
describe('buildBaseConfig', () => {
it('returns a UPlotConfigBuilder instance', () => {
const builder = buildBaseConfig(baseProps);
expect(builder).toBeDefined();
expect(typeof builder.getConfig).toBe('function');
expect(typeof builder.getLegendItems).toBe('function');
});
it('configures builder with widgetId and DASHBOARD_VIEW preferences', () => {
const builder = buildBaseConfig({
...baseProps,
panelMode: PanelMode.DASHBOARD_VIEW,
widget: createWidget({ id: 'my-widget' }),
});
expect(builder.getWidgetId()).toBe('my-widget');
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
it('configures builder with IN_MEMORY selection when panelMode is DASHBOARD_EDIT', () => {
const builder = buildBaseConfig({
...baseProps,
panelMode: PanelMode.DASHBOARD_EDIT,
});
expect(builder.getShouldSaveSelectionPreference()).toBe(false);
const config = builder.getConfig();
expect(config.series).toBeDefined();
});
it('passes stepInterval to builder and cursor prox uses width * multiplier', () => {
const stepInterval = 60;
const mockWidth = 100;
calculateWidthBasedOnStepIntervalMock.mockReturnValue(mockWidth);
const builder = buildBaseConfig({
...baseProps,
stepInterval,
});
const config = builder.getConfig();
const prox = config.cursor?.hover?.prox;
expect(typeof prox).toBe('function');
const uPlotInstance = {} as uPlot;
const proxResult = (prox as (u: uPlot) => number)(uPlotInstance);
expect(calculateWidthBasedOnStepIntervalMock).toHaveBeenCalledWith({
uPlotInstance,
stepInterval,
});
expect(proxResult).toBe(mockWidth * STEP_INTERVAL_MULTIPLIER);
});
it('adds x scale with time config and min/max when provided', () => {
const builder = buildBaseConfig({
...baseProps,
minTimeScale: 1000,
maxTimeScale: 2000,
});
const config = builder.getConfig();
expect(config.scales?.x).toBeDefined();
expect(config.scales?.x?.time).toBe(true);
const range = config.scales?.x?.range;
expect(Array.isArray(range)).toBe(true);
expect((range as [number, number])[0]).toBe(1000);
});
it('configures log scale on y axis when widget.isLogScale is true', () => {
const builder = buildBaseConfig({
...baseProps,
widget: createWidget({ isLogScale: true }),
});
const config = builder.getConfig();
expect(config.scales?.y).toBeDefined();
expect(config.scales?.y?.log).toBe(10);
});
it('adds onClick plugin when onClick is a function', () => {
const onClickPlugin = jest.requireMock('lib/uPlotLib/plugins/onClickPlugin')
.default;
const onClick = jest.fn();
buildBaseConfig({
...baseProps,
onClick,
apiResponse: createApiResponse(),
});
expect(onClickPlugin).toHaveBeenCalledWith({
onClick,
apiResponse: expect.any(Object),
});
});
it('does not add onClick plugin when onClick is not a function', () => {
const onClickPlugin = jest.requireMock('lib/uPlotLib/plugins/onClickPlugin')
.default;
const builder = buildBaseConfig({
...baseProps,
});
const config = builder.getConfig();
const plugins = config.plugins ?? [];
expect(
plugins.some((p) => (p as { name?: string }).name === 'onClickPlugin'),
).toBe(false);
expect(onClickPlugin).not.toHaveBeenCalled();
});
it('adds thresholds from widget', () => {
const builder = buildBaseConfig({
...baseProps,
widget: createWidget({
thresholds: [
{
thresholdValue: 80,
thresholdColor: '#ff0000',
thresholdUnit: 'ms',
thresholdLabel: 'High',
},
] as Widgets['thresholds'],
}),
});
const config = builder.getConfig();
const drawHooks = config.hooks?.draw ?? [];
expect(drawHooks.length).toBeGreaterThan(0);
});
it('adds x and y axes with correct scaleKeys and panelType', () => {
const builder = buildBaseConfig(baseProps);
const config = builder.getConfig();
expect(config.axes).toHaveLength(2);
expect(config.axes?.[0].scale).toBe('x');
expect(config.axes?.[1].scale).toBe('y');
});
it('sets tzDate when timezone is provided', () => {
const builder = buildBaseConfig({
...baseProps,
timezone: {
name: 'America/New_York',
value: 'America/New_York',
offset: 'UTC-5',
searchIndex: 'America/New_York',
},
});
const config = builder.getConfig();
expect(config.tzDate).toBeDefined();
expect(typeof config.tzDate).toBe('function');
});
it('leaves tzDate undefined when timezone is not provided', () => {
const builder = buildBaseConfig(baseProps);
const config = builder.getConfig();
expect(config.tzDate).toBeUndefined();
});
it('registers setSelect hook when onDragSelect is provided', () => {
const onDragSelect = jest.fn();
const builder = buildBaseConfig({
...baseProps,
onDragSelect,
});
const config = builder.getConfig();
expect(config.hooks?.setSelect).toBeDefined();
});
});

View File

@@ -26,6 +26,7 @@ export interface BaseConfigBuilderProps {
panelType: PANEL_TYPES;
minTimeScale?: number;
maxTimeScale?: number;
stepInterval?: number;
}
export function buildBaseConfig({
@@ -39,6 +40,7 @@ export function buildBaseConfig({
panelType,
minTimeScale,
maxTimeScale,
stepInterval,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
const tzDate = timezone
? (timestamp: number): Date =>
@@ -56,6 +58,7 @@ export function buildBaseConfig({
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
stepInterval,
});
const thresholdOptions: ThresholdsDrawHookOptions = {

View File

@@ -0,0 +1,444 @@
import { ReactElement } from 'react';
import { renderHook, waitFor } from '@testing-library/react';
import { ENVIRONMENT } from 'constants/env';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { AllTheProviders } from 'tests/test-utils';
import { useAuthZ } from './useAuthZ';
import { BrandedPermission, buildPermission } from './utils';
const BASE_URL = ENVIRONMENT.baseURL || '';
const wrapper = ({ children }: { children: ReactElement }): ReactElement => (
<AllTheProviders>{children}</AllTheProviders>
);
describe('useAuthZ', () => {
it('should fetch and return permissions successfully', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
const mockApiResponse = {
[permission1]: true,
[permission2]: false,
};
const expectedResponse = {
[permission1]: {
isGranted: true,
},
[permission2]: {
isGranted: false,
},
};
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(200), ctx.json(mockApiResponse));
}),
);
const { result } = renderHook(() => useAuthZ([permission1, permission2]), {
wrapper,
});
expect(result.current.isLoading).toBe(true);
expect(result.current.permissions).toBeNull();
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(result.current.error).toBeNull();
expect(result.current.permissions).toEqual(expectedResponse);
});
it('should handle API errors', async () => {
const permission = buildPermission('read', 'dashboard:*');
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
}),
);
const { result } = renderHook(() => useAuthZ([permission]), {
wrapper,
});
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(result.current.error).not.toBeNull();
expect(result.current.permissions).toBeNull();
});
it('should refetch when permissions array changes', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
const permission3 = buildPermission('delete', 'dashboard:456');
let requestCount = 0;
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (req, res, ctx) => {
requestCount++;
const body = req.body as {
permissions: Array<{ relation: string; object: string }>;
};
if (body.permissions.length === 1) {
return res(
ctx.status(200),
ctx.json({
[permission1]: true,
}),
);
}
return res(
ctx.status(200),
ctx.json({
[permission1]: true,
[permission2]: false,
[permission3]: true,
}),
);
}),
);
const { result, rerender } = renderHook<
ReturnType<typeof useAuthZ>,
BrandedPermission[]
>((permissions) => useAuthZ(permissions), {
wrapper,
initialProps: [permission1],
});
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(requestCount).toBe(1);
expect(result.current.permissions).toEqual({
[permission1]: {
isGranted: true,
},
});
rerender([permission1, permission2, permission3]);
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(requestCount).toBe(2);
expect(result.current.permissions).toEqual({
[permission1]: {
isGranted: true,
},
[permission2]: {
isGranted: false,
},
[permission3]: {
isGranted: true,
},
});
});
it('should not refetch when permissions array order changes but content is the same', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
let requestCount = 0;
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
requestCount++;
return res(
ctx.status(200),
ctx.json({
[permission1]: true,
[permission2]: false,
}),
);
}),
);
const { result, rerender } = renderHook<
ReturnType<typeof useAuthZ>,
BrandedPermission[]
>((permissions) => useAuthZ(permissions), {
wrapper,
initialProps: [permission1, permission2],
});
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(requestCount).toBe(1);
rerender([permission2, permission1]);
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(requestCount).toBe(1);
});
it('should handle empty permissions array', async () => {
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
return res(ctx.status(200), ctx.json({}));
}),
);
const { result } = renderHook(() => useAuthZ([]), {
wrapper,
});
expect(result.current.isLoading).toBe(false);
expect(result.current.error).toBeNull();
expect(result.current.permissions).toEqual({});
});
it('should send correct payload format to API', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
let receivedPayload: any = null;
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
receivedPayload = await req.json();
return res(
ctx.status(200),
ctx.json({
[permission1]: true,
[permission2]: false,
}),
);
}),
);
const { result } = renderHook(() => useAuthZ([permission1, permission2]), {
wrapper,
});
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
expect(receivedPayload).toEqual({
permissions: [
{ relation: 'read', object: 'dashboard:*' },
{ relation: 'update', object: 'dashboard:123' },
],
});
});
it('should batch multiple hooks into single flight request', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
const permission3 = buildPermission('delete', 'dashboard:456');
const permission4 = buildPermission('create', 'dashboards');
let requestCount = 0;
const receivedPayloads: any[] = [];
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
requestCount++;
const payload = await req.json();
receivedPayloads.push(payload);
const allPermissions = [permission1, permission2, permission3, permission4];
const response: Record<string, boolean> = {};
payload.permissions.forEach((p: { relation: string; object: string }) => {
const perm = `${p.relation}/${p.object}` as BrandedPermission;
if (allPermissions.includes(perm)) {
response[perm] = perm === permission1 || perm === permission3;
}
});
return res(ctx.status(200), ctx.json(response));
}),
);
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
wrapper,
});
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
wrapper,
});
const { result: result3 } = renderHook(() => useAuthZ([permission3]), {
wrapper,
});
await waitFor(
() => {
expect(result1.current.isLoading).toBe(false);
expect(result2.current.isLoading).toBe(false);
expect(result3.current.isLoading).toBe(false);
},
{ timeout: 200 },
);
expect(requestCount).toBe(1);
expect(receivedPayloads).toHaveLength(1);
expect(receivedPayloads[0].permissions).toHaveLength(3);
expect(receivedPayloads[0].permissions).toEqual(
expect.arrayContaining([
{ relation: 'read', object: 'dashboard:*' },
{ relation: 'update', object: 'dashboard:123' },
{ relation: 'delete', object: 'dashboard:456' },
]),
);
expect(result1.current.permissions).toEqual({
[permission1]: { isGranted: true },
});
expect(result2.current.permissions).toEqual({
[permission2]: { isGranted: false },
});
expect(result3.current.permissions).toEqual({
[permission3]: { isGranted: true },
});
});
it('should create separate batches for calls after single flight window', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
const permission3 = buildPermission('delete', 'dashboard:456');
let requestCount = 0;
const receivedPayloads: any[] = [];
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
requestCount++;
const payload = await req.json();
receivedPayloads.push(payload);
const response: Record<string, boolean> = {};
payload.permissions.forEach((p: { relation: string; object: string }) => {
const perm = `${p.relation}/${p.object}` as BrandedPermission;
response[perm] = perm === permission1 || perm === permission3;
});
return res(ctx.status(200), ctx.json(response));
}),
);
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
wrapper,
});
await waitFor(
() => {
expect(result1.current.isLoading).toBe(false);
},
{ timeout: 200 },
);
expect(requestCount).toBe(1);
expect(receivedPayloads[0].permissions).toHaveLength(1);
await new Promise((resolve) => setTimeout(resolve, 100));
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
wrapper,
});
const { result: result3 } = renderHook(() => useAuthZ([permission3]), {
wrapper,
});
await waitFor(
() => {
expect(result2.current.isLoading).toBe(false);
expect(result3.current.isLoading).toBe(false);
},
{ timeout: 200 },
);
expect(requestCount).toBe(2);
expect(receivedPayloads).toHaveLength(2);
expect(receivedPayloads[1].permissions).toHaveLength(2);
expect(receivedPayloads[1].permissions).toEqual(
expect.arrayContaining([
{ relation: 'update', object: 'dashboard:123' },
{ relation: 'delete', object: 'dashboard:456' },
]),
);
});
it('should not leak state between separate batches', async () => {
const permission1 = buildPermission('read', 'dashboard:*');
const permission2 = buildPermission('update', 'dashboard:123');
const permission3 = buildPermission('delete', 'dashboard:456');
let requestCount = 0;
server.use(
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
requestCount++;
const payload = await req.json();
const response: Record<string, boolean> = {};
payload.permissions.forEach((p: { relation: string; object: string }) => {
const perm = `${p.relation}/${p.object}` as BrandedPermission;
response[perm] = perm === permission1 || perm === permission3;
});
return res(ctx.status(200), ctx.json(response));
}),
);
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
wrapper,
});
await waitFor(
() => {
expect(result1.current.isLoading).toBe(false);
},
{ timeout: 200 },
);
expect(requestCount).toBe(1);
expect(result1.current.permissions).toEqual({
[permission1]: { isGranted: true },
});
await new Promise((resolve) => setTimeout(resolve, 100));
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
wrapper,
});
await waitFor(
() => {
expect(result2.current.isLoading).toBe(false);
},
{ timeout: 200 },
);
expect(requestCount).toBe(2);
expect(result1.current.permissions).toEqual({
[permission1]: { isGranted: true },
});
expect(result2.current.permissions).toEqual({
[permission2]: { isGranted: false },
});
expect(result1.current.permissions).not.toHaveProperty(permission2);
expect(result2.current.permissions).not.toHaveProperty(permission1);
});
});

View File

@@ -0,0 +1,136 @@
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import api from 'api';
import { BrandedPermission } from './utils';
export type UseAuthZPermissionResult = {
isGranted: boolean;
};
export type AuthZCheckResponse = Record<
BrandedPermission,
UseAuthZPermissionResult
>;
export type UseAuthZResult = {
isLoading: boolean;
error: Error | null;
permissions: AuthZCheckResponse | null;
};
let ctx: Promise<AuthZCheckResponse> | null;
let pendingPermissions: BrandedPermission[] = [];
const SINGLE_FLIGHT_WAIT_TIME_MS = 50;
const AUTHZ_CACHE_TIME = 20_000;
function dispatchPermission(
permission: BrandedPermission,
): Promise<AuthZCheckResponse> {
pendingPermissions.push(permission);
if (!ctx) {
let resolve: (v: AuthZCheckResponse) => void, reject: (reason?: any) => void;
ctx = new Promise<AuthZCheckResponse>((r, re) => {
resolve = r;
reject = re;
});
setTimeout(() => {
const copiedPermissions = pendingPermissions.slice();
pendingPermissions = [];
ctx = null;
fetchManyPermissions(copiedPermissions).then(resolve).catch(reject);
}, SINGLE_FLIGHT_WAIT_TIME_MS);
}
return ctx;
}
async function fetchManyPermissions(
permissions: BrandedPermission[],
): Promise<AuthZCheckResponse> {
const permissionsPayload = permissions.map((permission) => {
const [relation, object] = permission.split('/');
return {
relation,
object,
};
});
const permissionsCheckResponse = await api
.post<Record<string, boolean>>('/permissions/check', {
permissions: permissionsPayload,
})
.then((response) => response.data);
return Object.keys(permissionsCheckResponse).reduce<AuthZCheckResponse>(
(acc, permission): AuthZCheckResponse => {
acc[permission as BrandedPermission] = {
isGranted: !!permissionsCheckResponse[permission],
};
return acc;
},
{} as AuthZCheckResponse,
);
}
export function useAuthZ(permissions: BrandedPermission[]): UseAuthZResult {
const queryResults = useQueries(
permissions.map((permission) => {
return {
queryKey: ['authz', permission],
cacheTime: AUTHZ_CACHE_TIME,
refetchOnMount: false,
refetchIntervalInBackground: false,
refetchOnWindowFocus: false,
refetchOnReconnect: true,
queryFn: async (): Promise<AuthZCheckResponse> => {
const response = await dispatchPermission(permission);
return {
[permission]: {
isGranted: response[permission].isGranted,
},
};
},
};
}),
);
const isLoading = useMemo(() => queryResults.some((q) => q.isLoading), [
queryResults,
]);
const error = useMemo(
() =>
!isLoading
? (queryResults.find((q) => !!q.error)?.error as Error) || null
: null,
[isLoading, queryResults],
);
const data = useMemo(() => {
if (isLoading || error) {
return null;
}
return queryResults.reduce((acc, q) => {
if (!q.data) {
return acc;
}
for (const [key, value] of Object.entries(q.data)) {
acc[key as BrandedPermission] = value;
}
return acc;
}, {} as AuthZCheckResponse);
}, [isLoading, error, queryResults]);
return {
isLoading,
error,
permissions: data ?? null,
};
}
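A short consumer sketch: all useAuthZ calls mounted within the 50 ms single-flight window share one POST /api/v1/permissions/check, and each permission result is cached for 20 s. buildPermission comes from './utils'; EditButton is a placeholder:

function DashboardToolbar(): JSX.Element | null {
  const updatePermission = buildPermission('update', 'dashboard:123');
  const { permissions, isLoading, error } = useAuthZ([updatePermission]);
  if (isLoading || error || !permissions) {
    return null;
  }
  return permissions[updatePermission]?.isGranted ? <EditButton /> : null;
}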

View File

@@ -0,0 +1,32 @@
export type AuthZRelation =
| 'list'
| 'create'
| 'read'
| 'update'
| 'delete'
| 'assignee';
export type AuthZResource = 'dashboard';
export type AuthZObject<R extends AuthZRelation> = R extends 'list' | 'create'
? `${AuthZResource}s`
: `${AuthZResource}:${string}`;
export type AuthZPermissionCheck<R extends AuthZRelation> = {
object: AuthZObject<R>;
relation: R;
};
export type BrandedPermission = string & { __brandedPermission: true };
export function buildPermission<R extends AuthZRelation>(
relation: R,
object: AuthZObject<R>,
): BrandedPermission {
return `${relation}/${object}` as BrandedPermission;
}
export function buildObjectString(
resource: AuthZResource,
objectId: string,
): `${AuthZResource}:${string}` {
return `${resource}:${objectId}`;
}
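The conditional AuthZObject type ties the object shape to the relation: collection relations ('list', 'create') take the plural resource name, while item relations take 'resource:id'. A quick sketch:

buildPermission('create', 'dashboards'); // ok: collection relation, plural object
buildPermission('read', 'dashboard:123'); // ok: item relation, 'resource:id' object
// buildPermission('create', 'dashboard:123'); // rejected by AuthZObject<'create'>
buildObjectString('dashboard', '123'); // -> 'dashboard:123'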

View File

@@ -7,11 +7,16 @@ import { merge } from 'lodash-es';
import noop from 'lodash-es/noop';
import uPlot, { Cursor, Hooks, Options } from 'uplot';
import {
DEFAULT_CURSOR_CONFIG,
DEFAULT_PLOT_CONFIG,
STEP_INTERVAL_MULTIPLIER,
} from '../constants';
import { calculateWidthBasedOnStepInterval } from '../utils';
import {
ConfigBuilder,
ConfigBuilderProps,
DEFAULT_CURSOR_CONFIG,
DEFAULT_PLOT_CONFIG,
LegendItem,
SelectionPreferencesSource,
} from './types';
@@ -45,6 +50,8 @@ export class UPlotConfigBuilder extends ConfigBuilder<
private axes: Record<string, UPlotAxisBuilder> = {};
private stepInterval: number | undefined;
readonly scales: UPlotScaleBuilder[] = [];
private bands: uPlot.Band[] = [];
@@ -79,6 +86,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
tzDate,
selectionPreferencesSource,
shouldSaveSelectionPreference,
stepInterval,
} = args ?? {};
if (widgetId) {
this.widgetId = widgetId;
@@ -96,6 +104,10 @@ export class UPlotConfigBuilder extends ConfigBuilder<
this.shouldSaveSelectionPreference = shouldSaveSelectionPreference;
}
if (stepInterval) {
this.stepInterval = stepInterval;
}
this.onDragSelect = noop;
if (onDragSelect) {
this.onDragSelect = onDragSelect;
@@ -372,6 +384,31 @@ export class UPlotConfigBuilder extends ConfigBuilder<
return this.widgetId;
}
/**
* Get cursor configuration
*/
getCursorConfig(): Cursor {
if (this.stepInterval) {
const cursorConfig = {
  hover: {
    // stepInterval is guaranteed truthy inside this branch; the static
    // default prox still comes from DEFAULT_CURSOR_CONFIG via merge below.
    prox: (uPlotInstance: uPlot): number => {
      const width = calculateWidthBasedOnStepInterval({
        uPlotInstance,
        stepInterval: this.stepInterval ?? 0,
      });
      return width * STEP_INTERVAL_MULTIPLIER;
    },
  },
};
return merge({}, DEFAULT_CURSOR_CONFIG, cursorConfig, this.cursor);
}
return merge({}, DEFAULT_CURSOR_CONFIG, this.cursor);
}
/**
* Build the final uPlot.Options configuration
*/
@@ -413,7 +450,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
config.hooks = this.hooks;
config.select = this.select;
config.cursor = merge({}, DEFAULT_CURSOR_CONFIG, this.cursor);
config.cursor = this.getCursorConfig();
config.tzDate = this.tzDate;
config.plugins = this.plugins.length > 0 ? this.plugins : undefined;
config.bands = this.bands.length > 0 ? this.bands : undefined;

View File

@@ -1,6 +1,7 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils';
import uPlot, { Series } from 'uplot';
import {
@@ -291,21 +292,16 @@ function getBarPathBuilder({
idx1: number,
): Series.Paths | null => {
let effectiveBarMaxWidth = barMaxWidth;
const widthBasedOnStepInterval = calculateWidthBasedOnStepInterval({
uPlotInstance: self,
stepInterval,
});
const xScale = self.scales.x as uPlot.Scale | undefined;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = self.valToPos(start, 'x');
const endPx = self.valToPos(end, 'x');
const intervalPx = Math.abs(endPx - startPx);
if (intervalPx > 0) {
effectiveBarMaxWidth =
typeof barMaxWidth === 'number'
? Math.min(barMaxWidth, intervalPx)
: intervalPx;
}
if (widthBasedOnStepInterval > 0) {
effectiveBarMaxWidth = Math.min(
effectiveBarMaxWidth,
widthBasedOnStepInterval,
);
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
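Net effect: bar width is clamped to one step interval's on-screen width. A worked sketch with illustrative numbers (clampToStepInterval is a hypothetical helper, not part of this diff):

// One step interval spanning 30px at the current zoom caps 50px bars at
// 30px; zoomed in so the interval spans 120px, the original 50px cap wins.
const clampToStepInterval = (barMaxWidth: number, intervalPx: number): number =>
  intervalPx > 0 ? Math.min(barMaxWidth, intervalPx) : barMaxWidth;

clampToStepInterval(50, 30); // -> 30
clampToStepInterval(50, 120); // -> 50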

View File

@@ -1,6 +1,10 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import uPlot from 'uplot';
import {
DEFAULT_HOVER_PROXIMITY_VALUE,
STEP_INTERVAL_MULTIPLIER,
} from '../../constants';
import type { SeriesProps } from '../types';
import { DrawStyle, SelectionPreferencesSource } from '../types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder';
@@ -13,6 +17,14 @@ jest.mock(
}),
);
jest.mock('lib/uPlotV2/utils', () => ({
calculateWidthBasedOnStepInterval: jest.fn(),
}));
const calculateWidthBasedOnStepIntervalMock = jest.requireMock(
'lib/uPlotV2/utils',
).calculateWidthBasedOnStepInterval as jest.Mock;
const getStoredSeriesVisibilityMock = jest.requireMock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
) as {
@@ -384,6 +396,52 @@ describe('UPlotConfigBuilder', () => {
expect(config.cursor?.points).toBeDefined();
});
describe('getCursorConfig', () => {
it('returns default cursor merged with custom cursor when no stepInterval', () => {
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
});
const cursorConfig = builder.getCursorConfig();
expect(cursorConfig.drag?.setScale).toBe(false);
expect(cursorConfig.hover?.prox).toBe(DEFAULT_HOVER_PROXIMITY_VALUE);
expect(cursorConfig.points).toBeDefined();
});
it('returns hover prox as DEFAULT_HOVER_PROXIMITY_VALUE when stepInterval is not set', () => {
const builder = new UPlotConfigBuilder();
const cursorConfig = builder.getCursorConfig();
expect(cursorConfig.hover?.prox).toBe(DEFAULT_HOVER_PROXIMITY_VALUE);
});
it('returns hover prox as function when stepInterval is set, computing width * multiplier', () => {
const stepInterval = 60;
const mockWidth = 100;
calculateWidthBasedOnStepIntervalMock.mockReturnValue(mockWidth);
const builder = new UPlotConfigBuilder({ stepInterval });
const cursorConfig = builder.getCursorConfig();
expect(typeof cursorConfig.hover?.prox).toBe('function');
const uPlotInstance = {} as uPlot;
const proxResult = (cursorConfig.hover!.prox as (u: uPlot) => number)(
uPlotInstance,
);
expect(calculateWidthBasedOnStepIntervalMock).toHaveBeenCalledWith({
uPlotInstance,
stepInterval,
});
expect(proxResult).toBe(mockWidth * STEP_INTERVAL_MULTIPLIER);
});
});
it('adds plugins and includes them in config', () => {
const builder = new UPlotConfigBuilder();
const plugin: uPlot.Plugin = {

View File

@@ -1,6 +1,6 @@
import { PrecisionOption } from 'components/Graph/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import uPlot, { Cursor, Options, Series } from 'uplot';
import uPlot, { Series } from 'uplot';
import { ThresholdsDrawHookOptions } from '../hooks/types';
@@ -39,6 +39,7 @@ export interface ConfigBuilderProps {
tzDate?: uPlot.LocalDateFromUnix;
selectionPreferencesSource?: SelectionPreferencesSource;
shouldSaveSelectionPreference?: boolean;
stepInterval?: number;
}
/**
@@ -186,47 +187,3 @@ export interface LegendItem {
color: uPlot.Series['stroke'];
show: boolean;
}
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
focus: {
alpha: 0.3,
},
cursor: {
focus: {
prox: 30,
},
},
legend: {
show: false,
},
padding: [16, 16, 8, 8],
series: [],
hooks: {},
};
const POINTS_FILL_COLOR = '#FFFFFF';
export const DEFAULT_CURSOR_CONFIG: Cursor = {
drag: { setScale: true },
points: {
one: true,
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
width: (_u, _seriesIdx, size) => size / 4,
stroke: (u, seriesIdx): string => {
const points = u.series[seriesIdx]?.points;
const strokeFn =
typeof points?.stroke === 'function' ? points.stroke : undefined;
const strokeValue =
strokeFn !== undefined
? strokeFn(u, seriesIdx)
: typeof points?.stroke === 'string'
? points.stroke
: '';
return `${strokeValue}90`;
},
fill: (): string => POINTS_FILL_COLOR,
},
focus: {
prox: 30,
},
};

View File

@@ -0,0 +1,48 @@
import { Cursor, Options } from 'uplot';
const POINTS_FILL_COLOR = '#FFFFFF';
export const DEFAULT_HOVER_PROXIMITY_VALUE = 30; // only snap if within 30px horizontally
export const DEFAULT_FOCUS_PROXIMITY_VALUE = 1e6;
export const STEP_INTERVAL_MULTIPLIER = 3; // hover prox = step-interval pixel width * this multiplier
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
focus: {
alpha: 0.3,
},
legend: {
show: false,
},
padding: [16, 16, 8, 8],
series: [],
hooks: {},
};
export const DEFAULT_CURSOR_CONFIG: Cursor = {
drag: { setScale: true },
points: {
one: true,
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
width: (_u, _seriesIdx, size) => size / 4,
stroke: (u, seriesIdx): string => {
const points = u.series[seriesIdx]?.points;
const strokeFn =
typeof points?.stroke === 'function' ? points.stroke : undefined;
const strokeValue =
strokeFn !== undefined
? strokeFn(u, seriesIdx)
: typeof points?.stroke === 'string'
? points.stroke
: '';
return `${strokeValue}90`;
},
fill: (): string => POINTS_FILL_COLOR,
},
focus: {
prox: DEFAULT_FOCUS_PROXIMITY_VALUE,
},
hover: {
prox: DEFAULT_HOVER_PROXIMITY_VALUE,
bias: 0,
},
};

View File

@@ -87,7 +87,7 @@ export function shouldShowTooltipForSync(
export function shouldShowTooltipForInteraction(
controller: TooltipControllerState,
): boolean {
return controller.focusedSeriesIndex != null || controller.isAnySeriesActive;
return controller.focusedSeriesIndex != null;
}
export function updateHoverState(

View File

@@ -0,0 +1,70 @@
import uPlot from 'uplot';
import { calculateWidthBasedOnStepInterval } from '../index';
describe('calculateWidthBasedOnStepInterval', () => {
it('returns pixel width between start and start+stepInterval when xScale exists with numeric min', () => {
const valToPos = jest
.fn()
.mockReturnValueOnce(100) // startPx for start
.mockReturnValueOnce(250); // endPx for start + stepInterval
const uPlotInstance = ({
scales: { x: { min: 1000 } },
valToPos,
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(valToPos).toHaveBeenCalledWith(1000, 'x');
expect(valToPos).toHaveBeenCalledWith(1060, 'x');
expect(result).toBe(150); // Math.abs(250 - 100)
});
it('returns absolute pixel width when endPx is less than startPx', () => {
const valToPos = jest.fn().mockReturnValueOnce(250).mockReturnValueOnce(100);
const uPlotInstance = ({
scales: { x: { min: 0 } },
valToPos,
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(result).toBe(150); // Math.abs(100 - 250)
});
it('returns 0 when xScale is undefined', () => {
const uPlotInstance = ({
scales: { x: undefined },
valToPos: jest.fn(),
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(result).toBe(0);
});
it('returns 0 when xScale.min is not a number', () => {
const uPlotInstance = ({
scales: { x: { min: undefined } },
valToPos: jest.fn(),
} as unknown) as uPlot;
const result = calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval: 60,
});
expect(result).toBe(0);
});
});

View File

@@ -0,0 +1,17 @@
export function calculateWidthBasedOnStepInterval({
uPlotInstance,
stepInterval,
}: {
uPlotInstance: uPlot;
stepInterval: number;
}): number {
const xScale = uPlotInstance.scales.x;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = uPlotInstance.valToPos(start, 'x');
const endPx = uPlotInstance.valToPos(end, 'x');
return Math.abs(endPx - startPx);
}
return 0;
}
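For a concrete sense of the numbers: if the x-scale starts at t=0 and spans 900 seconds rendered across 900px, a stepInterval of 60 maps to a 60px step width, so the hover prox computed by the builder becomes 60 * STEP_INTERVAL_MULTIPLIER = 180px. The figures are illustrative only; the real value depends on the live scale range and plot width at call time.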

View File

@@ -10,6 +10,26 @@ import (
)
func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
if err := router.Handle("/api/v2/metrics", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.ListMetrics),
handler.OpenAPIDef{
ID: "ListMetrics",
Tags: []string{"metrics"},
Summary: "List metric names",
Description: "This endpoint returns a list of distinct metric names within the specified time range",
Request: nil,
RequestQuery: new(metricsexplorertypes.ListMetricsParams),
RequestContentType: "",
Response: new(metricsexplorertypes.ListMetricsResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/metrics/stats", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetStats),
handler.OpenAPIDef{
@@ -48,26 +68,27 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metrics/attributes", handler.New(
if err := router.Handle("/api/v2/metrics/{metric_name}/attributes", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricAttributes),
handler.OpenAPIDef{
ID: "GetMetricAttributes",
Tags: []string{"metrics"},
Summary: "Get metric attributes",
Description: "This endpoint returns attribute keys and their unique values for a specified metric",
Request: new(metricsexplorertypes.MetricAttributesRequest),
RequestContentType: "application/json",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricAttributesRequest),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricAttributesResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
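Since the attributes route now takes the metric as a path parameter and the remaining fields as a query string (rather than a POST body), a client call looks like this minimal sketch; the host, metric name, timestamps, and the exact start/end query keys are assumptions, not confirmed values:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical host and metric; start/end keys assumed to match the
	// bound MetricAttributesRequest struct tags.
	url := "http://localhost:8080/api/v2/metrics/http_server_duration/attributes?start=1693612800000&end=1693699199000"
	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}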
if err := router.Handle("/api/v2/metrics/metadata", handler.New(
if err := router.Handle("/api/v2/metrics/{metric_name}/metadata", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricMetadata),
handler.OpenAPIDef{
ID: "GetMetricMetadata",
@@ -75,7 +96,6 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric metadata",
Description: "This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricMetadata),
ResponseContentType: "application/json",
@@ -106,7 +126,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metric/highlights", handler.New(
if err := router.Handle("/api/v2/metrics/{metric_name}/highlights", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricHighlights),
handler.OpenAPIDef{
ID: "GetMetricHighlights",
@@ -114,7 +134,6 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric highlights",
Description: "This endpoint returns highlights like number of datapoints, totaltimeseries, active time series, last received time for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricHighlightsResponse),
ResponseContentType: "application/json",
@@ -126,7 +145,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metric/alerts", handler.New(
if err := router.Handle("/api/v2/metrics/{metric_name}/alerts", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricAlerts),
handler.OpenAPIDef{
ID: "GetMetricAlerts",
@@ -134,7 +153,6 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric alerts",
Description: "This endpoint returns associated alerts for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricAlertsResponse),
ResponseContentType: "application/json",
@@ -146,7 +164,7 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/metric/dashboards", handler.New(
if err := router.Handle("/api/v2/metrics/{metric_name}/dashboards", handler.New(
provider.authZ.ViewAccess(provider.metricsExplorerHandler.GetMetricDashboards),
handler.OpenAPIDef{
ID: "GetMetricDashboards",
@@ -154,7 +172,6 @@ func (provider *provider) addMetricsExplorerRoutes(router *mux.Router) error {
Summary: "Get metric dashboards",
Description: "This endpoint returns associated dashboards for a specified metric",
Request: nil,
RequestQuery: new(metricsexplorertypes.MetricNameParams),
RequestContentType: "",
Response: new(metricsexplorertypes.MetricDashboardsResponse),
ResponseContentType: "application/json",

View File

@@ -18,7 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -45,7 +44,6 @@ type provider struct {
gatewayHandler gateway.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
rawDataExportHandler rawdataexport.Handler
}
func NewFactory(
@@ -65,7 +63,6 @@ func NewFactory(
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
rawDataExportHandler rawdataexport.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
@@ -88,7 +85,6 @@ func NewFactory(
gatewayHandler,
fieldsHandler,
authzHandler,
rawDataExportHandler,
)
})
}
@@ -113,7 +109,6 @@ func newProvider(
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
rawDataExportHandler rawdataexport.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -136,7 +131,6 @@ func newProvider(
gatewayHandler: gatewayHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
rawDataExportHandler: rawDataExportHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -205,10 +199,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addRawDataExportRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -1,46 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
v5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
)
func (provider *provider) addRawDataExportRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
ID: "HandleExportRawDataGET",
Tags: []string{"logs", "traces"},
Summary: "Export raw data",
Description: "This endpoints allows simple query exporting raw data for traces and logs",
Request: new(types.ExportRawDataQueryParams),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/export_raw_data", handler.New(provider.authZ.ViewAccess(provider.rawDataExportHandler.ExportRawData), handler.OpenAPIDef{
ID: "HandleExportRawDataPOST",
Tags: []string{"logs", "traces"},
Summary: "Export raw data",
Description: "This endpoints allows complex query exporting raw data for traces and logs",
Request: new(struct {
v5.QueryRangeRequest
types.ExportRawDataFormatQueryParam
}),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -13,6 +13,14 @@ import (
"github.com/gorilla/mux"
)
func extractMetricName(req *http.Request) (string, error) {
metricName := mux.Vars(req)["metric_name"]
if metricName == "" {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required in URL path")
}
return metricName, nil
}
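extractMetricName relies on gorilla/mux having matched a route that declares {metric_name}; a standalone sketch of that mechanism, with an illustrative route and port:

package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"
)

func main() {
	r := mux.NewRouter()
	// mux.Vars exposes the {metric_name} path segment to the handler.
	r.HandleFunc("/api/v2/metrics/{metric_name}/metadata", func(w http.ResponseWriter, req *http.Request) {
		fmt.Fprintln(w, mux.Vars(req)["metric_name"])
	})
	_ = http.ListenAndServe(":8080", r)
}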
type handler struct {
module metricsexplorer.Module
}
@@ -24,6 +32,34 @@ func NewHandler(m metricsexplorer.Module) metricsexplorer.Handler {
}
}
func (h *handler) ListMetrics(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
var params metricsexplorertypes.ListMetricsParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if err := params.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.ListMetrics(req.Context(), orgID, &params)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
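Going by the fields the module consumes further down (Search, Limit, Start, End), a typical request would look like GET /api/v2/metrics?search=http&limit=100&start=1693612800000&end=1693699199000, returning each matching metric name enriched with its metadata; the exact query keys depend on the ListMetricsParams struct tags, which are not shown in this diff.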
func (h *handler) GetStats(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
@@ -114,28 +150,23 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
return
}
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
metadataMap, err := h.module.GetMetricMetadataMulti(req.Context(), orgID, []string{params.MetricName})
metricName, err := extractMetricName(req)
if err != nil {
render.Error(rw, err)
return
}
metadata, ok := metadataMap[params.MetricName]
orgID := valuer.MustNewUUID(claims.OrgID)
metadataMap, err := h.module.GetMetricMetadataMulti(req.Context(), orgID, []string{metricName})
if err != nil {
render.Error(rw, err)
return
}
metadata, ok := metadataMap[metricName]
if !ok || metadata == nil {
render.Error(rw, errors.NewNotFoundf(errors.CodeNotFound, "metadata not found for metric %q", params.MetricName))
render.Error(rw, errors.NewNotFoundf(errors.CodeNotFound, "metadata not found for metric %q", metricName))
return
}
@@ -149,19 +180,14 @@ func (h *handler) GetMetricAlerts(rw http.ResponseWriter, req *http.Request) {
return
}
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
metricName, err := extractMetricName(req)
if err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricAlerts(req.Context(), orgID, params.MetricName)
out, err := h.module.GetMetricAlerts(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
@@ -176,19 +202,14 @@ func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request)
return
}
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
metricName, err := extractMetricName(req)
if err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricDashboards(req.Context(), orgID, params.MetricName)
out, err := h.module.GetMetricDashboards(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
@@ -203,19 +224,14 @@ func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request)
return
}
var params metricsexplorertypes.MetricNameParams
if err := binding.Query.BindQuery(req.URL.Query(), &params); err != nil {
metricName, err := extractMetricName(req)
if err != nil {
render.Error(rw, err)
return
}
if params.MetricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
highlights, err := h.module.GetMetricHighlights(req.Context(), orgID, params.MetricName)
highlights, err := h.module.GetMetricHighlights(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
@@ -230,8 +246,21 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)
return
}
metricName, err := extractMetricName(req)
if err != nil {
render.Error(rw, err)
return
}
var in metricsexplorertypes.MetricAttributesRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
if err := binding.Query.BindQuery(req.URL.Query(), &in); err != nil {
render.Error(rw, err)
return
}
in.MetricName = metricName
if err := in.Validate(); err != nil {
render.Error(rw, err)
return
}

View File

@@ -56,6 +56,88 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
}
}
func (m *module) ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
if err := params.Validate(); err != nil {
return nil, err
}
sb := sqlbuilder.NewSelectBuilder()
sb.Select("DISTINCT metric_name")
if params.Start != nil && params.End != nil {
start, end, distributedTsTable, _ := telemetrymetrics.WhichTSTableToUse(uint64(*params.Start), uint64(*params.End), nil)
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
sb.Where(sb.Between("unix_milli", start, end))
} else {
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV41weekTableName))
}
sb.Where(sb.E("__normalized", false))
if params.Search != "" {
searchLower := strings.ToLower(params.Search)
searchLower = strings.ReplaceAll(searchLower, "%", "\\%")
searchLower = strings.ReplaceAll(searchLower, "_", "\\_")
sb.Where(sb.Like("lower(metric_name)", fmt.Sprintf("%%%s%%", searchLower)))
}
sb.OrderBy("metric_name ASC")
sb.Limit(params.Limit)
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(valueCtx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list metrics")
}
defer rows.Close()
metricNames := make([]string, 0)
for rows.Next() {
var name string
if err := rows.Scan(&name); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metric name")
}
metricNames = append(metricNames, name)
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metric names")
}
if len(metricNames) == 0 {
return &metricsexplorertypes.ListMetricsResponse{
Metrics: []metricsexplorertypes.ListMetric{},
}, nil
}
metadata, err := m.GetMetricMetadataMulti(ctx, orgID, metricNames)
if err != nil {
return nil, err
}
metrics := make([]metricsexplorertypes.ListMetric, 0, len(metricNames))
for _, name := range metricNames {
metric := metricsexplorertypes.ListMetric{
MetricName: name,
}
if meta, ok := metadata[name]; ok && meta != nil {
metric.Description = meta.Description
metric.MetricType = meta.MetricType
metric.MetricUnit = meta.MetricUnit
metric.Temporality = meta.Temporality
metric.IsMonotonic = meta.IsMonotonic
}
metrics = append(metrics, metric)
}
return &metricsexplorertypes.ListMetricsResponse{
Metrics: metrics,
}, nil
}
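The search handling above escapes LIKE metacharacters so a user-supplied % or _ matches literally instead of acting as a wildcard, then wraps the term for a substring match. A minimal standalone sketch of the same transformation (the sample search term is made up):

package main

import (
	"fmt"
	"strings"
)

func main() {
	search := "CPU_usage"
	escaped := strings.ToLower(search)
	escaped = strings.ReplaceAll(escaped, "%", "\\%")
	escaped = strings.ReplaceAll(escaped, "_", "\\_")
	// Value passed to sb.Like("lower(metric_name)", ...):
	fmt.Printf("%%%s%%\n", escaped) // prints %cpu\_usage%
}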
func (m *module) GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error) {
if err := req.Validate(); err != nil {
return nil, err

View File

@@ -10,6 +10,7 @@ import (
// Handler exposes HTTP handlers for the metrics module.
type Handler interface {
ListMetrics(http.ResponseWriter, *http.Request)
GetStats(http.ResponseWriter, *http.Request)
GetTreemap(http.ResponseWriter, *http.Request)
GetMetricMetadata(http.ResponseWriter, *http.Request)
@@ -22,6 +23,7 @@ type Handler interface {
// Module represents the metrics module interface.
type Module interface {
ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error)
GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error)
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)

View File

@@ -6,18 +6,17 @@ import (
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"sync"
"time"
"unicode"
"unicode/utf8"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -35,27 +34,31 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
// ExportRawData handles data export requests.
//
// API Documentation:
// Endpoint: GET /api/v1/export_raw_data - logs and traces only (simple queries via query params)
// Endpoint: POST /api/v1/export_raw_data - logs, traces, and trace operator (full QueryRangeRequest in JSON body)
// Endpoint: GET /api/v1/export_raw_data
//
// GET: Converts query params to QueryRangeRequest and delegates to common export logic. No composite_query/trace operator.
// Query Parameters:
//
// GET Query Parameters:
// - source (optional): Type of data to export ["logs" (default), "metrics", "traces"]
// Note: Currently only "logs" is fully supported
//
// - source (optional): ["logs" (default) or "traces"] - metrics not supported
// - format (optional): Output format ["csv" (default), "jsonl"]
// - start (required): Start time (Unix timestamp in nanoseconds)
// - end (required): End time (Unix timestamp in nanoseconds)
// - limit (optional): Max rows to export (cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT)
// - filter (optional): Filter expression
// - columns (optional): Columns to include
// - order_by (optional): Sorting ["column:direction"]
//
// POST Request Body (QueryRangeRequest):
// - start (required): Start time for query (Unix timestamp in nanoseconds)
//
// - Accepts a full QueryRangeRequest JSON body with composite_query containing QueryEnvelope array
// - Supports builder_query and builder_trace_operator types
// - format query param still controls output format
// - end (required): End time for query (Unix timestamp in nanoseconds)
//
// - limit (optional): Maximum number of rows to export
// Constraints: Must be positive and cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT
//
// - filter (optional): Filter expression to apply to the query
//
// - columns (optional): Specific columns to include in export
// Default: all columns are returned
// Format: ["context.field:type", "context.field", "field"]
//
// - order_by (optional): Sorting specification ["column:direction" or "context.field:type:direction"]
// Direction: "asc" or "desc"
// Default: ["timestamp:desc", "id:desc"]
//
// Response Headers:
// - Content-Type: "text/csv" or "application/x-ndjson"
@@ -83,45 +86,75 @@ func NewHandler(module rawdataexport.Module) rawdataexport.Handler {
// Export with filter and ordering:
// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000
// &filter=severity="error"&order_by=timestamp:desc&limit=1000
//
// Export with composite query (POST only):
// POST /api/v1/export_raw_data?format=csv
// Body: {"start":1693612800000000000,"end":1693699199000000000,"composite_query":{"queries":[...]}}
func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
var queryRangeRequest qbtypes.QueryRangeRequest
var format string
if r.Method == http.MethodGet {
var params types.ExportRawDataQueryParams
if err := binding.Query.BindQuery(r.URL.Query(), &params); err != nil {
render.Error(rw, err)
return
}
format = params.Format
var err error
queryRangeRequest, err = buildQueryRangeRequest(&params)
if err != nil {
render.Error(rw, err)
return
}
} else {
var formatParam types.ExportRawDataFormatQueryParam
if err := binding.Query.BindQuery(r.URL.Query(), &formatParam); err != nil {
render.Error(rw, err)
return
}
format = formatParam.Format
if err := json.NewDecoder(r.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid request body: %v", err))
return
}
}
if err := validateAndApplyExportLimits(&queryRangeRequest); err != nil {
source, err := getExportQuerySource(r.URL.Query())
if err != nil {
render.Error(rw, err)
return
}
switch source {
case "logs":
handler.exportLogs(rw, r)
case "traces":
handler.exportTraces(rw, r)
case "metrics":
handler.exportMetrics(rw, r)
default:
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs"))
}
}
func (handler *handler) exportMetrics(rw http.ResponseWriter, r *http.Request) {
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "metrics export is not yet supported"))
}
func (handler *handler) exportTraces(rw http.ResponseWriter, r *http.Request) {
render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "traces export is not yet supported"))
}
func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) {
// Set up response headers
rw.Header().Set("Cache-Control", "no-cache")
rw.Header().Set("Vary", "Accept-Encoding") // Indicate that response varies based on Accept-Encoding
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
rw.Header().Set("Trailer", "X-Response-Complete")
rw.Header().Set("Transfer-Encoding", "chunked")
queryParams := r.URL.Query()
startTime, endTime, err := getExportQueryTimeRange(queryParams)
if err != nil {
render.Error(rw, err)
return
}
limit, err := getExportQueryLimit(queryParams)
if err != nil {
render.Error(rw, err)
return
}
format, err := getExportQueryFormat(queryParams)
if err != nil {
render.Error(rw, err)
return
}
// Set appropriate content type and filename
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
filterExpression := queryParams.Get("filter")
orderByExpression, err := getExportQueryOrderBy(queryParams)
if err != nil {
render.Error(rw, err)
return
}
columns := getExportQueryColumns(queryParams)
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(rw, err)
@@ -134,145 +167,70 @@ func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) {
return
}
setExportResponseHeaders(rw, format)
queryRangeRequest := qbtypes.QueryRangeRequest{
Start: startTime,
End: endTime,
RequestType: qbtypes.RequestTypeRaw,
CompositeQuery: qbtypes.CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: nil,
},
},
},
}
spec := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Name: "raw",
Filter: &qbtypes.Filter{
Expression: filterExpression,
},
Limit: limit,
Order: orderByExpression,
}
spec.SelectFields = columns
queryRangeRequest.CompositeQuery.Queries[0].Spec = spec
// This will signal Export module to stop sending data
doneChan := make(chan any)
defer close(doneChan)
rowChan, errChan := handler.module.ExportRawData(r.Context(), orgID, &queryRangeRequest, doneChan)
isComplete, err := handler.executeExport(rowChan, errChan, format, rw)
if err != nil {
render.Error(rw, err)
return
}
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
}
var isComplete bool
// validateAndApplyExportLimits validates query types and applies default/max limits to all queries.
func validateAndApplyExportLimits(req *qbtypes.QueryRangeRequest) error {
isTraceOperatorQueryPresent := false
// Check if the trace operator query is present
queries := req.CompositeQuery.Queries
for idx := range len(queries) {
if _, ok := queries[idx].Spec.(qbtypes.QueryBuilderTraceOperator); ok {
isTraceOperatorQueryPresent = true
break
}
}
// If the trace operator query is not present, and there are multiple queries, return an error
if !isTraceOperatorQueryPresent && len(queries) > 1 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "multiple queries not allowed without a trace operator query")
}
for idx := range queries {
switch spec := queries[idx].Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
qbtypes.QueryBuilderTraceOperator:
limit := queries[idx].GetLimit()
if limit == 0 {
limit = DefaultExportRowCountLimit
} else if limit < 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
} else if limit > MaxExportRowCountLimit {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
}
queries[idx].SetLimit(limit)
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported query type: %T", spec)
}
}
return nil
}
// buildQueryRangeRequest builds a QueryRangeRequest from already-bound and validated GET query params.
func buildQueryRangeRequest(params *types.ExportRawDataQueryParams) (qbtypes.QueryRangeRequest, error) {
orderBy, err := parseExportQueryOrderBy(params.OrderBy)
if err != nil {
return qbtypes.QueryRangeRequest{}, err
}
columns := parseExportQueryColumns(params.Columns)
var filter *qbtypes.Filter
if params.Filter != "" {
filter = &qbtypes.Filter{Expression: params.Filter}
}
var query qbtypes.QueryEnvelope
switch params.Source {
case "logs":
query = qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: filter,
Limit: params.Limit,
Order: orderBy,
SelectFields: columns,
},
}
case "traces":
query = qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Filter: filter,
Limit: params.Limit,
Order: orderBy,
SelectFields: columns,
},
}
}
return qbtypes.QueryRangeRequest{
Start: params.Start,
End: params.End,
RequestType: qbtypes.RequestTypeRaw,
CompositeQuery: qbtypes.CompositeQuery{
Queries: []qbtypes.QueryEnvelope{query},
},
}, nil
}
// setExportResponseHeaders sets common HTTP headers for export responses.
func setExportResponseHeaders(rw http.ResponseWriter, format string) {
rw.Header().Set("Cache-Control", "no-cache")
rw.Header().Set("Vary", "Accept-Encoding")
rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete")
rw.Header().Set("Trailer", "X-Response-Complete")
rw.Header().Set("Transfer-Encoding", "chunked")
filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format)
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
}
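Note the interplay with the Trailer header declared above: because X-Response-Complete is announced before any body bytes are written, the later rw.Header().Set("X-Response-Complete", ...) call after streaming is emitted as an HTTP trailer under Go's net/http trailer contract, which is how a client of a chunked export can tell whether it completed.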
// executeExport streams data from rowChan to the response writer in the specified format.
func (handler *handler) executeExport(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, format string, rw http.ResponseWriter) (bool, error) {
switch format {
case "csv", "":
rw.Header().Set("Content-Type", "text/csv")
csvWriter := csv.NewWriter(rw)
isComplete, err := handler.exportRawDataCSV(rowChan, errChan, csvWriter)
isComplete, err = handler.exportLogsCSV(rowChan, errChan, csvWriter)
if err != nil {
return false, err
render.Error(rw, err)
return
}
csvWriter.Flush()
return isComplete, nil
case "jsonl":
rw.Header().Set("Content-Type", "application/x-ndjson")
return handler.exportRawDataJSONL(rowChan, errChan, rw)
isComplete, err = handler.exportLogsJSONL(rowChan, errChan, rw)
if err != nil {
render.Error(rw, err)
return
}
default:
return false, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl"))
return
}
rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete))
}
// exportRawDataCSV is a generic CSV export function that works with any raw data (logs, traces, etc.)
func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) {
var header []string
headerToIndexMapping := make(map[string]int)
var once sync.Once
headerToIndexMapping := make(map[string]int, len(header))
totalBytes := uint64(0)
for {
@@ -281,14 +239,18 @@ func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan
if !ok {
return true, nil
}
once.Do(func() {
header := constructCSVHeaderFromQueryResponse(row.Data)
_ = csvWriter.Write(header)
// We ignore the error here, as it will get caught in the next iteration
if header == nil {
// Initialize and write header for CSV
header = constructCSVHeaderFromQueryResponse(row.Data)
if err := csvWriter.Write(header); err != nil {
return false, err
}
for i, col := range header {
headerToIndexMapping[col] = i
}
})
}
record := constructCSVRecordFromQueryResponse(row.Data, headerToIndexMapping)
if err := csvWriter.Write(record); err != nil {
return false, err
@@ -306,8 +268,8 @@ func (handler *handler) exportRawDataCSV(rowChan <-chan *qbtypes.RawRow, errChan
}
}
// exportRawDataJSONL is a generic JSONL export function that works with any raw data (logs, traces, etc.)
func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) {
totalBytes := uint64(0)
for {
select {
@@ -315,11 +277,9 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
if !ok {
return true, nil
}
jsonBytes, err := json.Marshal(row.Data)
if err != nil {
return false, errors.NewUnexpectedf(errors.CodeInternal, "error marshaling JSON: %s", err)
}
totalBytes += uint64(len(jsonBytes)) + 1
// Handle JSON format (JSONL - one object per line)
jsonBytes, _ := json.Marshal(row.Data)
totalBytes += uint64(len(jsonBytes)) + 1 // +1 for newline
if _, err := writer.Write(jsonBytes); err != nil {
return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON: %s", err)
@@ -339,6 +299,69 @@ func (handler *handler) exportRawDataJSONL(rowChan <-chan *qbtypes.RawRow, errCh
}
}
func getExportQuerySource(queryParams url.Values) (string, error) {
switch queryParams.Get("source") {
case "logs", "":
return "logs", nil
case "metrics":
return "metrics", errors.NewInvalidInputf(errors.CodeInvalidInput, "metrics export not yet supported")
case "traces":
return "traces", errors.NewInvalidInputf(errors.CodeInvalidInput, "traces export not yet supported")
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs, metrics or traces")
}
}
func getExportQueryFormat(queryParams url.Values) (string, error) {
switch queryParams.Get("format") {
case "csv", "":
return "csv", nil
case "jsonl":
return "jsonl", nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl")
}
}
func getExportQueryLimit(queryParams url.Values) (int, error) {
limitStr := queryParams.Get("limit")
if limitStr == "" {
return DefaultExportRowCountLimit, nil
} else {
limit, err := strconv.Atoi(limitStr)
if err != nil {
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid limit format: %s", err.Error())
}
if limit <= 0 {
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive")
}
if limit > MaxExportRowCountLimit {
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit)
}
return limit, nil
}
}
func getExportQueryTimeRange(queryParams url.Values) (uint64, uint64, error) {
startTimeStr := queryParams.Get("start")
endTimeStr := queryParams.Get("end")
if startTimeStr == "" || endTimeStr == "" {
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start and end time are required")
}
startTime, err := strconv.ParseUint(startTimeStr, 10, 64)
if err != nil {
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time format: %s", err.Error())
}
endTime, err := strconv.ParseUint(endTimeStr, 10, 64)
if err != nil {
return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time format: %s", err.Error())
}
return startTime, endTime, nil
}
func constructCSVHeaderFromQueryResponse(data map[string]any) []string {
header := make([]string, 0, len(data))
for key := range data {
@@ -404,12 +427,9 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
valueStr = v.String()
default:
jsonBytes, err := json.Marshal(v)
if err != nil {
valueStr = fmt.Sprintf("%v", v)
} else {
valueStr = string(jsonBytes)
}
// For all other complex types (maps, structs, etc.)
jsonBytes, _ := json.Marshal(v)
valueStr = string(jsonBytes)
}
record[index] = sanitizeForCSV(valueStr)
@@ -418,17 +438,23 @@ func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMappi
return record
}
// parseExportQueryColumns converts bound column strings to TelemetryFieldKey structs.
// Each column should be in the format "context.field:type" or "context.field" or "field"
func parseExportQueryColumns(columnParams []string) []telemetrytypes.TelemetryFieldKey {
// getExportQueryColumns parses the "columns" query parameters and returns a slice of TelemetryFieldKey structs.
// Each column should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
func getExportQueryColumns(queryParams url.Values) []telemetrytypes.TelemetryFieldKey {
columnParams := queryParams["columns"]
columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams))
for _, columnStr := range columnParams {
// Skip empty strings
columnStr = strings.TrimSpace(columnStr)
if columnStr == "" {
continue
}
columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr))
}
return columns
}
@@ -440,24 +466,51 @@ func getsizeOfStringSlice(slice []string) uint64 {
return totalBytes
}
// parseExportQueryOrderBy converts a bound order_by string to an OrderBy slice.
// The string should be in the format "column:direction" and is assumed already validated.
func parseExportQueryOrderBy(orderByParam string) ([]qbtypes.OrderBy, error) {
// getExportQueryOrderBy parses the "order_by" query parameters and returns a slice of OrderBy structs.
// Each "order_by" parameter should be in the format "column:direction"
// Each "column" should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field"
func getExportQueryOrderBy(queryParams url.Values) ([]qbtypes.OrderBy, error) {
orderByParam := queryParams.Get("order_by")
orderByParam = strings.TrimSpace(orderByParam)
if orderByParam == "" {
return []qbtypes.OrderBy{}, nil
return telemetrylogs.DefaultLogsV2SortingOrder, nil
}
parts := strings.Split(orderByParam, ":")
if len(parts) != 2 && len(parts) != 3 {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by format: %s, should be <column>:<direction>", orderByParam)
}
column := strings.Join(parts[:len(parts)-1], ":")
direction := parts[len(parts)-1]
return []qbtypes.OrderBy{
orderDirection, ok := qbtypes.OrderDirectionMap[direction]
if !ok {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by direction: %s, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
}
orderByKey := telemetrytypes.GetFieldKeyFromKeyText(column)
orderBy := []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.GetFieldKeyFromKeyText(column),
TelemetryFieldKey: orderByKey,
},
Direction: qbtypes.OrderDirectionMap[direction],
Direction: orderDirection,
},
}, nil
}
// If we are ordering by the timestamp column, also order by the ID column
if orderByKey.Name == telemetrylogs.LogsV2TimestampColumn {
orderBy = append(orderBy, qbtypes.OrderBy{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2IDColumn,
},
},
Direction: orderDirection,
})
}
return orderBy, nil
}
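For example, order_by=timestamp:desc expands to [timestamp:desc, id:desc]; adding the ID column as a tiebreaker keeps row order deterministic when many log rows share the same timestamp, which in turn keeps the offset-based chunked export stable.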

View File

@@ -2,162 +2,261 @@ package implrawdataexport
import (
"net/url"
"strconv"
"testing"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
func TestExportRawDataQueryParams_BindingDefaults(t *testing.T) {
var params types.ExportRawDataQueryParams
err := binding.Query.BindQuery(url.Values{}, &params)
assert.NoError(t, err)
assert.Equal(t, "logs", params.Source)
assert.Equal(t, "csv", params.Format)
assert.Equal(t, DefaultExportRowCountLimit, params.Limit)
}
func logQuery(limit int) qbtypes.QueryEnvelope {
return qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{Limit: limit},
}
}
func traceQuery(limit int) qbtypes.QueryEnvelope {
return qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{Limit: limit},
}
}
func traceOperatorQuery(limit int) qbtypes.QueryEnvelope {
return qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeTraceOperator,
Spec: qbtypes.QueryBuilderTraceOperator{Limit: limit},
}
}
func makeRequest(queries ...qbtypes.QueryEnvelope) qbtypes.QueryRangeRequest {
return qbtypes.QueryRangeRequest{
CompositeQuery: qbtypes.CompositeQuery{Queries: queries},
}
}
func TestValidateAndApplyExportLimits(t *testing.T) {
func TestGetExportQuerySource(t *testing.T) {
tests := []struct {
name string
req qbtypes.QueryRangeRequest
expectedError bool
// assertions on each query after the call (indexed)
checkQueries func(t *testing.T, queries []qbtypes.QueryEnvelope)
name string
queryParams url.Values
expectedSource string
expectedError bool
}{
{
name: "single log query, zero limit gets default",
req: makeRequest(logQuery(0)),
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
},
name: "default logs source",
queryParams: url.Values{},
expectedSource: "logs",
expectedError: false,
},
{
name: "single log query, valid limit kept",
req: makeRequest(logQuery(1000)),
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
assert.Equal(t, 1000, q[0].GetLimit())
},
name: "explicit logs source",
queryParams: url.Values{"source": {"logs"}},
expectedSource: "logs",
expectedError: false,
},
{
name: "single log query, max limit kept",
req: makeRequest(logQuery(MaxExportRowCountLimit)),
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
assert.Equal(t, MaxExportRowCountLimit, q[0].GetLimit())
},
name: "metrics source - not supported",
queryParams: url.Values{"source": {"metrics"}},
expectedSource: "metrics",
expectedError: true,
},
{
name: "single log query, limit exceeds max",
req: makeRequest(logQuery(MaxExportRowCountLimit + 1)),
name: "traces source - not supported",
queryParams: url.Values{"source": {"traces"}},
expectedSource: "traces",
expectedError: true,
},
{
name: "invalid source",
queryParams: url.Values{"source": {"invalid"}},
expectedSource: "",
expectedError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
source, err := getExportQuerySource(tt.queryParams)
assert.Equal(t, tt.expectedSource, source)
if tt.expectedError {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
func TestGetExportQueryFormat(t *testing.T) {
tests := []struct {
name string
queryParams url.Values
expectedFormat string
expectedError bool
}{
{
name: "default csv format",
queryParams: url.Values{},
expectedFormat: "csv",
expectedError: false,
},
{
name: "explicit csv format",
queryParams: url.Values{"format": {"csv"}},
expectedFormat: "csv",
expectedError: false,
},
{
name: "jsonl format",
queryParams: url.Values{"format": {"jsonl"}},
expectedFormat: "jsonl",
expectedError: false,
},
{
name: "invalid format",
queryParams: url.Values{"format": {"xml"}},
expectedFormat: "",
expectedError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
format, err := getExportQueryFormat(tt.queryParams)
assert.Equal(t, tt.expectedFormat, format)
if tt.expectedError {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
func TestGetExportQueryLimit(t *testing.T) {
tests := []struct {
name string
queryParams url.Values
expectedLimit int
expectedError bool
}{
{
name: "default limit",
queryParams: url.Values{},
expectedLimit: DefaultExportRowCountLimit,
expectedError: false,
},
{
name: "valid limit",
queryParams: url.Values{"limit": {"5000"}},
expectedLimit: 5000,
expectedError: false,
},
{
name: "maximum limit",
queryParams: url.Values{"limit": {strconv.Itoa(MaxExportRowCountLimit)}},
expectedLimit: MaxExportRowCountLimit,
expectedError: false,
},
{
name: "limit exceeds maximum",
queryParams: url.Values{"limit": {"100000"}},
expectedLimit: 0,
expectedError: true,
},
{
name: "single log query, negative limit",
req: makeRequest(logQuery(-1)),
name: "invalid limit format",
queryParams: url.Values{"limit": {"invalid"}},
expectedLimit: 0,
expectedError: true,
},
{
name: "single trace query, zero limit gets default",
req: makeRequest(traceQuery(0)),
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
},
},
{
name: "multiple queries without trace operator",
req: makeRequest(logQuery(0), traceQuery(0)),
expectedError: true,
},
{
name: "trace operator with other queries — non-operator disabled",
req: makeRequest(logQuery(500), traceOperatorQuery(1000)),
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
assert.True(t, q[0].IsDisabled(), "log query should be disabled")
assert.False(t, q[1].IsDisabled(), "trace operator should stay enabled")
assert.Equal(t, 1000, q[1].GetLimit())
},
},
{
name: "trace operator alone, zero limit gets default",
req: makeRequest(traceOperatorQuery(0)),
checkQueries: func(t *testing.T, q []qbtypes.QueryEnvelope) {
assert.Equal(t, DefaultExportRowCountLimit, q[0].GetLimit())
},
},
{
name: "unsupported query type",
req: makeRequest(qbtypes.QueryEnvelope{Type: qbtypes.QueryTypeBuilder, Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{}}),
name: "negative limit",
queryParams: url.Values{"limit": {"-100"}},
expectedLimit: 0,
expectedError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := validateAndApplyExportLimits(&tt.req)
limit, err := getExportQueryLimit(tt.queryParams)
assert.Equal(t, tt.expectedLimit, limit)
if tt.expectedError {
assert.Error(t, err)
} else {
assert.NoError(t, err)
if tt.checkQueries != nil {
tt.checkQueries(t, tt.req.CompositeQuery.Queries)
}
}
})
}
}
func TestParseExportQueryColumns(t *testing.T) {
func TestGetExportQueryTimeRange(t *testing.T) {
tests := []struct {
name string
queryParams url.Values
expectedStartTime uint64
expectedEndTime uint64
expectedError bool
}{
{
name: "valid time range",
queryParams: url.Values{
"start": {"1640995200"},
"end": {"1641081600"},
},
expectedStartTime: 1640995200,
expectedEndTime: 1641081600,
expectedError: false,
},
{
name: "missing start time",
queryParams: url.Values{"end": {"1641081600"}},
expectedError: true,
},
{
name: "missing end time",
queryParams: url.Values{"start": {"1640995200"}},
expectedError: true,
},
{
name: "missing both times",
queryParams: url.Values{},
expectedError: true,
},
{
name: "invalid start time format",
queryParams: url.Values{
"start": {"invalid"},
"end": {"1641081600"},
},
expectedError: true,
},
{
name: "invalid end time format",
queryParams: url.Values{
"start": {"1640995200"},
"end": {"invalid"},
},
expectedError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
startTime, endTime, err := getExportQueryTimeRange(tt.queryParams)
if tt.expectedError {
assert.Error(t, err)
} else {
assert.NoError(t, err)
assert.Equal(t, tt.expectedStartTime, startTime)
assert.Equal(t, tt.expectedEndTime, endTime)
}
})
}
}
func TestGetExportQueryColumns(t *testing.T) {
tests := []struct {
name string
input []string
queryParams url.Values
expectedColumns []telemetrytypes.TelemetryFieldKey
}{
{
name: "empty input",
input: []string{},
name: "no columns specified",
queryParams: url.Values{},
expectedColumns: []telemetrytypes.TelemetryFieldKey{},
},
{
name: "single column",
input: []string{"timestamp"},
name: "single column",
queryParams: url.Values{
"columns": {"timestamp"},
},
expectedColumns: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp"},
},
},
{
name: "multiple columns",
input: []string{"timestamp", "message", "level"},
name: "multiple columns",
queryParams: url.Values{
"columns": {"timestamp", "message", "level"},
},
expectedColumns: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp"},
{Name: "message"},
@@ -165,90 +264,211 @@ func TestParseExportQueryColumns(t *testing.T) {
},
},
{
name: "empty entry is skipped",
input: []string{"timestamp", "", "level"},
name: "empty column name (should be skipped)",
queryParams: url.Values{
"columns": {"timestamp", "", "level"},
},
expectedColumns: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp"},
{Name: "level"},
},
},
{
name: "whitespace-only entry is skipped",
input: []string{"timestamp", " ", "level"},
name: "whitespace column name (should be skipped)",
queryParams: url.Values{
"columns": {"timestamp", " ", "level"},
},
expectedColumns: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp"},
{Name: "level"},
},
},
{
name: "column with context and type",
input: []string{"attribute.user:string"},
name: "valid column name with data type",
queryParams: url.Values{
"columns": {"timestamp", "attribute.user:string", "level"},
},
expectedColumns: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp"},
{Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "level"},
},
},
{
name: "column with context, dot-notation name",
input: []string{"attribute.user.string"},
name: "valid column name with dot notation",
queryParams: url.Values{
"columns": {"timestamp", "attribute.user.string", "level"},
},
expectedColumns: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp"},
{Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute},
{Name: "level"},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
columns := parseExportQueryColumns(tt.input)
columns := getExportQueryColumns(tt.queryParams)
assert.Equal(t, len(tt.expectedColumns), len(columns))
for i, expected := range tt.expectedColumns {
assert.Equal(t, expected, columns[i])
for i, expectedCol := range tt.expectedColumns {
assert.Equal(t, expectedCol, columns[i])
}
})
}
}
func TestParseExportQueryOrderBy(t *testing.T) {
func TestGetExportQueryOrderBy(t *testing.T) {
tests := []struct {
name string
input string
queryParams url.Values
expectedOrder []qbtypes.OrderBy
expectedError bool
}{
{
name: "empty string returns empty slice",
input: "",
expectedOrder: []qbtypes.OrderBy{},
expectedError: false,
},
{
name: "simple column asc",
input: "timestamp:asc",
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp"},
},
},
},
expectedError: false,
},
{
name: "simple column desc",
input: "timestamp:desc",
name: "no order specified",
queryParams: url.Values{},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionDesc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp"},
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2TimestampColumn,
},
},
},
{
Direction: qbtypes.OrderDirectionDesc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2IDColumn,
},
},
},
},
expectedError: false,
},
{
name: "column with context and type qualifier",
input: "attribute.user:string:desc",
name: "single order error, direction not specified",
queryParams: url.Values{
"order_by": {"timestamp"},
},
expectedOrder: nil,
expectedError: true,
},
{
name: "single order no error",
queryParams: url.Values{
"order_by": {"timestamp:asc"},
},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2TimestampColumn,
},
},
},
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2IDColumn,
},
},
},
},
expectedError: false,
},
{
name: "multiple orders",
queryParams: url.Values{
"order_by": {"timestamp:asc", "body:desc", "id:asc"},
},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2TimestampColumn,
},
},
},
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2IDColumn,
},
},
},
},
expectedError: false,
},
{
name: "empty order name (should be skipped)",
queryParams: url.Values{
"order_by": {"timestamp:asc", "", "id:asc"},
},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2TimestampColumn,
},
},
},
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2IDColumn,
},
},
},
},
expectedError: false,
},
{
name: "whitespace order name (should be skipped)",
queryParams: url.Values{
"order_by": {"timestamp:asc", " ", "id:asc"},
},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2TimestampColumn,
},
},
},
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: telemetrylogs.LogsV2IDColumn,
},
},
},
},
expectedError: false,
},
{
name: "invalid order name (should error out)",
queryParams: url.Values{
"order_by": {"attributes.user:", "id:asc"},
},
expectedOrder: nil,
expectedError: true,
},
{
name: "valid order name (should be included)",
queryParams: url.Values{
"order_by": {"attribute.user:string:desc", "id:asc"},
},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionDesc,
@@ -264,8 +484,10 @@ func TestParseExportQueryOrderBy(t *testing.T) {
expectedError: false,
},
{
name: "column with context, dot-notation name",
input: "attribute.user.string:desc",
name: "valid order name (should be included)",
queryParams: url.Values{
"order_by": {"attribute.user.string:desc", "id:asc"},
},
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionDesc,
@@ -279,35 +501,18 @@ func TestParseExportQueryOrderBy(t *testing.T) {
},
expectedError: false,
},
{
name: "resource with context and type",
input: "resource.service.name:string:asc",
expectedOrder: []qbtypes.OrderBy{
{
Direction: qbtypes.OrderDirectionAsc,
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
},
expectedError: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
order, err := parseExportQueryOrderBy(tt.input)
order, err := getExportQueryOrderBy(tt.queryParams)
if tt.expectedError {
assert.Error(t, err)
} else {
assert.NoError(t, err)
assert.Equal(t, len(tt.expectedOrder), len(order))
for i, expected := range tt.expectedOrder {
assert.Equal(t, expected, order[i])
for i, expectedOrd := range tt.expectedOrder {
assert.Equal(t, expectedOrd, order[i])
}
}
})
@@ -324,8 +529,10 @@ func TestConstructCSVHeaderFromQueryResponse(t *testing.T) {
header := constructCSVHeaderFromQueryResponse(data)
// Since map iteration order is not guaranteed, check that all expected keys are present
expectedKeys := []string{"timestamp", "message", "level", "id"}
assert.Equal(t, len(expectedKeys), len(header))
for _, key := range expectedKeys {
assert.Contains(t, header, key)
}

View File

@@ -23,26 +23,8 @@ func NewModule(querier querier.Querier) rawdataexport.Module {
func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, doneChan chan any) (chan *qbtypes.RawRow, chan error) {
isTraceOperatorQueryPresent := false
traceOperatorQueryIndex := -1
queries := rangeRequest.CompositeQuery.Queries
for idx := range len(queries) {
if _, ok := queries[idx].Spec.(qbtypes.QueryBuilderTraceOperator); ok {
isTraceOperatorQueryPresent = true
traceOperatorQueryIndex = idx
break
}
}
// If a trace operator query is present, mark all queries other than the trace operator as disabled
if isTraceOperatorQueryPresent {
for idx := range len(queries) {
if idx != traceOperatorQueryIndex {
queries[idx].SetDisabled(true)
}
}
}
spec := rangeRequest.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.LogAggregation])
rowCountLimit := spec.Limit
rowChan := make(chan *qbtypes.RawRow, 1)
errChan := make(chan error, 1)
@@ -56,62 +38,52 @@ func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequ
defer close(errChan)
defer close(rowChan)
if isTraceOperatorQueryPresent {
// If the trace operator query is present, we need to export the data for the trace operator query only
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, traceOperatorQueryIndex)
} else {
// If the trace operator query is not present, we need to export the data for the first query only
exportRawDataForSingleQuery(m.querier, contextWithTimeout, orgID, rangeRequest, rowChan, errChan, doneChan, 0)
rowCount := 0
for rowCount < rowCountLimit {
spec.Limit = min(ChunkSize, rowCountLimit-rowCount)
spec.Offset = rowCount
rangeRequest.CompositeQuery.Queries[0].Spec = spec
response, err := m.querier.QueryRange(contextWithTimeout, orgID, rangeRequest)
if err != nil {
errChan <- err
return
}
newRowsCount := 0
for _, result := range response.Data.Results {
resultData, ok := result.(*qbtypes.RawData)
if !ok {
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
return
}
newRowsCount += len(resultData.Rows)
for _, row := range resultData.Rows {
select {
case rowChan <- row:
case <-doneChan:
return
case <-ctx.Done():
errChan <- ctx.Err()
return
}
}
}
// Stop if we did not receive any new rows
if newRowsCount == 0 {
return
}
rowCount += newRowsCount
}
}()
return rowChan, errChan
}
func exportRawDataForSingleQuery(querier querier.Querier, ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, rowChan chan *qbtypes.RawRow, errChan chan error, doneChan chan any, queryIndex int) {
query := rangeRequest.CompositeQuery.Queries[queryIndex]
rowCountLimit := query.GetLimit()
rowCount := 0
for rowCount < rowCountLimit {
chunkSize := min(ChunkSize, rowCountLimit-rowCount)
query.SetLimit(chunkSize)
query.SetOffset(rowCount)
response, err := querier.QueryRange(ctx, orgID, rangeRequest)
if err != nil {
errChan <- err
return
}
newRowsCount := 0
for _, result := range response.Data.Results {
resultData, ok := result.(*qbtypes.RawData)
if !ok {
errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result)
return
}
newRowsCount += len(resultData.Rows)
for _, row := range resultData.Rows {
select {
case rowChan <- row:
case <-doneChan:
return
case <-ctx.Done():
errChan <- ctx.Err()
return
}
}
}
rowCount += newRowsCount
// Stop if we received fewer rows than requested; no more data is available
if newRowsCount < chunkSize {
return
}
}
}
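For reference, a minimal consumer sketch of the channel contract above, using a stand-in Row type instead of qbtypes.RawRow: rows stream on rowChan, a terminal error (if any) arrives on errChan, and closing doneChan lets the consumer cancel the producer early.

package main

import "fmt"

// Row stands in for qbtypes.RawRow in this sketch.
type Row struct{ Data map[string]any }

func main() {
    rowChan := make(chan *Row, 1)
    errChan := make(chan error, 1)
    doneChan := make(chan any)

    // Toy producer following the same shape as ExportRawData.
    go func() {
        defer close(errChan)
        defer close(rowChan)
        for i := 0; i < 3; i++ {
            select {
            case rowChan <- &Row{Data: map[string]any{"n": i}}:
            case <-doneChan: // consumer gave up; stop producing
                return
            }
        }
    }()

    for row := range rowChan {
        fmt.Println(row.Data) // e.g. encode the row to CSV or JSONL here
    }
    if err, ok := <-errChan; ok && err != nil {
        fmt.Println("export failed:", err)
    }
}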

View File

@@ -12,60 +12,30 @@ import (
)
const (
urlPathKeyLegacy = "http.url"
serverAddressKeyLegacy = "net.peer.name"
urlPathKey = "url.full"
serverAddressKey = "server.address"
derivedKeyHTTPURL = "http_url" // https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url
derivedKeyHTTPHost = "http_host"
)
var defaultStepInterval = 60 * time.Second
type SemconvFieldMapping struct {
LegacyField string
CurrentField string
FieldType telemetrytypes.FieldDataType
Context telemetrytypes.FieldContext
}
var dualSemconvGroupByKeys = map[string][]qbtypes.GroupByKey{
"server": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
var (
groupByKeyHTTPHost = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPHost,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
}
groupByKeyHTTPURL = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPURL,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
},
},
"url": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
},
}
}
)
func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
@@ -114,103 +84,6 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
return result
}
func MergeSemconvColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
return result
}
for _, res := range result.Data.Results {
scalarData, ok := res.(*qbtypes.ScalarData)
if !ok {
continue
}
serverAddressKeyIdx := -1
serverAddressKeyLegacyIdx := -1
for i, col := range scalarData.Columns {
if col.Name == serverAddressKey {
serverAddressKeyIdx = i
} else if col.Name == serverAddressKeyLegacy {
serverAddressKeyLegacyIdx = i
}
}
if serverAddressKeyIdx == -1 || serverAddressKeyLegacyIdx == -1 {
continue
}
var newRows [][]any
for _, row := range scalarData.Data {
if len(row) <= serverAddressKeyIdx || len(row) <= serverAddressKeyLegacyIdx {
continue
}
var serverName any
if isValidValue(row[serverAddressKeyIdx]) {
serverName = row[serverAddressKeyIdx]
} else if isValidValue(row[serverAddressKeyLegacyIdx]) {
serverName = row[serverAddressKeyLegacyIdx]
}
if serverName != nil {
newRow := make([]any, len(row)-1)
newRow[0] = serverName
targetIdx := 1
for i, val := range row {
if i != serverAddressKeyLegacyIdx && i != serverAddressKeyIdx {
if targetIdx < len(newRow) {
newRow[targetIdx] = val
targetIdx++
}
}
}
newRows = append(newRows, newRow)
}
}
newColumns := make([]*qbtypes.ColumnDescriptor, len(scalarData.Columns)-1)
targetIdx := 0
for i, col := range scalarData.Columns {
if i == serverAddressKeyIdx {
newCol := &qbtypes.ColumnDescriptor{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: col.FieldDataType,
FieldContext: col.FieldContext,
Signal: col.Signal,
},
QueryName: col.QueryName,
AggregationIndex: col.AggregationIndex,
Meta: col.Meta,
Type: col.Type,
}
newColumns[targetIdx] = newCol
targetIdx++
} else if i != serverAddressKeyLegacyIdx {
newColumns[targetIdx] = col
targetIdx++
}
}
scalarData.Columns = newColumns
scalarData.Data = newRows
}
return result
}
func isValidValue(val any) bool {
if val == nil {
return false
}
if str, ok := val.(string); ok {
return str != "" && str != "n/a"
}
return true
}
func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRangeResponse {
filteredResults := make([]*qbtypes.QueryRangeResponse, 0, len(results))
@@ -261,7 +134,7 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
for _, label := range series.Labels {
if label.Key.Name == serverAddressKeyLegacy || label.Key.Name == serverAddressKey {
if label.Key.Name == derivedKeyHTTPHost {
if strVal, ok := label.Value.(string); ok {
if net.ParseIP(strVal) != nil {
return false
@@ -274,12 +147,10 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
func shouldIncludeRow(row *qbtypes.RawRow) bool {
if row.Data != nil {
for _, key := range []string{serverAddressKeyLegacy, serverAddressKey} {
if domainVal, ok := row.Data[key]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
}
}
@@ -287,8 +158,8 @@ func shouldIncludeRow(row *qbtypes.RawRow) bool {
return true
}
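The check above leans on net.ParseIP, which returns non-nil for both IPv4 and IPv6 literals, so rows whose http_host is a bare IP address get dropped when ShowIp is false. A self-contained sketch of the same predicate:

package main

import (
    "fmt"
    "net"
)

// hostLooksLikeIP mirrors the filter above: true means the host is an
// IP literal and the row/series should be excluded.
func hostLooksLikeIP(host string) bool {
    return net.ParseIP(host) != nil
}

func main() {
    fmt.Println(hostLooksLikeIP("192.168.1.1")) // true: dropped
    fmt.Println(hostLooksLikeIP("2001:db8::1")) // true: dropped
    fmt.Println(hostLooksLikeIP("example.com")) // false: kept
}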
func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append(base, additional...)
func mergeGroupBy(base qbtypes.GroupByKey, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append([]qbtypes.GroupByKey{base}, additional...)
}
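Note the design choice in the new signature: prepending base into a freshly allocated slice means repeated calls that share one additional slice never alias each other's backing arrays, which the old append(base, additional...) form could not guarantee. A standalone sketch with a trimmed GroupByKey:

package main

import "fmt"

type GroupByKey struct{ Name string }

// Same shape as the helper above: the result always gets its own
// backing array, so callers can reuse `additional` freely.
func mergeGroupBy(base GroupByKey, additional []GroupByKey) []GroupByKey {
    return append([]GroupByKey{base}, additional...)
}

func main() {
    shared := []GroupByKey{{Name: "service.name"}}
    a := mergeGroupBy(GroupByKey{Name: "http_host"}, shared)
    b := mergeGroupBy(GroupByKey{Name: "http_url"}, shared)
    fmt.Println(a[0].Name, b[0].Name, shared[0].Name) // http_host http_url service.name
}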
func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
@@ -354,10 +225,10 @@ func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "count_distinct(http.url)"},
{Expression: fmt.Sprintf("count_distinct(%s)", derivedKeyHTTPURL)},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -373,7 +244,7 @@ func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Qu
{Expression: "max(timestamp)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -389,7 +260,7 @@ func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "rate()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -407,7 +278,7 @@ func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Query
{Expression: "count()"},
},
Filter: filter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -423,7 +294,7 @@ func buildTotalSpanQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
{Expression: "count()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -439,7 +310,7 @@ func buildP99Query(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "p99(duration_nano)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -462,10 +333,10 @@ func buildEndpointsInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtyp
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "rate(http.url)"},
{Expression: fmt.Sprintf("rate(%s)", derivedKeyHTTPURL)},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["url"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPURL, req.GroupBy),
},
}
}
@@ -519,8 +390,7 @@ func buildLastSeenInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtype
}
func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
baseExpression := fmt.Sprintf("(%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
urlPathKeyLegacy, urlPathKey)
baseExpression := fmt.Sprintf("%s EXISTS AND kind_string = 'Client'", derivedKeyHTTPURL)
if additionalFilter != nil && additionalFilter.Expression != "" {
// Even if the additional filter contains kind_string, it is combined with AND, so a user override does not weaken the base filter.

View File

@@ -1,9 +1,10 @@
package thirdpartyapi
import (
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
"testing"
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
@@ -28,7 +29,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "192.168.1.1",
},
},
@@ -36,7 +37,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "example.com",
},
},
@@ -60,7 +61,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "example.com",
},
},
@@ -84,12 +85,12 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
"net.peer.name": "192.168.1.1",
derivedKeyHTTPHost: "192.168.1.1",
},
},
{
Data: map[string]any{
"net.peer.name": "example.com",
derivedKeyHTTPHost: "example.com",
},
},
},
@@ -106,7 +107,7 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
"net.peer.name": "example.com",
derivedKeyHTTPHost: "example.com",
},
},
},

View File

@@ -585,6 +585,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
aH.LicensingAPI.Activate(rw, req)
})).Methods(http.MethodGet)
// Export
router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet)
router.HandleFunc("/api/v1/span_percentile", am.ViewAccess(aH.Signoz.Handlers.SpanPercentile.GetSpanPercentileDetails)).Methods(http.MethodPost)
// Query Filter Analyzer API, used to extract metric names and grouping columns from a query
@@ -4998,7 +5001,6 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false
@@ -5055,7 +5057,6 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false

View File

@@ -23,7 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
@@ -59,7 +58,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ gateway.Handler }{},
struct{ fields.Handler }{},
struct{ authz.Handler }{},
struct{ rawdataexport.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

View File

@@ -252,7 +252,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.GatewayHandler,
handlers.Fields,
handlers.AuthzHandler,
handlers.RawDataExport,
),
)
}

View File

@@ -260,11 +260,11 @@ type MetricHighlightsResponse struct {
ActiveTimeSeries uint64 `json:"activeTimeSeries" required:"true"`
}
// MetricAttributesRequest represents the payload for the metric attributes endpoint.
// MetricAttributesRequest represents the query parameters for the metric attributes endpoint.
type MetricAttributesRequest struct {
MetricName string `json:"metricName" required:"true"`
Start *int64 `json:"start,omitempty"`
End *int64 `json:"end,omitempty"`
MetricName string `json:"-"`
Start *int64 `query:"start"`
End *int64 `query:"end"`
}
// Validate ensures MetricAttributesRequest contains acceptable values.
@@ -273,10 +273,6 @@ func (req *MetricAttributesRequest) Validate() error {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
}
if req.MetricName == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required")
}
if req.Start != nil && req.End != nil {
if *req.Start >= *req.End {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start (%d) must be less than end (%d)", *req.Start, *req.End)
@@ -286,17 +282,6 @@ func (req *MetricAttributesRequest) Validate() error {
return nil
}
// UnmarshalJSON validates input immediately after decoding.
func (req *MetricAttributesRequest) UnmarshalJSON(data []byte) error {
type raw MetricAttributesRequest
var decoded raw
if err := json.Unmarshal(data, &decoded); err != nil {
return err
}
*req = MetricAttributesRequest(decoded)
return req.Validate()
}
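A hypothetical sketch of how a handler might now populate these fields, with start and end taken from query parameters and the metric name from the URL path; the route shape and helper names here are illustrative, not the codebase's actual decoder.

package main

import (
    "fmt"
    "net/http"
    "strconv"
)

// parseOptionalInt64 is a hypothetical helper: absent parameters map to
// nil, matching the *int64 fields on MetricAttributesRequest.
func parseOptionalInt64(r *http.Request, key string) (*int64, error) {
    raw := r.URL.Query().Get(key)
    if raw == "" {
        return nil, nil
    }
    v, err := strconv.ParseInt(raw, 10, 64)
    if err != nil {
        return nil, fmt.Errorf("%s must be an integer: %w", key, err)
    }
    return &v, nil
}

func main() {
    // Illustrative path; the real route may differ.
    r, _ := http.NewRequest(http.MethodGet, "/metrics/cpu_usage/attributes?start=1700000000000&end=1700000600000", nil)
    start, _ := parseOptionalInt64(r, "start")
    end, _ := parseOptionalInt64(r, "end")
    fmt.Println(*start, *end)
}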
// MetricAttribute represents a single attribute with its values and count.
type MetricAttribute struct {
Key string `json:"key" required:"true"`
@@ -310,6 +295,48 @@ type MetricAttributesResponse struct {
TotalKeys int64 `json:"totalKeys" required:"true"`
}
type MetricNameParams struct {
MetricName string `query:"metricName" required:"true"`
// ListMetricsParams represents the query parameters for the list metrics endpoint.
type ListMetricsParams struct {
Start *int64 `query:"start"`
End *int64 `query:"end"`
Limit int `query:"limit"`
Search string `query:"searchText"`
}
// Validate ensures ListMetricsParams contains acceptable values.
func (p *ListMetricsParams) Validate() error {
if p.Start != nil && *p.Start <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start must be greater than 0")
}
if p.End != nil && *p.End <= 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "end must be greater than 0")
}
if p.Start != nil && p.End != nil && *p.Start >= *p.End {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "start (%d) must be less than end (%d)", *p.Start, *p.End)
}
if p.Limit < 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be negative")
}
if p.Limit == 0 {
p.Limit = 100
}
if p.Limit > 5000 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must not exceed 5000")
}
return nil
}
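A trimmed, runnable copy of the limit handling above, showing that Validate mutates in place (a zero limit defaults to 100) while rejecting anything above 5000:

package main

import (
    "errors"
    "fmt"
)

// ListMetricsParams is cut down to the limit field for illustration.
type ListMetricsParams struct {
    Limit int
}

func (p *ListMetricsParams) Validate() error {
    if p.Limit < 0 {
        return errors.New("limit cannot be negative")
    }
    if p.Limit == 0 {
        p.Limit = 100 // default applied in place, as above
    }
    if p.Limit > 5000 {
        return errors.New("limit must not exceed 5000")
    }
    return nil
}

func main() {
    p := &ListMetricsParams{}
    fmt.Println(p.Validate(), p.Limit) // <nil> 100
    q := &ListMetricsParams{Limit: 6000}
    fmt.Println(q.Validate() != nil) // true
}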
// ListMetric represents a single metric with its metadata in the list metrics response.
type ListMetric struct {
MetricName string `json:"metricName" required:"true"`
Description string `json:"description" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true"`
MetricUnit string `json:"unit" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}
// ListMetricsResponse represents the response for the list metrics endpoint.
type ListMetricsResponse struct {
Metrics []ListMetric `json:"metrics" required:"true" nullable:"true"`
}

View File

@@ -1,386 +0,0 @@
package querybuildertypesv5
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
// GetExpression returns the expression string. Panics for Join, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetExpression() string {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Expression
case QueryBuilderFormula:
return spec.Expression
}
panic("unsupported spec type")
}
// GetReturnSpansFrom returns the return-spans-from value. Panics for all types except TraceOperator.
func (q *QueryEnvelope) GetReturnSpansFrom() string {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.ReturnSpansFrom
}
panic("unsupported spec type")
}
// GetSignal returns the signal. Panics for all types except Query[T].
func (q *QueryEnvelope) GetSignal() telemetrytypes.Signal {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.Signal
case QueryBuilderQuery[LogAggregation]:
return spec.Signal
case QueryBuilderQuery[MetricAggregation]:
return spec.Signal
}
panic("unsupported spec type")
}
// GetSource returns the source. Panics for all types except Query[T].
func (q *QueryEnvelope) GetSource() telemetrytypes.Source {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.Source
case QueryBuilderQuery[LogAggregation]:
return spec.Source
case QueryBuilderQuery[MetricAggregation]:
return spec.Source
}
panic("unsupported spec type")
}
// GetQuery returns the raw query string. Panics for all types except PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetQuery() string {
switch spec := q.Spec.(type) {
case PromQuery:
return spec.Query
case ClickHouseQuery:
return spec.Query
}
panic("unsupported spec type")
}
// GetStep returns the PromQL step size. Panics for all types except PromQuery.
func (q *QueryEnvelope) GetStep() Step {
switch spec := q.Spec.(type) {
case PromQuery:
return spec.Step
}
panic("unsupported spec type")
}
// GetStats returns the PromQL stats flag. Panics for all types except PromQuery.
func (q *QueryEnvelope) GetStats() bool {
switch spec := q.Spec.(type) {
case PromQuery:
return spec.Stats
}
panic("unsupported spec type")
}
// GetLeft returns the left query reference of a join. Panics for all types except QueryBuilderJoin.
func (q *QueryEnvelope) GetLeft() QueryRef {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
return spec.Left
}
panic("unsupported spec type")
}
// GetRight returns the right query reference of a join. Panics for all types except QueryBuilderJoin.
func (q *QueryEnvelope) GetRight() QueryRef {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
return spec.Right
}
panic("unsupported spec type")
}
// GetJoinType returns the join type. Panics for all types except QueryBuilderJoin.
func (q *QueryEnvelope) GetJoinType() JoinType {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
return spec.Type
}
panic("unsupported spec type")
}
// GetOn returns the join ON condition. Panics for all types except QueryBuilderJoin.
func (q *QueryEnvelope) GetOn() string {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
return spec.On
}
panic("unsupported spec type")
}
// GetQueryName returns the name of the spec.
func (q *QueryEnvelope) GetQueryName() string {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Name
case QueryBuilderQuery[TraceAggregation]:
return spec.Name
case QueryBuilderQuery[LogAggregation]:
return spec.Name
case QueryBuilderQuery[MetricAggregation]:
return spec.Name
case QueryBuilderFormula:
return spec.Name
case QueryBuilderJoin:
return spec.Name
case PromQuery:
return spec.Name
case ClickHouseQuery:
return spec.Name
}
panic("unsupported spec type")
}
// IsDisabled returns whether the spec is disabled.
func (q *QueryEnvelope) IsDisabled() bool {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Disabled
case QueryBuilderQuery[TraceAggregation]:
return spec.Disabled
case QueryBuilderQuery[LogAggregation]:
return spec.Disabled
case QueryBuilderQuery[MetricAggregation]:
return spec.Disabled
case QueryBuilderFormula:
return spec.Disabled
case QueryBuilderJoin:
return spec.Disabled
case PromQuery:
return spec.Disabled
case ClickHouseQuery:
return spec.Disabled
}
panic("unsupported spec type")
}
// GetLimit returns the row limit. Panics for PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetLimit() int {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Limit
case QueryBuilderQuery[TraceAggregation]:
return spec.Limit
case QueryBuilderQuery[LogAggregation]:
return spec.Limit
case QueryBuilderQuery[MetricAggregation]:
return spec.Limit
case QueryBuilderFormula:
return spec.Limit
case QueryBuilderJoin:
return spec.Limit
}
panic("unsupported spec type")
}
// GetOffset returns the row offset. Panics for Formula, Join, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetOffset() int {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Offset
case QueryBuilderQuery[TraceAggregation]:
return spec.Offset
case QueryBuilderQuery[LogAggregation]:
return spec.Offset
case QueryBuilderQuery[MetricAggregation]:
return spec.Offset
}
panic("unsupported spec type")
}
// GetType returns the QueryType of the envelope.
func (q *QueryEnvelope) GetType() QueryType {
return q.Type
}
// GetOrder returns the order-by clauses. Panics for PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetOrder() []OrderBy {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Order
case QueryBuilderQuery[TraceAggregation]:
return spec.Order
case QueryBuilderQuery[LogAggregation]:
return spec.Order
case QueryBuilderQuery[MetricAggregation]:
return spec.Order
case QueryBuilderFormula:
return spec.Order
case QueryBuilderJoin:
return spec.Order
}
panic("unsupported spec type")
}
// GetGroupBy returns the group-by keys. Panics for Formula, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetGroupBy() []GroupByKey {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.GroupBy
case QueryBuilderQuery[TraceAggregation]:
return spec.GroupBy
case QueryBuilderQuery[LogAggregation]:
return spec.GroupBy
case QueryBuilderQuery[MetricAggregation]:
return spec.GroupBy
case QueryBuilderJoin:
return spec.GroupBy
}
panic("unsupported spec type")
}
// GetFilter returns the filter. Panics for Formula, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetFilter() *Filter {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Filter
case QueryBuilderQuery[TraceAggregation]:
return spec.Filter
case QueryBuilderQuery[LogAggregation]:
return spec.Filter
case QueryBuilderQuery[MetricAggregation]:
return spec.Filter
case QueryBuilderJoin:
return spec.Filter
}
panic("unsupported spec type")
}
// GetHaving returns the having clause. Panics for PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetHaving() *Having {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Having
case QueryBuilderQuery[TraceAggregation]:
return spec.Having
case QueryBuilderQuery[LogAggregation]:
return spec.Having
case QueryBuilderQuery[MetricAggregation]:
return spec.Having
case QueryBuilderFormula:
return spec.Having
case QueryBuilderJoin:
return spec.Having
}
panic("unsupported spec type")
}
// GetFunctions returns the post-processing functions. Panics for PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetFunctions() []Function {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Functions
case QueryBuilderQuery[TraceAggregation]:
return spec.Functions
case QueryBuilderQuery[LogAggregation]:
return spec.Functions
case QueryBuilderQuery[MetricAggregation]:
return spec.Functions
case QueryBuilderFormula:
return spec.Functions
case QueryBuilderJoin:
return spec.Functions
}
panic("unsupported spec type")
}
// GetSelectFields returns the selected fields. Panics for Formula, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetSelectFields() []telemetrytypes.TelemetryFieldKey {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.SelectFields
case QueryBuilderQuery[TraceAggregation]:
return spec.SelectFields
case QueryBuilderQuery[LogAggregation]:
return spec.SelectFields
case QueryBuilderQuery[MetricAggregation]:
return spec.SelectFields
case QueryBuilderJoin:
return spec.SelectFields
}
panic("unsupported spec type")
}
// GetLegend returns the legend label. Panics for QueryBuilderJoin.
func (q *QueryEnvelope) GetLegend() string {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Legend
case QueryBuilderQuery[TraceAggregation]:
return spec.Legend
case QueryBuilderQuery[LogAggregation]:
return spec.Legend
case QueryBuilderQuery[MetricAggregation]:
return spec.Legend
case QueryBuilderFormula:
return spec.Legend
case PromQuery:
return spec.Legend
case ClickHouseQuery:
return spec.Legend
}
panic("unsupported spec type")
}
// GetCursor returns the pagination cursor. Panics for Formula, Join, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetCursor() string {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.Cursor
case QueryBuilderQuery[TraceAggregation]:
return spec.Cursor
case QueryBuilderQuery[LogAggregation]:
return spec.Cursor
case QueryBuilderQuery[MetricAggregation]:
return spec.Cursor
}
panic("unsupported spec type")
}
// GetStepInterval returns the step interval. Panics for Formula, Join, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetStepInterval() Step {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
return spec.StepInterval
case QueryBuilderQuery[TraceAggregation]:
return spec.StepInterval
case QueryBuilderQuery[LogAggregation]:
return spec.StepInterval
case QueryBuilderQuery[MetricAggregation]:
return spec.StepInterval
}
panic("unsupported spec type")
}
// GetSecondaryAggregations returns the secondary aggregations. Panics for TraceOperator, Formula, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetSecondaryAggregations() []SecondaryAggregation {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.SecondaryAggregations
case QueryBuilderQuery[LogAggregation]:
return spec.SecondaryAggregations
case QueryBuilderQuery[MetricAggregation]:
return spec.SecondaryAggregations
case QueryBuilderJoin:
return spec.SecondaryAggregations
}
panic("unsupported spec type")
}
// GetLimitBy returns the limit-by configuration. Panics for TraceOperator, Formula, Join, PromQuery and ClickHouseQuery.
func (q *QueryEnvelope) GetLimitBy() *LimitBy {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.LimitBy
case QueryBuilderQuery[LogAggregation]:
return spec.LimitBy
case QueryBuilderQuery[MetricAggregation]:
return spec.LimitBy
}
panic("unsupported spec type")
}

View File

@@ -1,458 +0,0 @@
package querybuildertypesv5
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
// SetExpression sets the expression string of the spec, if applicable.
func (q *QueryEnvelope) SetExpression(expression string) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Expression = expression
q.Spec = spec
case QueryBuilderFormula:
spec.Expression = expression
q.Spec = spec
}
}
// SetReturnSpansFrom sets the return-spans-from value, if applicable.
func (q *QueryEnvelope) SetReturnSpansFrom(returnSpansFrom string) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.ReturnSpansFrom = returnSpansFrom
q.Spec = spec
}
}
// SetSignal sets the signal of the spec, if applicable.
func (q *QueryEnvelope) SetSignal(signal telemetrytypes.Signal) {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
spec.Signal = signal
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Signal = signal
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Signal = signal
q.Spec = spec
}
}
// SetSource sets the source of the spec, if applicable.
func (q *QueryEnvelope) SetSource(source telemetrytypes.Source) {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
spec.Source = source
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Source = source
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Source = source
q.Spec = spec
}
}
// SetQuery sets the raw query string of the spec, if applicable.
func (q *QueryEnvelope) SetQuery(query string) {
switch spec := q.Spec.(type) {
case PromQuery:
spec.Query = query
q.Spec = spec
case ClickHouseQuery:
spec.Query = query
q.Spec = spec
}
}
// SetStep sets the PromQL step size, if applicable.
func (q *QueryEnvelope) SetStep(step Step) {
switch spec := q.Spec.(type) {
case PromQuery:
spec.Step = step
q.Spec = spec
}
}
// SetStats sets the PromQL stats flag, if applicable.
func (q *QueryEnvelope) SetStats(stats bool) {
switch spec := q.Spec.(type) {
case PromQuery:
spec.Stats = stats
q.Spec = spec
}
}
// SetLeft sets the left query reference of a join, if applicable.
func (q *QueryEnvelope) SetLeft(left QueryRef) {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
spec.Left = left
q.Spec = spec
}
}
// SetRight sets the right query reference of a join, if applicable.
func (q *QueryEnvelope) SetRight(right QueryRef) {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
spec.Right = right
q.Spec = spec
}
}
// SetJoinType sets the join type, if applicable.
func (q *QueryEnvelope) SetJoinType(joinType JoinType) {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
spec.Type = joinType
q.Spec = spec
}
}
// SetOn sets the join ON condition, if applicable.
func (q *QueryEnvelope) SetOn(on string) {
switch spec := q.Spec.(type) {
case QueryBuilderJoin:
spec.On = on
q.Spec = spec
}
}
// SetQueryName sets the name of the spec, if applicable.
func (q *QueryEnvelope) SetQueryName(name string) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Name = name
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Name = name
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Name = name
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Name = name
q.Spec = spec
case QueryBuilderFormula:
spec.Name = name
q.Spec = spec
case QueryBuilderJoin:
spec.Name = name
q.Spec = spec
case PromQuery:
spec.Name = name
q.Spec = spec
case ClickHouseQuery:
spec.Name = name
q.Spec = spec
}
}
// SetDisabled sets the disabled flag of the spec, if applicable.
func (q *QueryEnvelope) SetDisabled(disabled bool) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Disabled = disabled
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Disabled = disabled
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Disabled = disabled
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Disabled = disabled
q.Spec = spec
case QueryBuilderFormula:
spec.Disabled = disabled
q.Spec = spec
case QueryBuilderJoin:
spec.Disabled = disabled
q.Spec = spec
case PromQuery:
spec.Disabled = disabled
q.Spec = spec
case ClickHouseQuery:
spec.Disabled = disabled
q.Spec = spec
}
}
// SetLimit sets the row limit of the spec, if applicable.
func (q *QueryEnvelope) SetLimit(limit int) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Limit = limit
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Limit = limit
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Limit = limit
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Limit = limit
q.Spec = spec
case QueryBuilderFormula:
spec.Limit = limit
q.Spec = spec
case QueryBuilderJoin:
spec.Limit = limit
q.Spec = spec
}
}
// SetOffset sets the row offset of the spec, if applicable.
func (q *QueryEnvelope) SetOffset(offset int) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Offset = offset
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Offset = offset
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Offset = offset
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Offset = offset
q.Spec = spec
}
}
// SetType sets the QueryType of the envelope.
func (q *QueryEnvelope) SetType(t QueryType) {
q.Type = t
}
// SetOrder sets the order-by clauses of the spec, if applicable.
func (q *QueryEnvelope) SetOrder(order []OrderBy) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Order = order
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Order = order
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Order = order
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Order = order
q.Spec = spec
case QueryBuilderFormula:
spec.Order = order
q.Spec = spec
case QueryBuilderJoin:
spec.Order = order
q.Spec = spec
}
}
// SetGroupBy sets the group-by keys of the spec, if applicable.
func (q *QueryEnvelope) SetGroupBy(groupBy []GroupByKey) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.GroupBy = groupBy
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.GroupBy = groupBy
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.GroupBy = groupBy
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.GroupBy = groupBy
q.Spec = spec
case QueryBuilderJoin:
spec.GroupBy = groupBy
q.Spec = spec
}
}
// SetFilter sets the filter of the spec, if applicable.
func (q *QueryEnvelope) SetFilter(filter *Filter) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Filter = filter
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Filter = filter
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Filter = filter
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Filter = filter
q.Spec = spec
case QueryBuilderJoin:
spec.Filter = filter
q.Spec = spec
}
}
// SetHaving sets the having clause of the spec, if applicable.
func (q *QueryEnvelope) SetHaving(having *Having) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Having = having
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Having = having
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Having = having
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Having = having
q.Spec = spec
case QueryBuilderFormula:
spec.Having = having
q.Spec = spec
case QueryBuilderJoin:
spec.Having = having
q.Spec = spec
}
}
// SetFunctions sets the post-processing functions of the spec, if applicable.
func (q *QueryEnvelope) SetFunctions(functions []Function) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Functions = functions
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Functions = functions
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Functions = functions
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Functions = functions
q.Spec = spec
case QueryBuilderFormula:
spec.Functions = functions
q.Spec = spec
case QueryBuilderJoin:
spec.Functions = functions
q.Spec = spec
}
}
// SetSelectFields sets the selected fields of the spec, if applicable.
func (q *QueryEnvelope) SetSelectFields(fields []telemetrytypes.TelemetryFieldKey) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.SelectFields = fields
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.SelectFields = fields
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.SelectFields = fields
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.SelectFields = fields
q.Spec = spec
case QueryBuilderJoin:
spec.SelectFields = fields
q.Spec = spec
}
}
// SetLegend sets the legend label of the spec, if applicable.
func (q *QueryEnvelope) SetLegend(legend string) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Legend = legend
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Legend = legend
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Legend = legend
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Legend = legend
q.Spec = spec
case QueryBuilderFormula:
spec.Legend = legend
q.Spec = spec
case PromQuery:
spec.Legend = legend
q.Spec = spec
case ClickHouseQuery:
spec.Legend = legend
q.Spec = spec
}
}
// SetCursor sets the pagination cursor of the spec, if applicable.
func (q *QueryEnvelope) SetCursor(cursor string) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.Cursor = cursor
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.Cursor = cursor
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.Cursor = cursor
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.Cursor = cursor
q.Spec = spec
}
}
// SetStepInterval sets the step interval of the spec, if applicable.
func (q *QueryEnvelope) SetStepInterval(step Step) {
switch spec := q.Spec.(type) {
case QueryBuilderTraceOperator:
spec.StepInterval = step
q.Spec = spec
case QueryBuilderQuery[TraceAggregation]:
spec.StepInterval = step
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.StepInterval = step
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.StepInterval = step
q.Spec = spec
}
}
// SetSecondaryAggregations sets the secondary aggregations of the spec, if applicable.
func (q *QueryEnvelope) SetSecondaryAggregations(secondaryAggregations []SecondaryAggregation) {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
spec.SecondaryAggregations = secondaryAggregations
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.SecondaryAggregations = secondaryAggregations
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.SecondaryAggregations = secondaryAggregations
q.Spec = spec
case QueryBuilderJoin:
spec.SecondaryAggregations = secondaryAggregations
q.Spec = spec
}
}
// SetLimitBy sets the limit-by configuration of the spec, if applicable.
func (q *QueryEnvelope) SetLimitBy(limitBy *LimitBy) {
switch spec := q.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
spec.LimitBy = limitBy
q.Spec = spec
case QueryBuilderQuery[LogAggregation]:
spec.LimitBy = limitBy
q.Spec = spec
case QueryBuilderQuery[MetricAggregation]:
spec.LimitBy = limitBy
q.Spec = spec
}
}

View File

@@ -10,9 +10,55 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// queryName returns the name from any query envelope spec type.
func (e QueryEnvelope) queryName() string {
switch spec := e.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.Name
case QueryBuilderQuery[LogAggregation]:
return spec.Name
case QueryBuilderQuery[MetricAggregation]:
return spec.Name
case QueryBuilderFormula:
return spec.Name
case QueryBuilderTraceOperator:
return spec.Name
case QueryBuilderJoin:
return spec.Name
case PromQuery:
return spec.Name
case ClickHouseQuery:
return spec.Name
}
return ""
}
// isDisabled returns the disabled status from any query envelope spec type.
func (e QueryEnvelope) isDisabled() bool {
switch spec := e.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.Disabled
case QueryBuilderQuery[LogAggregation]:
return spec.Disabled
case QueryBuilderQuery[MetricAggregation]:
return spec.Disabled
case QueryBuilderFormula:
return spec.Disabled
case QueryBuilderTraceOperator:
return spec.Disabled
case QueryBuilderJoin:
return spec.Disabled
case PromQuery:
return spec.Disabled
case ClickHouseQuery:
return spec.Disabled
}
return false
}
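The same fall-through behavior in miniature, on simplified stand-in types: unlike the exported getters removed above, which panic on an unsupported spec, these unexported helpers return the zero value for anything unrecognized.

package main

import "fmt"

type envelope struct{ spec any }

type promQuery struct{ Name string }
type chQuery struct{ Name string }

// queryName mirrors the helper above: a type switch over the concrete
// spec that falls through to "" rather than panicking.
func (e envelope) queryName() string {
    switch s := e.spec.(type) {
    case promQuery:
        return s.Name
    case chQuery:
        return s.Name
    }
    return ""
}

func main() {
    fmt.Println(envelope{spec: promQuery{Name: "A"}}.queryName()) // A
    fmt.Println(envelope{spec: 42}.queryName() == "")             // true
}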
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
name := envelope.GetQueryName()
name := envelope.queryName()
var typeLabel string
switch envelope.Type {
@@ -425,7 +471,7 @@ func (r *QueryRangeRequest) Validate() error {
// validateAllQueriesNotDisabled validates that at least one query in the composite query is enabled
func (r *QueryRangeRequest) validateAllQueriesNotDisabled() error {
for _, envelope := range r.CompositeQuery.Queries {
if !envelope.IsDisabled() {
if !envelope.isDisabled() {
return nil
}
}
@@ -460,7 +506,7 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
// Check name uniqueness for builder queries
if envelope.Type == QueryTypeBuilder || envelope.Type == QueryTypeSubQuery {
name := envelope.GetQueryName()
name := envelope.queryName()
if name != "" {
if queryNames[name] {
return errors.NewInvalidInputf(

View File

@@ -816,7 +816,7 @@ func TestQueryEnvelope_Helpers(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.envelope.GetQueryName()
got := tt.envelope.queryName()
if got != tt.want {
t.Errorf("queryName() = %q, want %q", got, tt.want)
}
@@ -868,7 +868,7 @@ func TestQueryEnvelope_Helpers(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.envelope.IsDisabled()
got := tt.envelope.isDisabled()
if got != tt.want {
t.Errorf("isDisabled() = %v, want %v", got, tt.want)
}

View File

@@ -1,35 +0,0 @@
package types
// ExportRawDataQueryParams represents the query parameters for the export raw data endpoint
type ExportRawDataQueryParams struct {
ExportRawDataFormatQueryParam
// Source specifies the type of data to export: "logs" or "traces"
Source string `query:"source,default=logs" default:"logs" enum:"logs,traces"`
// Start is the start time for the query (Unix timestamp in nanoseconds)
Start uint64 `query:"start"`
// End is the end time for the query (Unix timestamp in nanoseconds)
End uint64 `query:"end"`
// Limit specifies the maximum number of rows to export
Limit int `query:"limit,default=10000" default:"10000" minimum:"1" maximum:"50000"`
// Filter is a filter expression to apply to the query
Filter string `query:"filter"`
// Columns specifies the columns to include in the export
// Format: ["context.field:type", "context.field", "field"]
Columns []string `query:"columns"`
// OrderBy specifies the sorting order
// Format: "column:direction" or "context.field:type:direction"
// Direction can be "asc" or "desc"
OrderBy string `query:"order_by"`
}
type ExportRawDataFormatQueryParam struct {
// Format specifies the output format: "csv" or "jsonl"
Format string `query:"format,default=csv" default:"csv" enum:"csv,jsonl"`
}
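Putting the documented parameters together, a hypothetical export request could be assembled like this (timestamps are Unix nanoseconds; every value below is made up):

package main

import (
    "fmt"
    "net/url"
)

func main() {
    q := url.Values{}
    q.Set("source", "logs")
    q.Set("format", "jsonl")
    q.Set("start", "1760000000000000000")
    q.Set("end", "1760000300000000000")
    q.Set("limit", "1000")
    q.Set("filter", "severity_text = 'ERROR'")
    q.Set("order_by", "timestamp:asc")
    fmt.Println("/api/v1/export_raw_data?" + q.Encode())
}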

View File

@@ -1,634 +0,0 @@
import csv
import io
import json
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
from urllib.parse import urlencode
import requests
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
def test_export_logs_csv(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert 3 logs with different severity levels and attributes.
Tests:
1. Export logs as CSV format
2. Verify CSV structure and content
3. Validate headers are present
4. Check log data is correctly formatted
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_logs(
[
Logs(
timestamp=now - timedelta(seconds=10),
body="Application started successfully",
severity_text="INFO",
resources={
"service.name": "api-service",
"deployment.environment": "production",
"host.name": "server-01",
},
attributes={
"http.method": "GET",
"http.status_code": 200,
"user.id": "user123",
},
),
Logs(
timestamp=now - timedelta(seconds=8),
body="Connection to database failed",
severity_text="ERROR",
resources={
"service.name": "api-service",
"deployment.environment": "production",
"host.name": "server-01",
},
attributes={
"error.type": "ConnectionError",
"db.name": "production_db",
},
),
Logs(
timestamp=now - timedelta(seconds=5),
body="Request processed",
severity_text="DEBUG",
resources={
"service.name": "worker-service",
"deployment.environment": "production",
"host.name": "server-02",
},
attributes={
"request.id": "req-456",
"duration_ms": 150.5,
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
}
# Export logs as CSV (default format, no source needed)
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=30,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "text/csv"
assert "attachment" in response.headers.get("Content-Disposition", "")
# Parse CSV content
csv_content = response.text
csv_reader = csv.DictReader(io.StringIO(csv_content))
rows = list(csv_reader)
assert len(rows) == 3, f"Expected 3 rows, got {len(rows)}"
# Verify log bodies are present in the exported data
bodies = [row.get("body") for row in rows]
assert "Application started successfully" in bodies
assert "Connection to database failed" in bodies
assert "Request processed" in bodies
# Verify severity levels
severities = [row.get("severity_text") for row in rows]
assert "INFO" in severities
assert "ERROR" in severities
assert "DEBUG" in severities
def test_export_logs_jsonl(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert 2 logs with different attributes.
Tests:
1. Export logs as JSONL format
2. Verify JSONL structure and content
3. Check each line is valid JSON
4. Validate log data is correctly formatted
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_logs(
[
Logs(
timestamp=now - timedelta(seconds=10),
body="User logged in",
severity_text="INFO",
resources={
"service.name": "auth-service",
"deployment.environment": "staging",
},
attributes={
"user.email": "test@example.com",
"session.id": "sess-789",
},
),
Logs(
timestamp=now - timedelta(seconds=5),
body="Payment processed successfully",
severity_text="INFO",
resources={
"service.name": "payment-service",
"deployment.environment": "staging",
},
attributes={
"transaction.id": "txn-123",
"amount": 99.99,
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "jsonl",
"source": "logs",
}
# Export logs as JSONL
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
assert "attachment" in response.headers.get("Content-Disposition", "")
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 2, f"Expected 2 lines, got {len(jsonl_lines)}"
# Verify each line is valid JSON
json_objects = []
for line in jsonl_lines:
obj = json.loads(line)
json_objects.append(obj)
assert "id" in obj
assert "timestamp" in obj
assert "body" in obj
assert "severity_text" in obj
# Verify log bodies
bodies = [obj.get("body") for obj in json_objects]
assert "User logged in" in bodies
assert "Payment processed successfully" in bodies
def test_export_logs_with_filter(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert logs with different severity levels.
Tests:
1. Export logs with filter applied
2. Verify only filtered logs are returned
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_logs(
[
Logs(
timestamp=now - timedelta(seconds=10),
body="Info message",
severity_text="INFO",
resources={
"service.name": "test-service",
},
attributes={},
),
Logs(
timestamp=now - timedelta(seconds=8),
body="Error message",
severity_text="ERROR",
resources={
"service.name": "test-service",
},
attributes={},
),
Logs(
timestamp=now - timedelta(seconds=5),
body="Another error message",
severity_text="ERROR",
resources={
"service.name": "test-service",
},
attributes={},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "jsonl",
"source": "logs",
"filter": "severity_text = 'ERROR'",
}
# Export logs with filter
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 2, f"Expected 2 lines (filtered), got {len(jsonl_lines)}"
# Verify only ERROR logs are returned
for line in jsonl_lines:
obj = json.loads(line)
assert obj["severity_text"] == "ERROR"
assert "error message" in obj["body"].lower()
def test_export_logs_with_limit(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert 5 logs.
Tests:
1. Export logs with limit applied
2. Verify only limited number of logs are returned
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs = []
for i in range(5):
logs.append(
Logs(
timestamp=now - timedelta(seconds=i),
body=f"Log message {i}",
severity_text="INFO",
resources={
"service.name": "test-service",
},
attributes={
"index": i,
},
)
)
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "csv",
"source": "logs",
"limit": 3,
}
# Export logs with limit
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "text/csv"
# Parse CSV content
csv_content = response.text
csv_reader = csv.DictReader(io.StringIO(csv_content))
rows = list(csv_reader)
assert len(rows) == 3, f"Expected 3 rows (limited), got {len(rows)}"
def test_export_logs_with_columns(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert logs with various attributes.
Tests:
1. Export logs with specific columns
2. Verify only specified columns are returned
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_logs(
[
Logs(
timestamp=now - timedelta(seconds=10),
body="Test log message",
severity_text="INFO",
resources={
"service.name": "test-service",
"deployment.environment": "production",
},
attributes={
"http.method": "GET",
"http.status_code": 200,
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
# Request only specific columns
params = {
"start": start_ns,
"end": end_ns,
"format": "csv",
"source": "logs",
"columns": ["timestamp", "severity_text", "body"],
}
# Export logs with specific columns
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params, doseq=True)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "text/csv"
# Parse CSV content
csv_content = response.text
csv_reader = csv.DictReader(io.StringIO(csv_content))
rows = list(csv_reader)
assert len(rows) == 1
# Verify the specified columns are present
row = rows[0]
assert "timestamp" in row
assert "severity_text" in row
assert "body" in row
assert row["severity_text"] == "INFO"
assert row["body"] == "Test log message"
def test_export_logs_with_order_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert logs at different timestamps.
Tests:
1. Export logs with ascending timestamp order
2. Verify logs are returned in correct order
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_logs(
[
Logs(
timestamp=now - timedelta(seconds=10),
body="First log",
severity_text="INFO",
resources={
"service.name": "test-service",
},
attributes={},
),
Logs(
timestamp=now - timedelta(seconds=5),
body="Second log",
severity_text="INFO",
resources={
"service.name": "test-service",
},
attributes={},
),
Logs(
timestamp=now - timedelta(seconds=1),
body="Third log",
severity_text="INFO",
resources={
"service.name": "test-service",
},
attributes={},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "jsonl",
"source": "logs",
"order_by": "timestamp:asc",
}
# Export logs with ascending order
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 3
# Verify order - first log should be "First log" (oldest)
json_objects = [json.loads(line) for line in jsonl_lines]
assert json_objects[0]["body"] == "First log"
assert json_objects[1]["body"] == "Second log"
assert json_objects[2]["body"] == "Third log"
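# A stronger ordering check, assuming each exported object carries a
# sortable "timestamp" field (an assumption about the payload shape):
#
#     timestamps = [obj["timestamp"] for obj in json_objects]
#     assert timestamps == sorted(timestamps)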
def test_export_logs_with_complex_filter(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
"""
Setup:
Insert logs with various service names and severity levels.
Tests:
1. Export logs with complex filter (multiple conditions)
2. Verify only logs matching all conditions are returned
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_logs(
[
Logs(
timestamp=now - timedelta(seconds=10),
body="API error occurred",
severity_text="ERROR",
resources={
"service.name": "api-service",
},
attributes={},
),
Logs(
timestamp=now - timedelta(seconds=8),
body="Worker info message",
severity_text="INFO",
resources={
"service.name": "worker-service",
},
attributes={},
),
Logs(
timestamp=now - timedelta(seconds=5),
body="API info message",
severity_text="INFO",
resources={
"service.name": "api-service",
},
attributes={},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
# Filter for api-service AND ERROR severity
params = {
"start": start_ns,
"end": end_ns,
"format": "jsonl",
"source": "logs",
"filter": "service.name = 'api-service' AND severity_text = 'ERROR'",
}
# Export logs with complex filter
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert (
len(jsonl_lines) == 1
), f"Expected 1 line (complex filter), got {len(jsonl_lines)}"
# Verify the filtered log
filtered_obj = json.loads(jsonl_lines[0])
assert filtered_obj["body"] == "API error occurred"
assert filtered_obj["severity_text"] == "ERROR"

View File

@@ -1,782 +0,0 @@
import csv
import io
import json
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
from urllib.parse import urlencode
import requests
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCode
def test_export_traces_csv(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_traces: Callable[[List[Traces]], None],
) -> None:
"""
Setup:
Insert 3 traces with different attributes.
Tests:
1. Export traces as CSV format
2. Verify CSV structure and content
3. Validate the response headers are present
4. Check trace data is correctly formatted
"""
http_service_trace_id = TraceIdGenerator.trace_id()
http_service_span_id = TraceIdGenerator.span_id()
http_service_db_span_id = TraceIdGenerator.span_id()
topic_service_trace_id = TraceIdGenerator.trace_id()
topic_service_span_id = TraceIdGenerator.span_id()
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_traces(
[
Traces(
timestamp=now - timedelta(seconds=4),
duration=timedelta(seconds=3),
trace_id=http_service_trace_id,
span_id=http_service_span_id,
parent_span_id="",
name="POST /integration",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"deployment.environment": "production",
"service.name": "http-service",
"os.type": "linux",
"host.name": "linux-000",
},
attributes={
"net.transport": "IP.TCP",
"http.scheme": "http",
"http.user_agent": "Integration Test",
"http.request.method": "POST",
"http.response.status_code": "200",
},
),
Traces(
timestamp=now - timedelta(seconds=3.5),
duration=timedelta(seconds=0.5),
trace_id=http_service_trace_id,
span_id=http_service_db_span_id,
parent_span_id=http_service_span_id,
name="SELECT",
kind=TracesKind.SPAN_KIND_CLIENT,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"deployment.environment": "production",
"service.name": "http-service",
"os.type": "linux",
"host.name": "linux-000",
},
attributes={
"db.name": "integration",
"db.operation": "SELECT",
"db.statement": "SELECT * FROM integration",
},
),
Traces(
timestamp=now - timedelta(seconds=1),
duration=timedelta(seconds=2),
trace_id=topic_service_trace_id,
span_id=topic_service_span_id,
parent_span_id="",
name="topic publish",
kind=TracesKind.SPAN_KIND_PRODUCER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"deployment.environment": "production",
"service.name": "topic-service",
"os.type": "linux",
"host.name": "linux-001",
},
attributes={
"message.type": "SENT",
"messaging.operation": "publish",
"messaging.message.id": "001",
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "csv",  # set explicitly instead of relying on the server default
"source": "traces",
"limit": 1000,
}
# Export traces as CSV (GET for simple queries)
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=30,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "text/csv"
assert "attachment" in response.headers.get("Content-Disposition", "")
# Parse CSV content
csv_content = response.text
csv_reader = csv.DictReader(io.StringIO(csv_content))
rows = list(csv_reader)
assert len(rows) == 3, f"Expected 3 rows, got {len(rows)}"
# Verify trace IDs are present in the exported data
trace_ids = [row.get("trace_id") for row in rows]
assert http_service_trace_id in trace_ids
assert topic_service_trace_id in trace_ids
# Verify span names are present
span_names = [row.get("name") for row in rows]
assert "POST /integration" in span_names
assert "SELECT" in span_names
assert "topic publish" in span_names
def test_export_traces_jsonl(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_traces: Callable[[List[Traces]], None],
) -> None:
"""
Setup:
Insert 2 traces with different attributes.
Tests:
1. Export traces as JSONL format
2. Verify JSONL structure and content
3. Check each line is valid JSON
4. Validate trace data is correctly formatted
"""
http_service_trace_id = TraceIdGenerator.trace_id()
http_service_span_id = TraceIdGenerator.span_id()
topic_service_trace_id = TraceIdGenerator.trace_id()
topic_service_span_id = TraceIdGenerator.span_id()
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_traces(
[
Traces(
timestamp=now - timedelta(seconds=4),
duration=timedelta(seconds=3),
trace_id=http_service_trace_id,
span_id=http_service_span_id,
parent_span_id="",
name="POST /api/test",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "api-service",
"deployment.environment": "staging",
},
attributes={
"http.request.method": "POST",
"http.response.status_code": "201",
},
),
Traces(
timestamp=now - timedelta(seconds=2),
duration=timedelta(seconds=1),
trace_id=topic_service_trace_id,
span_id=topic_service_span_id,
parent_span_id="",
name="queue.process",
kind=TracesKind.SPAN_KIND_CONSUMER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "queue-service",
"deployment.environment": "staging",
},
attributes={
"messaging.operation": "process",
"messaging.system": "rabbitmq",
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "jsonl",
"source": "traces",
"limit": 1000,
}
# Export traces as JSONL (GET for simple queries)
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
assert "attachment" in response.headers.get("Content-Disposition", "")
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 2, f"Expected 2 lines, got {len(jsonl_lines)}"
# Verify each line is valid JSON
json_objects = []
for line in jsonl_lines:
obj = json.loads(line)
json_objects.append(obj)
assert "trace_id" in obj
assert "span_id" in obj
assert "name" in obj
# Verify trace IDs are present
trace_ids = [obj.get("trace_id") for obj in json_objects]
assert http_service_trace_id in trace_ids
assert topic_service_trace_id in trace_ids
# Verify span names are present
span_names = [obj.get("name") for obj in json_objects]
assert "POST /api/test" in span_names
assert "queue.process" in span_names
def test_export_traces_with_filter(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_traces: Callable[[List[Traces]], None],
) -> None:
"""
Setup:
Insert traces with different service names.
Tests:
1. Export traces with filter applied
2. Verify only filtered traces are returned
"""
service_a_trace_id = TraceIdGenerator.trace_id()
service_a_span_id = TraceIdGenerator.span_id()
service_b_trace_id = TraceIdGenerator.trace_id()
service_b_span_id = TraceIdGenerator.span_id()
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_traces(
[
Traces(
timestamp=now - timedelta(seconds=4),
duration=timedelta(seconds=1),
trace_id=service_a_trace_id,
span_id=service_a_span_id,
parent_span_id="",
name="operation-a",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "service-a",
},
attributes={},
),
Traces(
timestamp=now - timedelta(seconds=2),
duration=timedelta(seconds=1),
trace_id=service_b_trace_id,
span_id=service_b_span_id,
parent_span_id="",
name="operation-b",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "service-b",
},
attributes={},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "jsonl",
"source": "traces",
"limit": 1000,
"filter": "service.name = 'service-a'",
}
# Export traces with filter (GET supports filter param)
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 1, f"Expected 1 line (filtered), got {len(jsonl_lines)}"
# Verify the filtered trace
filtered_obj = json.loads(jsonl_lines[0])
assert filtered_obj["trace_id"] == service_a_trace_id
assert filtered_obj["name"] == "operation-a"
def test_export_traces_with_limit(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_traces: Callable[[List[Traces]], None],
) -> None:
"""
Setup:
Insert 5 traces.
Tests:
1. Export traces with limit applied
2. Verify that only the limited number of traces is returned
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
traces = []
for i in range(5):
traces.append(
Traces(
timestamp=now - timedelta(seconds=i),
duration=timedelta(seconds=1),
trace_id=TraceIdGenerator.trace_id(),
span_id=TraceIdGenerator.span_id(),
parent_span_id="",
name=f"operation-{i}",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "test-service",
},
attributes={},
)
)
insert_traces(traces)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
params = {
"start": start_ns,
"end": end_ns,
"format": "csv",
"source": "traces",
"limit": 3,
}
# Export traces with limit (GET supports limit param)
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/export_raw_data?{urlencode(params)}"
),
timeout=10,
headers={
"authorization": f"Bearer {token}",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "text/csv"
# Parse CSV content
csv_content = response.text
csv_reader = csv.DictReader(io.StringIO(csv_content))
rows = list(csv_reader)
assert len(rows) == 3, f"Expected 3 rows (limited), got {len(rows)}"
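# Only the row count is asserted: without an explicit order_by, which 3 of
# the 5 inserted spans come back is left to the server's default ordering.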
def test_export_traces_multiple_queries_rejected(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
"""
Tests:
1. POST with multiple builder queries but no trace operator is rejected
2. Verify 400 error is returned
"""
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
body = {
"start": start_ns,
"end": end_ns,
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"signal": "traces",
"name": "A",
"limit": 1000,
"filter": {"expression": "service.name = 'service-a'"},
},
},
{
"type": "builder_query",
"spec": {
"signal": "traces",
"name": "B",
"limit": 1000,
"filter": {"expression": "service.name = 'service-b'"},
},
},
]
},
}
url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
response = requests.post(
url,
json=body,
timeout=10,
headers={
"authorization": f"Bearer {token}",
"Content-Type": "application/json",
},
)
assert response.status_code == HTTPStatus.BAD_REQUEST
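# Rationale: with multiple builder queries and no trace operator there is no
# single result set to export, so the endpoint rejects the request; the next
# test shows a trace operator ("C") combining queries A and B.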
def test_export_traces_with_composite_query_trace_operator(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_traces: Callable[[List[Traces]], None],
) -> None:
"""
Setup:
Insert multiple traces with parent-child relationships.
Tests:
1. Export traces using trace operator in composite query (POST)
2. Verify trace operator query works correctly
"""
parent_trace_id = TraceIdGenerator.trace_id()
parent_span_id = TraceIdGenerator.span_id()
child_span_id_1 = TraceIdGenerator.span_id()
child_span_id_2 = TraceIdGenerator.span_id()
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_traces(
[
Traces(
timestamp=now - timedelta(seconds=10),
duration=timedelta(seconds=5),
trace_id=parent_trace_id,
span_id=parent_span_id,
parent_span_id="",
name="parent-operation",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "parent-service",
},
attributes={
"operation.type": "parent",
},
),
Traces(
timestamp=now - timedelta(seconds=9),
duration=timedelta(seconds=2),
trace_id=parent_trace_id,
span_id=child_span_id_1,
parent_span_id=parent_span_id,
name="child-operation-1",
kind=TracesKind.SPAN_KIND_INTERNAL,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "parent-service",
},
attributes={
"operation.type": "child",
},
),
Traces(
timestamp=now - timedelta(seconds=7),
duration=timedelta(seconds=1),
trace_id=parent_trace_id,
span_id=child_span_id_2,
parent_span_id=parent_span_id,
name="child-operation-2",
kind=TracesKind.SPAN_KIND_INTERNAL,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "parent-service",
},
attributes={
"operation.type": "child",
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
# A: spans with operation.type = 'parent'
query_a = {
"type": "builder_query",
"spec": {
"signal": "traces",
"name": "A",
"limit": 1000,
"filter": {"expression": "operation.type = 'parent'"},
},
}
# B: spans with operation.type = 'child'
query_b = {
"type": "builder_query",
"spec": {
"signal": "traces",
"name": "B",
"limit": 1000,
"filter": {"expression": "operation.type = 'child'"},
},
}
# Trace operator: find traces where A has a direct descendant B
query_c = {
"type": "builder_trace_operator",
"spec": {
"name": "C",
"expression": "A => B",
"returnSpansFrom": "A",
"limit": 1000,
"order": [{"key": {"name": "timestamp"}, "direction": "desc"}],
},
}
body = {
"start": start_ns,
"end": end_ns,
"requestType": "raw",
"compositeQuery": {
"queries": [query_a, query_b, query_c],
},
}
url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
response = requests.post(
url,
json=body,
timeout=10,
headers={
"authorization": f"Bearer {token}",
"Content-Type": "application/json",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 1, f"Expected 1 line, got {len(jsonl_lines)}"
# Verify all returned spans belong to the matched trace
json_objects = [json.loads(line) for line in jsonl_lines]
trace_ids = [obj.get("trace_id") for obj in json_objects]
assert all(tid == parent_trace_id for tid in trace_ids)
# Verify the parent span (returnSpansFrom = "A") is present
span_names = [obj.get("name") for obj in json_objects]
assert "parent-operation" in span_names
def test_export_traces_with_select_fields(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_traces: Callable[[List[Traces]], None],
) -> None:
"""
Setup:
Insert traces with various attributes.
Tests:
1. Export traces with specific select fields via POST
2. Verify the specified fields are present in the output
"""
trace_id = TraceIdGenerator.trace_id()
span_id = TraceIdGenerator.span_id()
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
insert_traces(
[
Traces(
timestamp=now - timedelta(seconds=10),
duration=timedelta(seconds=2),
trace_id=trace_id,
span_id=span_id,
parent_span_id="",
name="test-operation",
kind=TracesKind.SPAN_KIND_SERVER,
status_code=TracesStatusCode.STATUS_CODE_OK,
status_message="",
resources={
"service.name": "test-service",
"deployment.environment": "production",
"host.name": "server-01",
},
attributes={
"http.method": "POST",
"http.status_code": "201",
"user.id": "user123",
},
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Calculate timestamps in nanoseconds
start_ns = int((now - timedelta(minutes=5)).timestamp() * 1e9)
end_ns = int(now.timestamp() * 1e9)
body = {
"start": start_ns,
"end": end_ns,
"requestType": "raw",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"signal": "traces",
"name": "A",
"limit": 1000,
"selectFields": [
{
"name": "trace_id",
"fieldDataType": "string",
"fieldContext": "span",
"signal": "traces",
},
{
"name": "span_id",
"fieldDataType": "string",
"fieldContext": "span",
"signal": "traces",
},
{
"name": "name",
"fieldDataType": "string",
"fieldContext": "span",
"signal": "traces",
},
{
"name": "service.name",
"fieldDataType": "string",
"fieldContext": "resource",
"signal": "traces",
},
],
},
}
]
},
}
url = signoz.self.host_configs["8080"].get("/api/v1/export_raw_data?format=jsonl")
response = requests.post(
url,
json=body,
timeout=10,
headers={
"authorization": f"Bearer {token}",
"Content-Type": "application/json",
},
)
assert response.status_code == HTTPStatus.OK
assert response.headers["Content-Type"] == "application/x-ndjson"
# Parse JSONL content
jsonl_lines = response.text.strip().split("\n")
assert len(jsonl_lines) == 1
# Verify the selected fields are present
result = json.loads(jsonl_lines[0])
assert "trace_id" in result
assert "span_id" in result
assert "name" in result
# Verify values
assert result["trace_id"] == trace_id
assert result["span_id"] == span_id
assert result["name"] == "test-operation"