Compare commits

...

3 Commits

Author SHA1 Message Date
Nikhil Soni
2c558b9bf7 fix: fix inconsistent use of http attribute in ext. api (#10169)
* fix: fix inconsistent use of http attribute in ext. api

HTTP attributes such as http.url and url.full, along with server.name and net.peer.name,
were used inconsistently, leading to bugs in the aggregation queries. They were also
expensive to query, since these attributes are stored as JSON rather than as direct
columns. Using derived columns like http_url optimizes these queries, since the column
is populated from all relevant attributes during ingestion (see the sketch after these notes).

* fix: switch to using http_host instead of external_http_url

external_http_url stores the hostname, but the name
is confusing, so switch to http_host.

* fix: use constants defined where possible

* fix: fix old attribute usage in tests
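
As an illustration, here is a minimal sketch of a v5 builder-query spec that targets the derived columns. The column names (http_url, http_host) and the filter expression come from the diff below; the payload shape mirrors the OpenAPI examples added later in this PR, and the concrete values are made up.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Hypothetical trace query grouping external calls by the derived
	// http_host span column instead of net.peer.name / server.address.
	spec := map[string]any{
		"name":   "A",
		"signal": "traces",
		"aggregations": []any{
			map[string]any{"expression": "count()"},
		},
		// http_url is a derived column populated at ingestion time, so a
		// single EXISTS check replaces the old two-attribute check on
		// http.url / url.full.
		"filter": map[string]any{"expression": "http_url EXISTS AND kind_string = 'Client'"},
		"groupBy": []any{
			map[string]any{"name": "http_host", "fieldContext": "span"},
		},
	}

	out, _ := json.MarshalIndent(spec, "", "  ")
	fmt.Println(string(out))
}
```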
2026-02-19 06:21:32 +00:00
Yunus M
48b588fe48 chore: migrate .cursorrules to .cursor/rules/ format (#10351) 2026-02-19 11:28:02 +05:30
Srikanth Chekuri
5c86b80682 chore: add OpenAPI spec for /v5/query_range (#10239)
2026-02-18 20:21:37 +00:00
32 changed files with 4407 additions and 1911 deletions

7
.gitignore vendored
View File

@@ -1,8 +1,11 @@
node_modules
# editor
.vscode
!.vscode/settings.json
.zed
.idea
deploy/docker/environment_tiny/common_test
frontend/node_modules
@@ -31,8 +34,6 @@ frontend/yarn-debug.log*
frontend/yarn-error.log*
frontend/src/constants/env.ts
.idea
**/build
**/storage
**/locust-scripts/__pycache__/
@@ -229,5 +230,3 @@ cython_debug/
pyrightconfig.json
# cursor files
frontend/.cursor/

File diff suppressed because it is too large

View File

@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -106,7 +107,10 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.Handle("/api/v5/query_range", handler.New(
am.ViewAccess(ah.queryRangeV5),
querierAPI.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

View File

@@ -0,0 +1,74 @@
---
description: Core testing conventions - imports, rendering, MSW, interactions, queries
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Testing Conventions
## Imports
Always import from the test harness, never directly from `@testing-library/react`:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
```
## Router
Use the built-in router in `render`:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on their values.
## MSW
Global MSW server runs automatically. Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) =>
res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large response fixtures in `mocks-server/__mockdata_`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level events not covered by `userEvent` (e.g., scroll, resize). Wrap in `act(...)` if needed.
- Always `await` interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
## Timers
No global fake timers. Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries: `getByRole` > `findByRole` > `getByLabelText` > visible text > `data-testid` (last resort).
## Anti-patterns
- Never import from `@testing-library/react` directly
- Never use global fake timers
- Never wrap `render` in `act(...)`
- Never mock infra dependencies locally (router, react-query)
- Limit to 3-5 focused tests per file

View File

@@ -0,0 +1,54 @@
---
description: When to use global vs local mocks in tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Mock Strategy
## Use Global Mocks For
High-frequency dependencies (20+ test files):
- Core infrastructure: react-router-dom, react-query, antd
- Browser APIs: ResizeObserver, matchMedia, localStorage
- Utility libraries: date-fns, lodash
Available global mock files (from jest.config.ts):
- `uplot` -> `__mocks__/uplotMock.ts`
## Use Local Mocks For
- Business logic dependencies (API endpoints, custom hooks, domain components)
- Test-specific behavior (different data per test, error scenarios, loading states)
## Decision Tree
```
Used in 20+ test files?
YES -> Global mock
NO -> Business logic or test-specific?
YES -> Local mock
NO -> Consider global if usage grows
```
## Correct Usage
```ts
// Global mocks are already available - just import
import { useLocation } from 'react-router-dom';
// Local mocks for business logic
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData),
}));
```
## Anti-patterns
```ts
// Never re-mock globally mocked dependencies locally
jest.mock('react-router-dom', () => ({ ... }));
// Never put test-specific data in global mocks
jest.mock('../api/tracesService', () => ({ getTraces: jest.fn(() => specificTestData) }));
```

View File

@@ -0,0 +1,54 @@
---
description: TypeScript type safety requirements for Jest tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# TypeScript Type Safety in Tests
All Jest tests must be fully type-safe. Never use `any`.
## Mock Function Typing
```ts
// Use jest.mocked for module mocks
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
// Use jest.MockedFunction for standalone mocks
const mockFetch = jest.fn() as jest.MockedFunction<(id: number) => Promise<User>>;
```
## Mock Data
Define interfaces for all mock data:
```ts
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
const mockProps: ComponentProps = {
title: 'Test',
data: [mockUser],
onSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
};
```
## Hook Mocking Pattern
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
## Checklist
- All mock functions use `jest.MockedFunction<T>` or `jest.mocked()`
- All mock data has proper interfaces
- No `any` types in test files
- Component props are typed
- API response types are defined

View File

@@ -1,484 +0,0 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.
# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.
# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.
**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid the `any` type; use proper typing instead
# Unit Testing Focus
Focus on critical functionality (business logic, utility functions, component behavior)
Mock dependencies (API calls, external modules) before imports
Test multiple data scenarios (valid inputs, invalid inputs, edge cases)
Write maintainable tests with descriptive names grouped in describe blocks
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)
**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases
**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`
# Repo-specific Testing Conventions
## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.
## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.
## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */ as any);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```
## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3-5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# Example Test
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
# TypeScript Type Safety Examples
## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
id: number;
name: string;
email: string;
}
interface ApiResponse<T> {
data: T;
status: number;
message: string;
}
// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;
// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
data: { id: 1, name: 'John Doe', email: 'john@example.com' },
status: 200,
message: 'Success'
});
// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```
## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
title: string;
data: User[];
onUserSelect: (user: User) => void;
isLoading?: boolean;
}
const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
// Component implementation
};
describe('TestComponent', () => {
it('should render with proper props', () => {
// Arrange - Type the props properly
const mockProps: ComponentProps = {
title: 'Test Title',
data: [{ id: 1, name: 'John', email: 'john@example.com' }],
onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
isLoading: false
};
// Act
render(<TestComponent {...mockProps} />);
// Assert
expect(screen.getByText('Test Title')).toBeInTheDocument();
});
});
```
## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
user: User | null;
loading: boolean;
error: string | null;
refetch: () => void;
}
const useUserData = (id: number): UseUserDataReturn => {
// Hook implementation
};
describe('useUserData', () => {
it('should return user data with proper typing', () => {
// Arrange
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
mockFetchUser.mockResolvedValue({
data: mockUser,
status: 200,
message: 'Success'
});
// Act
const { result } = renderHook(() => useUserData(1));
// Assert
expect(result.current.user).toEqual(mockUser);
expect(result.current.loading).toBe(false);
expect(result.current.error).toBeNull();
});
});
```
## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
pathname: '/traces',
search: '',
hash: '',
state: null,
key: 'test-key',
...overrides,
});
// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```
# TypeScript Configuration for Jest
## Required Jest Configuration
```json
// jest.config.ts
{
"preset": "ts-jest/presets/js-with-ts-esm",
"globals": {
"ts-jest": {
"useESM": true,
"isolatedModules": true,
"tsconfig": "<rootDir>/tsconfig.jest.json"
}
},
"extensionsToTreatAsEsm": [".ts", ".tsx"],
"moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```
## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
"extends": "./tsconfig.json",
"compilerOptions": {
"types": ["jest", "@testing-library/jest-dom"],
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node"
},
"include": [
"src/**/*",
"**/*.test.ts",
"**/*.test.tsx",
"__mocks__/**/*"
]
}
```
## Common Type Safety Patterns
### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;
// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```
### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
data: T;
status: number;
}
const mockFetchData = jest.fn() as jest.MockedFunction<
<T>(endpoint: string) => Promise<MockApiResponse<T>>
>;
// Usage
mockFetchData<User>('/users').mockResolvedValue({
data: { id: 1, name: 'John' },
status: 200
});
```
### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';
type TestComponentProps = ComponentProps<typeof TestComponent>;
const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
const defaultProps: TestComponentProps = {
title: 'Test',
data: [],
onSelect: jest.fn(),
...props
};
return render(<TestComponent {...defaultProps} />);
};
```
### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
message: string;
code: number;
details?: Record<string, unknown>;
}
const mockApiError: ApiError = {
message: 'API Error',
code: 500,
details: { endpoint: '/users' }
};
mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```
## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed
# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│ ├─ react-router-dom
│ ├─ react-query
│ ├─ antd components
│ └─ browser APIs
└─ NO → Is it business logic?
├─ YES → Use Local Mock
│ ├─ API endpoints
│ ├─ Custom hooks
│ └─ Domain components
└─ NO → Is it test-specific?
├─ YES → Use Local Mock
│ ├─ Error scenarios
│ ├─ Loading states
│ └─ Specific data
└─ NO → Consider Global Mock
└─ If it becomes frequently used
```
# Common Anti-Patterns to Avoid
❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```
❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => specificTestData)
}));
```
✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```
✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData)
}));
```

View File

@@ -0,0 +1,107 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
* @summary Query range
*/
export const queryRangeV5 = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<QueryRangeV5200>({
url: `/api/v5/query_range`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getQueryRangeV5MutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['queryRangeV5'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof queryRangeV5>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return queryRangeV5(data);
};
return { mutationFn, ...mutationOptions };
};
export type QueryRangeV5MutationResult = NonNullable<
Awaited<ReturnType<typeof queryRangeV5>>
>;
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type QueryRangeV5MutationError = RenderErrorResponseDTO;
/**
* @summary Query range
*/
export const useQueryRangeV5 = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getQueryRangeV5MutationOptions(options);
return useMutation(mutationOptions);
};

File diff suppressed because it is too large

View File

@@ -14,6 +14,6 @@ export const VIEW_TYPES = {
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'net.peer.name',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
} as const;

View File

@@ -4,6 +4,7 @@ import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
import { getTopErrorsQueryPayload } from '../utils';
@@ -215,7 +216,7 @@ describe('TopErrors', () => {
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: 'net.peer.name' }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.SERVER_NAME }),
op: '=',
value: 'test-domain',
}),

View File

@@ -638,7 +638,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -685,7 +685,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -733,7 +733,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -780,7 +780,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -1302,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'e8a043b7',
key: {
key: 'net.peer.name',
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: '',
},
@@ -2198,7 +2198,7 @@ export const getEndPointZeroStateQueryPayload = (
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: 'tag',
type: '',
},
op: '=',
value: domainName,
@@ -2793,7 +2793,7 @@ export const getStatusCodeBarChartWidgetData = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: 'tag',
type: '',
},
op: '=',
value: domainName,

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
)
type ServeOpenAPIFunc func(openapi.OperationContext)
@@ -59,7 +60,39 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
}
// Add request structure
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
reqOpts := []openapi.ContentOption{openapi.WithContentType(handler.openAPIDef.RequestContentType)}
if len(handler.openAPIDef.RequestExamples) > 0 {
reqOpts = append(reqOpts, openapi.WithCustomize(func(cor openapi.ContentOrReference) {
rbOrRef, ok := cor.(*openapi3.RequestBodyOrRef)
if !ok || rbOrRef.RequestBody == nil {
return
}
ct := handler.openAPIDef.RequestContentType
if ct == "" {
ct = "application/json"
}
mt, exists := rbOrRef.RequestBody.Content[ct]
if !exists {
return
}
if mt.Examples == nil {
mt.Examples = make(map[string]openapi3.ExampleOrRef)
}
for _, ex := range handler.openAPIDef.RequestExamples {
val := ex.Value
oaExample := openapi3.Example{Value: &val}
if ex.Summary != "" {
oaExample.WithSummary(ex.Summary)
}
if ex.Description != "" {
oaExample.WithDescription(ex.Description)
}
mt.Examples[ex.Name] = openapi3.ExampleOrRef{Example: &oaExample}
}
rbOrRef.RequestBody.Content[ct] = mt
}))
}
opCtx.AddReqStructure(handler.openAPIDef.Request, reqOpts...)
// Add request query structure
opCtx.AddReqStructure(handler.openAPIDef.RequestQuery)

View File

@@ -9,6 +9,14 @@ import (
"github.com/swaggest/rest/openapi"
)
// OpenAPIExample is a named example for an OpenAPI operation.
type OpenAPIExample struct {
Name string
Summary string
Description string
Value any
}
// Def is the definition of an OpenAPI operation
type OpenAPIDef struct {
ID string
@@ -18,6 +26,7 @@ type OpenAPIDef struct {
Request any
RequestQuery any
RequestContentType string
RequestExamples []OpenAPIExample
Response any
ResponseContentType string
SuccessStatusCode int

View File

@@ -12,60 +12,30 @@ import (
)
const (
urlPathKeyLegacy = "http.url"
serverAddressKeyLegacy = "net.peer.name"
urlPathKey = "url.full"
serverAddressKey = "server.address"
derivedKeyHTTPURL = "http_url" // https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url
derivedKeyHTTPHost = "http_host"
)
var defaultStepInterval = 60 * time.Second
type SemconvFieldMapping struct {
LegacyField string
CurrentField string
FieldType telemetrytypes.FieldDataType
Context telemetrytypes.FieldContext
}
var dualSemconvGroupByKeys = map[string][]qbtypes.GroupByKey{
"server": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
var (
groupByKeyHTTPHost = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPHost,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
}
groupByKeyHTTPURL = qbtypes.GroupByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: derivedKeyHTTPURL,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
},
},
"url": {
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKey,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: urlPathKeyLegacy,
FieldDataType: telemetrytypes.FieldDataTypeString,
FieldContext: telemetrytypes.FieldContextAttribute,
Signal: telemetrytypes.SignalTraces,
},
},
},
}
}
)
func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
@@ -114,103 +84,6 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
return result
}
func MergeSemconvColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
if result == nil || result.Data.Results == nil {
return result
}
for _, res := range result.Data.Results {
scalarData, ok := res.(*qbtypes.ScalarData)
if !ok {
continue
}
serverAddressKeyIdx := -1
serverAddressKeyLegacyIdx := -1
for i, col := range scalarData.Columns {
if col.Name == serverAddressKey {
serverAddressKeyIdx = i
} else if col.Name == serverAddressKeyLegacy {
serverAddressKeyLegacyIdx = i
}
}
if serverAddressKeyIdx == -1 || serverAddressKeyLegacyIdx == -1 {
continue
}
var newRows [][]any
for _, row := range scalarData.Data {
if len(row) <= serverAddressKeyIdx || len(row) <= serverAddressKeyLegacyIdx {
continue
}
var serverName any
if isValidValue(row[serverAddressKeyIdx]) {
serverName = row[serverAddressKeyIdx]
} else if isValidValue(row[serverAddressKeyLegacyIdx]) {
serverName = row[serverAddressKeyLegacyIdx]
}
if serverName != nil {
newRow := make([]any, len(row)-1)
newRow[0] = serverName
targetIdx := 1
for i, val := range row {
if i != serverAddressKeyLegacyIdx && i != serverAddressKeyIdx {
if targetIdx < len(newRow) {
newRow[targetIdx] = val
targetIdx++
}
}
}
newRows = append(newRows, newRow)
}
}
newColumns := make([]*qbtypes.ColumnDescriptor, len(scalarData.Columns)-1)
targetIdx := 0
for i, col := range scalarData.Columns {
if i == serverAddressKeyIdx {
newCol := &qbtypes.ColumnDescriptor{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: serverAddressKeyLegacy,
FieldDataType: col.FieldDataType,
FieldContext: col.FieldContext,
Signal: col.Signal,
},
QueryName: col.QueryName,
AggregationIndex: col.AggregationIndex,
Meta: col.Meta,
Type: col.Type,
}
newColumns[targetIdx] = newCol
targetIdx++
} else if i != serverAddressKeyLegacyIdx {
newColumns[targetIdx] = col
targetIdx++
}
}
scalarData.Columns = newColumns
scalarData.Data = newRows
}
return result
}
func isValidValue(val any) bool {
if val == nil {
return false
}
if str, ok := val.(string); ok {
return str != "" && str != "n/a"
}
return true
}
func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRangeResponse {
filteredResults := make([]*qbtypes.QueryRangeResponse, 0, len(results))
@@ -261,7 +134,7 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
for _, label := range series.Labels {
if label.Key.Name == serverAddressKeyLegacy || label.Key.Name == serverAddressKey {
if label.Key.Name == derivedKeyHTTPHost {
if strVal, ok := label.Value.(string); ok {
if net.ParseIP(strVal) != nil {
return false
@@ -274,12 +147,10 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
func shouldIncludeRow(row *qbtypes.RawRow) bool {
if row.Data != nil {
for _, key := range []string{serverAddressKeyLegacy, serverAddressKey} {
if domainVal, ok := row.Data[key]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {
if domainStr, ok := domainVal.(string); ok {
if net.ParseIP(domainStr) != nil {
return false
}
}
}
@@ -287,8 +158,8 @@ func shouldIncludeRow(row *qbtypes.RawRow) bool {
return true
}
func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append(base, additional...)
func mergeGroupBy(base qbtypes.GroupByKey, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
return append([]qbtypes.GroupByKey{base}, additional...)
}
func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
@@ -354,10 +225,10 @@ func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "count_distinct(http.url)"},
{Expression: fmt.Sprintf("count_distinct(%s)", derivedKeyHTTPURL)},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -373,7 +244,7 @@ func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Qu
{Expression: "max(timestamp)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -389,7 +260,7 @@ func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "rate()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -407,7 +278,7 @@ func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Query
{Expression: "count()"},
},
Filter: filter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -423,7 +294,7 @@ func buildTotalSpanQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.Q
{Expression: "count()"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -439,7 +310,7 @@ func buildP99Query(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEn
{Expression: "p99(duration_nano)"},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPHost, req.GroupBy),
},
}
}
@@ -462,10 +333,10 @@ func buildEndpointsInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtyp
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "rate(http.url)"},
{Expression: fmt.Sprintf("rate(%s)", derivedKeyHTTPURL)},
},
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["url"], req.GroupBy),
GroupBy: mergeGroupBy(groupByKeyHTTPURL, req.GroupBy),
},
}
}
@@ -519,8 +390,7 @@ func buildLastSeenInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtype
}
func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
baseExpression := fmt.Sprintf("(%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
urlPathKeyLegacy, urlPathKey)
baseExpression := fmt.Sprintf("%s EXISTS AND kind_string = 'Client'", derivedKeyHTTPURL)
if additionalFilter != nil && additionalFilter.Expression != "" {
// even if it contains kind_string we add with an AND so it doesn't matter if the user is overriding it.

View File

@@ -1,9 +1,10 @@
package thirdpartyapi
import (
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
"testing"
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
@@ -28,7 +29,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "192.168.1.1",
},
},
@@ -36,7 +37,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "example.com",
},
},
@@ -60,7 +61,7 @@ func TestFilterResponse(t *testing.T) {
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
Key: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Value: "example.com",
},
},
@@ -84,12 +85,12 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
"net.peer.name": "192.168.1.1",
derivedKeyHTTPHost: "192.168.1.1",
},
},
{
Data: map[string]any{
"net.peer.name": "example.com",
derivedKeyHTTPHost: "example.com",
},
},
},
@@ -106,7 +107,7 @@ func TestFilterResponse(t *testing.T) {
Rows: []*qbtypes.RawRow{
{
Data: map[string]any{
"net.peer.name": "example.com",
derivedKeyHTTPHost: "example.com",
},
},
},

454
pkg/querier/openapi.go Normal file
View File

@@ -0,0 +1,454 @@
package querier
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)
// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}
var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}
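
Given the definition above, here is a hedged client-side sketch of invoking the documented endpoint. The base URL and authentication are deployment-specific placeholders; the payload reuses the shape of the traces_scalar example.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical request body following the traces_scalar example above.
	body := map[string]any{
		"schemaVersion": "v1",
		"start":         1640995200000,
		"end":           1640998800000,
		"requestType":   "scalar",
		"compositeQuery": map[string]any{
			"queries": []any{
				map[string]any{
					"type": "builder_query",
					"spec": map[string]any{
						"name":         "A",
						"signal":       "traces",
						"aggregations": []any{map[string]any{"expression": "count()"}},
					},
				},
			},
		},
	}

	buf, _ := json.Marshal(body)
	// Base URL is a placeholder; authentication (API key) is omitted here
	// and is deployment-specific.
	req, _ := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(buf))
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```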

View File

@@ -5001,7 +5001,6 @@ func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false
@@ -5058,7 +5057,6 @@ func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
return
}
result = thirdpartyapi.MergeSemconvColumns(result)
result = thirdpartyapi.FilterIntermediateColumns(result)
// Filter IP addresses if ShowIp is false

View File

@@ -1,7 +1,10 @@
package signoz
import (
"bytes"
"context"
"encoding/json"
"net/http"
"os"
"reflect"
@@ -22,10 +25,13 @@ import (
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
"gopkg.in/yaml.v2"
)
type OpenAPI struct {
@@ -57,6 +63,10 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -90,10 +100,67 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
return err
}
spec, err := openapi.reflector.Spec.MarshalYAML()
// The library's MarshalYAML does a JSON round-trip that converts all numbers
// to float64, causing large integers (e.g. epoch millisecond timestamps) to
// render in scientific notation (1.6409952e+12).
jsonData, err := openapi.reflector.Spec.MarshalJSON()
if err != nil {
return err
}
dec := json.NewDecoder(bytes.NewReader(jsonData))
dec.UseNumber()
var v any
if err := dec.Decode(&v); err != nil {
return err
}
convertJSONNumbers(v)
spec, err := yaml.Marshal(v)
if err != nil {
return err
}
return os.WriteFile(path, spec, 0o600)
}
// convertJSONNumbers recursively walks a decoded JSON structure and converts
// json.Number values to int64 (preferred) or float64 so that YAML marshaling
// renders them as plain numbers instead of quoted strings.
func convertJSONNumbers(v interface{}) {
switch val := v.(type) {
case map[string]interface{}:
for k, elem := range val {
if n, ok := elem.(json.Number); ok {
if i, err := n.Int64(); err == nil {
val[k] = i
} else if f, err := n.Float64(); err == nil {
val[k] = f
}
} else {
convertJSONNumbers(elem)
}
}
case []interface{}:
for i, elem := range val {
if n, ok := elem.(json.Number); ok {
if i64, err := n.Int64(); err == nil {
val[i] = i64
} else if f, err := n.Float64(); err == nil {
val[i] = f
}
} else {
convertJSONNumbers(elem)
}
}
}
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
querier.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
}

View File

@@ -172,7 +172,6 @@ var (
func (TimeAggregation) Enum() []any {
return []any{
TimeAggregationUnspecified,
TimeAggregationLatest,
TimeAggregationSum,
TimeAggregationAvg,
@@ -205,7 +204,6 @@ var (
func (SpaceAggregation) Enum() []any {
return []any{
SpaceAggregationUnspecified,
SpaceAggregationSum,
SpaceAggregationAvg,
SpaceAggregationMin,

View File

@@ -10,10 +10,35 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type Step struct{ time.Duration }
var _ jsonschema.Exposer = Step{}
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
func (s *Step) UnmarshalJSON(b []byte) error {
if len(b) == 0 {
return nil
@@ -181,6 +206,14 @@ var (
OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
)
// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}
var (
OrderDirectionMap = map[string]OrderDirection{
"asc": OrderDirectionAsc,
@@ -203,6 +236,19 @@ var (
ReduceToMedian = ReduceTo{valuer.NewString("median")}
)
// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}
// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
// reduceTo can be one of: last, sum, avg, min, max, count, median
// if reduceTo is not recognized, the function returns the original series
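
Since the body of Step.UnmarshalJSON is elided in the hunk above, here is a minimal standalone re-implementation of the contract described by Step.JSONSchema — a Go duration string or a bare number of seconds — for illustration only, not the code from this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// step mirrors the documented contract of Step: accept either a Go duration
// string ("60s") or a bare number of seconds (60). Illustrative only.
type step struct{ time.Duration }

func (s *step) UnmarshalJSON(b []byte) error {
	var asString string
	if err := json.Unmarshal(b, &asString); err == nil {
		d, err := time.ParseDuration(asString)
		if err != nil {
			return err
		}
		s.Duration = d
		return nil
	}
	var seconds float64
	if err := json.Unmarshal(b, &seconds); err != nil {
		return err
	}
	s.Duration = time.Duration(seconds * float64(time.Second))
	return nil
}

func main() {
	var a, b step
	_ = json.Unmarshal([]byte(`"5m"`), &a)
	_ = json.Unmarshal([]byte(`120`), &b)
	fmt.Println(a.Duration, b.Duration) // 5m0s 2m0s
}
```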

View File

@@ -36,6 +36,30 @@ var (
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)
// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}
// Validate checks if the FunctionName is valid and one of the known types
func (fn FunctionName) Validate() error {
validFunctions := []FunctionName{

View File

@@ -16,6 +16,17 @@ var (
JoinTypeCross = JoinType{valuer.NewString("cross")}
)
// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}
type QueryRef struct {
Name string `json:"name"`
}

View File

@@ -2,6 +2,8 @@ package querybuildertypesv5
import (
"context"
"github.com/swaggest/jsonschema-go"
)
type Query interface {
@@ -29,4 +31,12 @@ type ExecStats struct {
StepIntervals map[string]uint64 `json:"stepIntervals,omitempty"`
}
var _ jsonschema.Preparer = &ExecStats{}
// PrepareJSONSchema adds a description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}
type TimeRange struct{ From, To uint64 } // ms since epoch

View File

@@ -16,3 +16,17 @@ var (
QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
QueryTypePromQL = QueryType{valuer.NewString("promql")}
)
// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}

View File

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type QueryEnvelope struct {
@@ -18,6 +19,71 @@ type QueryEnvelope struct {
Spec any `json:"spec"`
}
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}
var _ jsonschema.OneOfExposer = QueryEnvelope{}
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}
// UnmarshalJSON implements a custom JSON unmarshaler for QueryEnvelope.
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
var shadow struct {
@@ -130,6 +196,12 @@ type CompositeQuery struct {
Queries []QueryEnvelope `json:"queries"`
}
// PrepareJSONSchema adds a description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}
// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
type Alias CompositeQuery
@@ -192,6 +264,16 @@ var (
TextBoxVariableType = VariableType{valuer.NewString("text")}
)
// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}
type VariableItem struct {
Type VariableType `json:"type"`
Value any `json:"value"`
@@ -217,6 +299,12 @@ type QueryRangeRequest struct {
FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
}
// PrepareJSONSchema adds a description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
return nil
}
func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
stepsMap := make(map[string]int64)
for _, query := range r.CompositeQuery.Queries {
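
These hooks (the QueryEnvelope oneOf variants, the Preparer methods, the VariableType enum) only take effect when the OpenAPI document is generated. A standalone sketch of inspecting their combined output, again assuming the pkg/types/querybuildertypesv5 import path; this is not how the spec is wired into the server in this PR:

```go
package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypesv5"
	"github.com/swaggest/jsonschema-go"
)

func main() {
	var reflector jsonschema.Reflector
	// Descriptions from PrepareJSONSchema and the oneOf variants from
	// QueryEnvelope.JSONSchemaOneOf should all appear in the reflected schema.
	schema, err := reflector.Reflect(qbtypes.QueryRangeRequest{})
	if err != nil {
		panic(err)
	}
	out, _ := json.MarshalIndent(schema, "", "  ")
	fmt.Println(string(out))
}
```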

View File

@@ -30,3 +30,15 @@ var (
func (r RequestType) IsAggregation() bool {
return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}
// Enum implements jsonschema.Enum; returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}

View File

@@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/swaggest/jsonschema-go"
)
type QBEvent struct {
@@ -42,6 +43,17 @@ type QueryData struct {
Results []any `json:"results"`
}
var _ jsonschema.OneOfExposer = QueryData{}
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}
type QueryRangeResponse struct {
Type RequestType `json:"type"`
Data QueryData `json:"data"`
@@ -52,6 +64,14 @@ type QueryRangeResponse struct {
QBEvent *QBEvent `json:"-"`
}
var _ jsonschema.Preparer = &QueryRangeResponse{}
// PrepareJSONSchema adds a description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}
type TimeSeriesData struct {
QueryName string `json:"queryName"`
Aggregations []*AggregationBucket `json:"aggregations"`
@@ -159,6 +179,14 @@ var (
ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
)
// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}
type ColumnDescriptor struct {
telemetrytypes.TelemetryFieldKey
QueryName string `json:"queryName"`
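
Since Results is declared as []any, the oneOf above is what tells API consumers which shapes to expect. A hedged sketch of how server-side code holding a QueryData might branch on the concrete types; whether the querier stores pointers or values in Results is an assumption here (pointers):

```go
package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypesv5"
)

// describeResults branches on the concrete result types documented by JSONSchemaOneOf.
// Assumes Results holds pointers to the result structs.
func describeResults(data qbtypes.QueryData) {
	for _, result := range data.Results {
		switch r := result.(type) {
		case *qbtypes.TimeSeriesData:
			fmt.Println("time series result for query", r.QueryName)
		case *qbtypes.ScalarData:
			fmt.Println("scalar result")
		case *qbtypes.RawData:
			fmt.Println("raw rows result")
		default:
			fmt.Printf("unexpected result type %T\n", r)
		}
	}
}

func main() {
	data := qbtypes.QueryData{
		Results: []any{&qbtypes.TimeSeriesData{QueryName: "A"}},
	}
	describeResults(data)
}
```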

View File

@@ -172,3 +172,18 @@ func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
}
return true
}
// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
return []any{
FieldContextMetric,
FieldContextLog,
FieldContextSpan,
// FieldContextTrace,
FieldContextResource,
// FieldContextScope,
FieldContextAttribute,
// FieldContextEvent,
FieldContextBody,
}
}

View File

@@ -177,3 +177,19 @@ func (f FieldDataType) TagDataType() string {
return "string"
}
}
// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
return []any{
FieldDataTypeString,
FieldDataTypeBool,
FieldDataTypeFloat64,
FieldDataTypeInt64,
FieldDataTypeNumber,
// FieldDataTypeArrayString,
// FieldDataTypeArrayFloat64,
// FieldDataTypeArrayBool,
// FieldDataTypeArrayInt64,
// FieldDataTypeArrayNumber,
}
}

View File

@@ -12,3 +12,12 @@ var (
SignalMetrics = Signal{valuer.NewString("metrics")}
SignalUnspecified = Signal{valuer.NewString("")}
)
// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}

View File

@@ -10,3 +10,10 @@ var (
SourceMeter = Source{valuer.NewString("meter")}
SourceUnspecified = Source{valuer.NewString("")}
)
// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
return []any{
SourceMeter,
}
}