Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-07 02:12:11 +00:00)

Compare commits: `issue_3017` ... `test/uplot` (18 commits)

- 8f4bc50b9c
- e4693ce64c
- ca9b3a910a
- ff336719b9
- 9b02b3daf3
- 9dc7d2389a
- 87b8855bfe
- 9f8e655fcd
- da5860297f
- 54fca5ba44
- cba9c820a4
- 66f4c3d6ec
- a0f407a848
- 3562de8fbb
- ef80fb39fd
- 594d4dc737
- 01415b58be
- f7728c9019
@@ -2226,6 +2226,12 @@ paths:
        schema:
          $ref: '#/components/schemas/RenderErrorResponse'
      description: Bad Request
    "422":
      content:
        application/json:
          schema:
            $ref: '#/components/schemas/RenderErrorResponse'
      description: Unprocessable Entity
    "500":
      content:
        application/json:

@@ -2665,6 +2671,7 @@ paths:
    parameters:
      - in: query
        name: metricName
        required: true
        schema:
          type: string
    responses:

@@ -2719,6 +2726,7 @@ paths:
    parameters:
      - in: query
        name: metricName
        required: true
        schema:
          type: string
    responses:

@@ -2774,6 +2782,7 @@ paths:
    parameters:
      - in: query
        name: metricName
        required: true
        schema:
          type: string
    responses:

@@ -2940,6 +2949,7 @@ paths:
    parameters:
      - in: query
        name: metricName
        required: true
        schema:
          type: string
    responses:

@@ -3807,6 +3817,9 @@ components:
          type: string
        alertName:
          type: string
      required:
        - alertName
        - alertId
      type: object
    MetricsexplorertypesMetricAlertsResponse:
      properties:

@@ -3815,6 +3828,8 @@ components:
            $ref: '#/components/schemas/MetricsexplorertypesMetricAlert'
          nullable: true
          type: array
      required:
        - alerts
      type: object
    MetricsexplorertypesMetricAttribute:
      properties:

@@ -3828,6 +3843,10 @@ components:
              type: string
          nullable: true
          type: array
      required:
        - key
        - values
        - valueCount
      type: object
    MetricsexplorertypesMetricAttributesRequest:
      properties:

@@ -3839,6 +3858,8 @@ components:
        start:
          nullable: true
          type: integer
      required:
        - metricName
      type: object
    MetricsexplorertypesMetricAttributesResponse:
      properties:

@@ -3850,6 +3871,9 @@ components:
        totalKeys:
          format: int64
          type: integer
      required:
        - attributes
        - totalKeys
      type: object
    MetricsexplorertypesMetricDashboard:
      properties:

@@ -3861,6 +3885,11 @@ components:
          type: string
        widgetName:
          type: string
      required:
        - dashboardName
        - dashboardId
        - widgetId
        - widgetName
      type: object
    MetricsexplorertypesMetricDashboardsResponse:
      properties:

@@ -3869,6 +3898,8 @@ components:
            $ref: '#/components/schemas/MetricsexplorertypesMetricDashboard'
          nullable: true
          type: array
      required:
        - dashboards
      type: object
    MetricsexplorertypesMetricHighlightsResponse:
      properties:

@@ -3884,6 +3915,11 @@ components:
        totalTimeSeries:
          minimum: 0
          type: integer
      required:
        - dataPoints
        - lastReceived
        - totalTimeSeries
        - activeTimeSeries
      type: object
    MetricsexplorertypesMetricMetadata:
      properties:

@@ -3892,11 +3928,27 @@ components:
        isMonotonic:
          type: boolean
        temporality:
          enum:
            - delta
            - cumulative
            - unspecified
          type: string
        type:
          enum:
            - gauge
            - sum
            - histogram
            - summary
            - exponentialhistogram
          type: string
        unit:
          type: string
      required:
        - description
        - type
        - unit
        - temporality
        - isMonotonic
      type: object
    MetricsexplorertypesStat:
      properties:

@@ -3911,9 +3963,22 @@ components:
          minimum: 0
          type: integer
        type:
          enum:
            - gauge
            - sum
            - histogram
            - summary
            - exponentialhistogram
          type: string
        unit:
          type: string
      required:
        - metricName
        - description
        - type
        - unit
        - timeseries
        - samples
      type: object
    MetricsexplorertypesStatsRequest:
      properties:

@@ -3931,6 +3996,10 @@ components:
        start:
          format: int64
          type: integer
      required:
        - start
        - end
        - limit
      type: object
    MetricsexplorertypesStatsResponse:
      properties:

@@ -3942,6 +4011,9 @@ components:
        total:
          minimum: 0
          type: integer
      required:
        - metrics
        - total
      type: object
    MetricsexplorertypesTreemapEntry:
      properties:

@@ -3953,6 +4025,10 @@ components:
        totalValue:
          minimum: 0
          type: integer
      required:
        - metricName
        - percentage
        - totalValue
      type: object
    MetricsexplorertypesTreemapRequest:
      properties:

@@ -3964,10 +4040,18 @@ components:
        limit:
          type: integer
        mode:
          enum:
            - timeseries
            - samples
          type: string
        start:
          format: int64
          type: integer
      required:
        - start
        - end
        - limit
        - mode
      type: object
    MetricsexplorertypesTreemapResponse:
      properties:

@@ -3981,6 +4065,9 @@ components:
            $ref: '#/components/schemas/MetricsexplorertypesTreemapEntry'
          nullable: true
          type: array
      required:
        - timeseries
        - samples
      type: object
    MetricsexplorertypesUpdateMetricMetadataRequest:
      properties:

@@ -3991,11 +4078,28 @@ components:
        metricName:
          type: string
        temporality:
          enum:
            - delta
            - cumulative
            - unspecified
          type: string
        type:
          enum:
            - gauge
            - sum
            - histogram
            - summary
            - exponentialhistogram
          type: string
        unit:
          type: string
      required:
        - metricName
        - type
        - description
        - unit
        - temporality
        - isMonotonic
      type: object
    PreferencetypesPreference:
      properties:

@@ -4278,6 +4382,9 @@ components:
          type: string
        orgId:
          type: string
      required:
        - orgId
        - email
      type: object
    TypesPostableInvite:
      properties:
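The spec changes above add "422" responses alongside "400" and "500", and make the `metricName` query parameter required on the metrics-explorer endpoints. All three error statuses share the `RenderErrorResponse` schema, so a client can handle them in a single branch. A minimal sketch under that assumption (the `fetchOrThrow` helper and its import path are hypothetical; only the status codes and the shared schema come from the spec):

```typescript
import type { RenderErrorResponseDTO } from './sigNoz.schemas'; // import path assumed

// 400, 422, and 500 all carry the same error schema per the spec above.
async function fetchOrThrow<T>(url: string): Promise<T> {
  const res = await fetch(url);
  if ([400, 422, 500].includes(res.status)) {
    const body = (await res.json()) as RenderErrorResponseDTO;
    throw new Error(`HTTP ${res.status}: ${JSON.stringify(body)}`);
  }
  return res.json() as Promise<T>;
}
```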
@@ -61,6 +61,8 @@ module.exports = {
  curly: 'error', // Requires curly braces for all control statements
  eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
  'no-console': ['error', { allow: ['warn', 'error'] }], // Errors on console.log, allows console.warn/error
  // TODO: Change this to error in May 2026
  'max-params': ['warn', 3], // a function may take at most 3 params; beyond that, pass an options object

  // TypeScript rules
  '@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions

@@ -116,7 +118,7 @@ module.exports = {
    },
  ],
  'import/no-extraneous-dependencies': ['error', { devDependencies: true }], // Prevents importing packages not in package.json
  // 'import/no-cycle': 'warn', // TODO: Enable later to detect circular dependencies
  'import/no-cycle': 'warn', // Warns about circular dependencies

  // Import sorting rules
  'simple-import-sort/imports': [

@@ -146,6 +148,19 @@ module.exports = {
  'sonarjs/no-duplicate-string': 'off', // Disabled - can be noisy (enable periodically to check)
  },
  overrides: [
    {
      files: [
        '**/*.test.{js,jsx,ts,tsx}',
        '**/*.spec.{js,jsx,ts,tsx}',
        '**/__tests__/**/*.{js,jsx,ts,tsx}',
      ],
      rules: {
        // Tests often have intentional duplication and complexity - disable SonarJS rules
        'sonarjs/cognitive-complexity': 'off', // Tests can be complex
        'sonarjs/no-identical-functions': 'off', // Similar test patterns are OK
        'sonarjs/no-small-switch': 'off', // Small switches are OK in tests
      },
    },
    {
      files: ['src/api/generated/**/*.ts'],
      rules: {

@@ -153,7 +168,6 @@ module.exports = {
        '@typescript-eslint/explicit-module-boundary-types': 'off',
        'no-nested-ternary': 'off',
        '@typescript-eslint/no-unused-vars': 'warn',
        'sonarjs/no-duplicate-string': 'off',
      },
    },
  ],
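For illustration, the kind of refactor the new `max-params` warning asks for; the function here is hypothetical, not from the codebase:

```typescript
// Before: four positional params, which trips 'max-params': ['warn', 3].
function formatSeriesLabel(
  name: string,
  unit: string,
  color: string,
  hidden: boolean,
): string {
  return `${name} (${unit}, ${color})${hidden ? ' [hidden]' : ''}`;
}

// After: a single typed options object; call sites become self-documenting.
interface FormatSeriesLabelOptions {
  name: string;
  unit: string;
  color: string;
  hidden: boolean;
}

function formatSeriesLabelFromOptions({
  name,
  unit,
  color,
  hidden,
}: FormatSeriesLabelOptions): string {
  return `${name} (${unit}, ${color})${hidden ? ' [hidden]' : ''}`;
}
```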
@@ -2,6 +2,11 @@

Embrace the spirit of collaboration and contribute to the success of our open-source project by adhering to these frontend development guidelines with precision and passion.

### Export Style

- **React components** (`src/components/`, `src/container/`, `src/pages/`): Prefer **default exports** for the main component in each file
- **Utilities, hooks, APIs, types, constants** (`src/utils/`, `src/hooks/`, `src/api/`, `src/lib/`, `src/types/`, `src/constants/`): Prefer **named exports** for better tree-shaking and explicit imports

### React and Components

- Strive to create small and modular components, ensuring they are divided into individual pieces for improved maintainability and reusability.
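A quick sketch of the two export styles side by side (hypothetical files, not from the repo):

```tsx
// src/components/StatusBadge.tsx — React component: default export.
export default function StatusBadge(): JSX.Element {
  return <span className="status-badge">ok</span>;
}

// src/utils/duration.ts — utility module: named exports, tree-shakeable.
export const MS_PER_SECOND = 1000;

export function toSeconds(ms: number): number {
  return ms / MS_PER_SECOND;
}
```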
@@ -12,6 +12,8 @@ export interface MockUPlotInstance {
export interface MockUPlotPaths {
  spline: jest.Mock;
  bars: jest.Mock;
  linear: jest.Mock;
  stepped: jest.Mock;
}

// Create mock instance methods

@@ -23,10 +25,20 @@ const createMockUPlotInstance = (): MockUPlotInstance => ({
  setSeries: jest.fn(),
});

// Create mock paths
const mockPaths: MockUPlotPaths = {
  spline: jest.fn(),
  bars: jest.fn(),
// Path builder: (self, seriesIdx, idx0, idx1) => paths or null
const createMockPathBuilder = (): jest.Mock =>
  jest.fn(() => ({
    stroke: jest.fn(),
    fill: jest.fn(),
    clip: jest.fn(),
  }));

// Create mock paths - linear, spline, stepped needed by UPlotSeriesBuilder.getPathBuilder
const mockPaths = {
  spline: jest.fn(() => createMockPathBuilder()),
  bars: jest.fn(() => createMockPathBuilder()),
  linear: jest.fn(() => createMockPathBuilder()),
  stepped: jest.fn((opts?: { align?: number }) => createMockPathBuilder()),
};

// Mock static methods
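Each paths factory in the mock is now a `jest.fn` that returns a mock path builder, mirroring uPlot's two-step shape: the factory produces a builder, and the builder produces drawable paths. A sketch of what a test can assert against this mock, assuming `mockPaths` is exposed from the mock module above:

```typescript
// The factory call hands back a mock builder; calling the builder yields
// the { stroke, fill, clip } shape that uPlot renders from.
const buildPaths = mockPaths.spline();
const paths = buildPaths();

expect(mockPaths.spline).toHaveBeenCalledTimes(1);
expect(paths).toHaveProperty('stroke');
expect(paths).toHaveProperty('clip');
```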
@@ -28,8 +28,10 @@ import type {
  GatewaytypesPostableIngestionKeyLimitDTO,
  GatewaytypesUpdatableIngestionKeyLimitDTO,
  GetIngestionKeys200,
  GetIngestionKeysParams,
  RenderErrorResponseDTO,
  SearchIngestionKeys200,
  SearchIngestionKeysParams,
  UpdateIngestionKeyLimitPathParameters,
  UpdateIngestionKeyPathParameters,
} from '../sigNoz.schemas';

@@ -42,35 +44,44 @@ type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
 * This endpoint returns the ingestion keys for a workspace
 * @summary Get ingestion keys for workspace
 */
export const getIngestionKeys = (signal?: AbortSignal) => {
export const getIngestionKeys = (
  params?: GetIngestionKeysParams,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<GetIngestionKeys200>({
    url: `/api/v2/gateway/ingestion_keys`,
    method: 'GET',
    params,
    signal,
  });
};

export const getGetIngestionKeysQueryKey = () => {
  return ['getIngestionKeys'] as const;
export const getGetIngestionKeysQueryKey = (
  params?: GetIngestionKeysParams,
) => {
  return ['getIngestionKeys', ...(params ? [params] : [])] as const;
};

export const getGetIngestionKeysQueryOptions = <
  TData = Awaited<ReturnType<typeof getIngestionKeys>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<
    Awaited<ReturnType<typeof getIngestionKeys>>,
    TError,
    TData
  >;
}) => {
>(
  params?: GetIngestionKeysParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getIngestionKeys>>,
      TError,
      TData
    >;
  },
) => {
  const { query: queryOptions } = options ?? {};

  const queryKey = queryOptions?.queryKey ?? getGetIngestionKeysQueryKey();
  const queryKey = queryOptions?.queryKey ?? getGetIngestionKeysQueryKey(params);

  const queryFn: QueryFunction<Awaited<ReturnType<typeof getIngestionKeys>>> = ({
    signal,
  }) => getIngestionKeys(signal);
  }) => getIngestionKeys(params, signal);

  return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
    Awaited<ReturnType<typeof getIngestionKeys>>,

@@ -91,14 +102,17 @@ export type GetIngestionKeysQueryError = RenderErrorResponseDTO;
export function useGetIngestionKeys<
  TData = Awaited<ReturnType<typeof getIngestionKeys>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<
    Awaited<ReturnType<typeof getIngestionKeys>>,
    TError,
    TData
  >;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getGetIngestionKeysQueryOptions(options);
>(
  params?: GetIngestionKeysParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getIngestionKeys>>,
      TError,
      TData
    >;
  },
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getGetIngestionKeysQueryOptions(params, options);

  const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
    queryKey: QueryKey;

@@ -114,10 +128,11 @@ export function useGetIngestionKeys<
 */
export const invalidateGetIngestionKeys = async (
  queryClient: QueryClient,
  params?: GetIngestionKeysParams,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(
    { queryKey: getGetIngestionKeysQueryKey() },
    { queryKey: getGetIngestionKeysQueryKey(params) },
    options,
  );

@@ -662,35 +677,45 @@ export const useUpdateIngestionKeyLimit = <
 * This endpoint returns the ingestion keys for a workspace
 * @summary Search ingestion keys for workspace
 */
export const searchIngestionKeys = (signal?: AbortSignal) => {
export const searchIngestionKeys = (
  params?: SearchIngestionKeysParams,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<SearchIngestionKeys200>({
    url: `/api/v2/gateway/ingestion_keys/search`,
    method: 'GET',
    params,
    signal,
  });
};

export const getSearchIngestionKeysQueryKey = () => {
  return ['searchIngestionKeys'] as const;
export const getSearchIngestionKeysQueryKey = (
  params?: SearchIngestionKeysParams,
) => {
  return ['searchIngestionKeys', ...(params ? [params] : [])] as const;
};

export const getSearchIngestionKeysQueryOptions = <
  TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<
    Awaited<ReturnType<typeof searchIngestionKeys>>,
    TError,
    TData
  >;
}) => {
>(
  params?: SearchIngestionKeysParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof searchIngestionKeys>>,
      TError,
      TData
    >;
  },
) => {
  const { query: queryOptions } = options ?? {};

  const queryKey = queryOptions?.queryKey ?? getSearchIngestionKeysQueryKey();
  const queryKey =
    queryOptions?.queryKey ?? getSearchIngestionKeysQueryKey(params);

  const queryFn: QueryFunction<
    Awaited<ReturnType<typeof searchIngestionKeys>>
  > = ({ signal }) => searchIngestionKeys(signal);
  > = ({ signal }) => searchIngestionKeys(params, signal);

  return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
    Awaited<ReturnType<typeof searchIngestionKeys>>,

@@ -711,14 +736,17 @@ export type SearchIngestionKeysQueryError = RenderErrorResponseDTO;
export function useSearchIngestionKeys<
  TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<
    Awaited<ReturnType<typeof searchIngestionKeys>>,
    TError,
    TData
  >;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getSearchIngestionKeysQueryOptions(options);
>(
  params?: SearchIngestionKeysParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof searchIngestionKeys>>,
      TError,
      TData
    >;
  },
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getSearchIngestionKeysQueryOptions(params, options);

  const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
    queryKey: QueryKey;

@@ -734,10 +762,11 @@ export function useSearchIngestionKeys<
 */
export const invalidateSearchIngestionKeys = async (
  queryClient: QueryClient,
  params?: SearchIngestionKeysParams,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(
    { queryKey: getSearchIngestionKeysQueryKey() },
    { queryKey: getSearchIngestionKeysQueryKey(params) },
    options,
  );
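With `params` threaded through both the request and the query key, each parameter combination is cached and invalidated independently. A hedged usage sketch (`Spinner` and `KeysTable` are placeholder components, and `react-query` is assumed as the underlying client, matching the hook's own imports):

```tsx
import { useQueryClient } from 'react-query';

function IngestionKeysList(): JSX.Element {
  const queryClient = useQueryClient();
  // Cached under ['getIngestionKeys', { page: 1, per_page: 25 }].
  const { data, isLoading } = useGetIngestionKeys({ page: 1, per_page: 25 });

  // Invalidates only the page-1 entry, not every page.
  const refresh = (): Promise<unknown> =>
    invalidateGetIngestionKeys(queryClient, { page: 1, per_page: 25 });

  if (isLoading) return <Spinner />;
  return <KeysTable keys={data?.data} onRefresh={refresh} />;
}
```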
@@ -47,7 +47,7 @@ type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
 * @summary Get metric alerts
 */
export const getMetricAlerts = (
  params?: GetMetricAlertsParams,
  params: GetMetricAlertsParams,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<GetMetricAlerts200>({

@@ -66,7 +66,7 @@ export const getGetMetricAlertsQueryOptions = <
  TData = Awaited<ReturnType<typeof getMetricAlerts>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricAlertsParams,
  params: GetMetricAlertsParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricAlerts>>,

@@ -103,7 +103,7 @@ export function useGetMetricAlerts<
  TData = Awaited<ReturnType<typeof getMetricAlerts>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricAlertsParams,
  params: GetMetricAlertsParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricAlerts>>,

@@ -128,7 +128,7 @@ export function useGetMetricAlerts<
 */
export const invalidateGetMetricAlerts = async (
  queryClient: QueryClient,
  params?: GetMetricAlertsParams,
  params: GetMetricAlertsParams,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(

@@ -144,7 +144,7 @@ export const invalidateGetMetricAlerts = async (
 * @summary Get metric dashboards
 */
export const getMetricDashboards = (
  params?: GetMetricDashboardsParams,
  params: GetMetricDashboardsParams,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<GetMetricDashboards200>({

@@ -165,7 +165,7 @@ export const getGetMetricDashboardsQueryOptions = <
  TData = Awaited<ReturnType<typeof getMetricDashboards>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricDashboardsParams,
  params: GetMetricDashboardsParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricDashboards>>,

@@ -203,7 +203,7 @@ export function useGetMetricDashboards<
  TData = Awaited<ReturnType<typeof getMetricDashboards>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricDashboardsParams,
  params: GetMetricDashboardsParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricDashboards>>,

@@ -228,7 +228,7 @@ export function useGetMetricDashboards<
 */
export const invalidateGetMetricDashboards = async (
  queryClient: QueryClient,
  params?: GetMetricDashboardsParams,
  params: GetMetricDashboardsParams,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(

@@ -244,7 +244,7 @@ export const invalidateGetMetricDashboards = async (
 * @summary Get metric highlights
 */
export const getMetricHighlights = (
  params?: GetMetricHighlightsParams,
  params: GetMetricHighlightsParams,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<GetMetricHighlights200>({

@@ -265,7 +265,7 @@ export const getGetMetricHighlightsQueryOptions = <
  TData = Awaited<ReturnType<typeof getMetricHighlights>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricHighlightsParams,
  params: GetMetricHighlightsParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricHighlights>>,

@@ -303,7 +303,7 @@ export function useGetMetricHighlights<
  TData = Awaited<ReturnType<typeof getMetricHighlights>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricHighlightsParams,
  params: GetMetricHighlightsParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricHighlights>>,

@@ -328,7 +328,7 @@ export function useGetMetricHighlights<
 */
export const invalidateGetMetricHighlights = async (
  queryClient: QueryClient,
  params?: GetMetricHighlightsParams,
  params: GetMetricHighlightsParams,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(

@@ -526,7 +526,7 @@ export const useGetMetricAttributes = <
 * @summary Get metric metadata
 */
export const getMetricMetadata = (
  params?: GetMetricMetadataParams,
  params: GetMetricMetadataParams,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<GetMetricMetadata200>({

@@ -547,7 +547,7 @@ export const getGetMetricMetadataQueryOptions = <
  TData = Awaited<ReturnType<typeof getMetricMetadata>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricMetadataParams,
  params: GetMetricMetadataParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricMetadata>>,

@@ -585,7 +585,7 @@ export function useGetMetricMetadata<
  TData = Awaited<ReturnType<typeof getMetricMetadata>>,
  TError = RenderErrorResponseDTO
>(
  params?: GetMetricMetadataParams,
  params: GetMetricMetadataParams,
  options?: {
    query?: UseQueryOptions<
      Awaited<ReturnType<typeof getMetricMetadata>>,

@@ -610,7 +610,7 @@ export function useGetMetricMetadata<
 */
export const invalidateGetMetricMetadata = async (
  queryClient: QueryClient,
  params?: GetMetricMetadataParams,
  params: GetMetricMetadataParams,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(
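Across these hooks, `params` tightens from optional to required, matching the spec change above: omitting `metricName` is now a compile-time error instead of a silently empty query. A sketch of a consumer (`ErrorState` and `AlertsList` are placeholders, and the `{ data }` envelope follows the 200-type pattern used elsewhere in this file):

```tsx
function MetricAlerts({ metricName }: { metricName: string }): JSX.Element {
  // useGetMetricAlerts() with no params no longer type-checks.
  const { data, isError } = useGetMetricAlerts({ metricName });

  if (isError) return <ErrorState />;
  return <AlertsList alerts={data?.data?.alerts ?? []} />;
}
```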
@@ -650,11 +650,11 @@ export interface MetricsexplorertypesMetricAlertDTO {
  /**
   * @type string
   */
  alertId?: string;
  alertId: string;
  /**
   * @type string
   */
  alertName?: string;
  alertName: string;
}

export interface MetricsexplorertypesMetricAlertsResponseDTO {

@@ -662,24 +662,24 @@ export interface MetricsexplorertypesMetricAlertsResponseDTO {
   * @type array
   * @nullable true
   */
  alerts?: MetricsexplorertypesMetricAlertDTO[] | null;
  alerts: MetricsexplorertypesMetricAlertDTO[] | null;
}

export interface MetricsexplorertypesMetricAttributeDTO {
  /**
   * @type string
   */
  key?: string;
  key: string;
  /**
   * @type integer
   * @minimum 0
   */
  valueCount?: number;
  valueCount: number;
  /**
   * @type array
   * @nullable true
   */
  values?: string[] | null;
  values: string[] | null;
}

export interface MetricsexplorertypesMetricAttributesRequestDTO {

@@ -691,7 +691,7 @@ export interface MetricsexplorertypesMetricAttributesRequestDTO {
  /**
   * @type string
   */
  metricName?: string;
  metricName: string;
  /**
   * @type integer
   * @nullable true

@@ -704,31 +704,31 @@ export interface MetricsexplorertypesMetricAttributesResponseDTO {
   * @type array
   * @nullable true
   */
  attributes?: MetricsexplorertypesMetricAttributeDTO[] | null;
  attributes: MetricsexplorertypesMetricAttributeDTO[] | null;
  /**
   * @type integer
   * @format int64
   */
  totalKeys?: number;
  totalKeys: number;
}

export interface MetricsexplorertypesMetricDashboardDTO {
  /**
   * @type string
   */
  dashboardId?: string;
  dashboardId: string;
  /**
   * @type string
   */
  dashboardName?: string;
  dashboardName: string;
  /**
   * @type string
   */
  widgetId?: string;
  widgetId: string;
  /**
   * @type string
   */
  widgetName?: string;
  widgetName: string;
}

export interface MetricsexplorertypesMetricDashboardsResponseDTO {

@@ -736,7 +736,7 @@ export interface MetricsexplorertypesMetricDashboardsResponseDTO {
   * @type array
   * @nullable true
   */
  dashboards?: MetricsexplorertypesMetricDashboardDTO[] | null;
  dashboards: MetricsexplorertypesMetricDashboardDTO[] | null;
}

export interface MetricsexplorertypesMetricHighlightsResponseDTO {

@@ -744,74 +744,96 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
   * @type integer
   * @minimum 0
   */
  activeTimeSeries?: number;
  activeTimeSeries: number;
  /**
   * @type integer
   * @minimum 0
   */
  dataPoints?: number;
  dataPoints: number;
  /**
   * @type integer
   * @minimum 0
   */
  lastReceived?: number;
  lastReceived: number;
  /**
   * @type integer
   * @minimum 0
   */
  totalTimeSeries?: number;
  totalTimeSeries: number;
}

export enum MetricsexplorertypesMetricMetadataDTOTemporality {
  delta = 'delta',
  cumulative = 'cumulative',
  unspecified = 'unspecified',
}
export enum MetricsexplorertypesMetricMetadataDTOType {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesMetricMetadataDTO {
  /**
   * @type string
   */
  description?: string;
  description: string;
  /**
   * @type boolean
   */
  isMonotonic?: boolean;
  isMonotonic: boolean;
  /**
   * @enum delta,cumulative,unspecified
   * @type string
   */
  temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
  /**
   * @enum gauge,sum,histogram,summary,exponentialhistogram
   * @type string
   */
  type: MetricsexplorertypesMetricMetadataDTOType;
  /**
   * @type string
   */
  temporality?: string;
  /**
   * @type string
   */
  type?: string;
  /**
   * @type string
   */
  unit?: string;
  unit: string;
}

export enum MetricsexplorertypesStatDTOType {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesStatDTO {
  /**
   * @type string
   */
  description?: string;
  description: string;
  /**
   * @type string
   */
  metricName?: string;
  metricName: string;
  /**
   * @type integer
   * @minimum 0
   */
  samples?: number;
  samples: number;
  /**
   * @type integer
   * @minimum 0
   */
  timeseries?: number;
  timeseries: number;
  /**
   * @enum gauge,sum,histogram,summary,exponentialhistogram
   * @type string
   */
  type: MetricsexplorertypesStatDTOType;
  /**
   * @type string
   */
  type?: string;
  /**
   * @type string
   */
  unit?: string;
  unit: string;
}

export interface MetricsexplorertypesStatsRequestDTO {

@@ -819,12 +841,12 @@ export interface MetricsexplorertypesStatsRequestDTO {
   * @type integer
   * @format int64
   */
  end?: number;
  end: number;
  filter?: Querybuildertypesv5FilterDTO;
  /**
   * @type integer
   */
  limit?: number;
  limit: number;
  /**
   * @type integer
   */

@@ -834,7 +856,7 @@ export interface MetricsexplorertypesStatsRequestDTO {
   * @type integer
   * @format int64
   */
  start?: number;
  start: number;
}

export interface MetricsexplorertypesStatsResponseDTO {

@@ -842,51 +864,56 @@ export interface MetricsexplorertypesStatsResponseDTO {
   * @type array
   * @nullable true
   */
  metrics?: MetricsexplorertypesStatDTO[] | null;
  metrics: MetricsexplorertypesStatDTO[] | null;
  /**
   * @type integer
   * @minimum 0
   */
  total?: number;
  total: number;
}

export interface MetricsexplorertypesTreemapEntryDTO {
  /**
   * @type string
   */
  metricName?: string;
  metricName: string;
  /**
   * @type number
   * @format double
   */
  percentage?: number;
  percentage: number;
  /**
   * @type integer
   * @minimum 0
   */
  totalValue?: number;
  totalValue: number;
}

export enum MetricsexplorertypesTreemapRequestDTOMode {
  timeseries = 'timeseries',
  samples = 'samples',
}
export interface MetricsexplorertypesTreemapRequestDTO {
  /**
   * @type integer
   * @format int64
   */
  end?: number;
  end: number;
  filter?: Querybuildertypesv5FilterDTO;
  /**
   * @type integer
   */
  limit?: number;
  limit: number;
  /**
   * @enum timeseries,samples
   * @type string
   */
  mode?: string;
  mode: MetricsexplorertypesTreemapRequestDTOMode;
  /**
   * @type integer
   * @format int64
   */
  start?: number;
  start: number;
}

export interface MetricsexplorertypesTreemapResponseDTO {

@@ -894,39 +921,53 @@ export interface MetricsexplorertypesTreemapResponseDTO {
   * @type array
   * @nullable true
   */
  samples?: MetricsexplorertypesTreemapEntryDTO[] | null;
  samples: MetricsexplorertypesTreemapEntryDTO[] | null;
  /**
   * @type array
   * @nullable true
   */
  timeseries?: MetricsexplorertypesTreemapEntryDTO[] | null;
  timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}

export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
  delta = 'delta',
  cumulative = 'cumulative',
  unspecified = 'unspecified',
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
  /**
   * @type string
   */
  description?: string;
  description: string;
  /**
   * @type boolean
   */
  isMonotonic?: boolean;
  isMonotonic: boolean;
  /**
   * @type string
   */
  metricName?: string;
  metricName: string;
  /**
   * @enum delta,cumulative,unspecified
   * @type string
   */
  temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
  /**
   * @enum gauge,sum,histogram,summary,exponentialhistogram
   * @type string
   */
  type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
  /**
   * @type string
   */
  temporality?: string;
  /**
   * @type string
   */
  type?: string;
  /**
   * @type string
   */
  unit?: string;
  unit: string;
}

export interface PreferencetypesPreferenceDTO {

@@ -1851,6 +1892,19 @@ export type GetFeatures200 = {
  status?: string;
};

export type GetIngestionKeysParams = {
  /**
   * @type integer
   * @description undefined
   */
  page?: number;
  /**
   * @type integer
   * @description undefined
   */
  per_page?: number;
};

export type GetIngestionKeys200 = {
  data?: GatewaytypesGettableIngestionKeysDTO;
  /**

@@ -1890,6 +1944,24 @@ export type DeleteIngestionKeyLimitPathParameters = {
export type UpdateIngestionKeyLimitPathParameters = {
  limitId: string;
};
export type SearchIngestionKeysParams = {
  /**
   * @type string
   * @description undefined
   */
  name?: string;
  /**
   * @type integer
   * @description undefined
   */
  page?: number;
  /**
   * @type integer
   * @description undefined
   */
  per_page?: number;
};

export type SearchIngestionKeys200 = {
  data?: GatewaytypesGettableIngestionKeysDTO;
  /**

@@ -1903,7 +1975,7 @@ export type GetMetricAlertsParams = {
   * @type string
   * @description undefined
   */
  metricName?: string;
  metricName: string;
};

export type GetMetricAlerts200 = {

@@ -1919,7 +1991,7 @@ export type GetMetricDashboardsParams = {
   * @type string
   * @description undefined
   */
  metricName?: string;
  metricName: string;
};

export type GetMetricDashboards200 = {

@@ -1935,7 +2007,7 @@ export type GetMetricHighlightsParams = {
   * @type string
   * @description undefined
   */
  metricName?: string;
  metricName: string;
};

export type GetMetricHighlights200 = {

@@ -1962,7 +2034,7 @@ export type GetMetricMetadataParams = {
   * @type string
   * @description undefined
   */
  metricName?: string;
  metricName: string;
};

export type GetMetricMetadata200 = {
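The generated enums replace bare `string` fields, so an invalid mode or type now fails at compile time. For example, building a treemap request from the generated types:

```typescript
const request: MetricsexplorertypesTreemapRequestDTO = {
  start: Date.now() - 15 * 60 * 1000,
  end: Date.now(),
  limit: 50,
  mode: MetricsexplorertypesTreemapRequestDTOMode.timeseries,
  // mode: 'time-series' // <- would be rejected by the compiler
};
```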
@@ -22,7 +22,7 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useNotifications } from 'hooks/useNotifications';
import { PenLine, Trash2 } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { TVariableMode } from './types';
@@ -0,0 +1,53 @@
import { memo, useMemo } from 'react';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';

import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { VariableItemProps } from './VariableItem';

type CustomVariableInputProps = Pick<
  VariableItemProps,
  'variableData' | 'onValueUpdate'
>;

function CustomVariableInput({
  variableData,
  onValueUpdate,
}: CustomVariableInputProps): JSX.Element {
  const optionsData: (string | number | boolean)[] = useMemo(() => {
    return sortValues(
      commaValuesParser(variableData.customValue || ''),
      variableData.sort,
    ) as (string | number | boolean)[];
  }, [variableData.customValue, variableData.sort]);

  const {
    value,
    defaultValue,
    enableSelectAll,
    onChange,
    onDropdownVisibleChange,
    handleClear,
  } = useDashboardVariableSelectHelper({
    variableData,
    optionsData,
    onValueUpdate,
  });

  return (
    <SelectVariableInput
      variableId={variableData.id}
      options={optionsData}
      value={value}
      onChange={onChange}
      onDropdownVisibleChange={onDropdownVisibleChange}
      onClear={handleClear}
      enableSelectAll={enableSelectAll}
      defaultValue={defaultValue}
      isMultiSelect={variableData.multiSelect}
    />
  );
}

export default memo(CustomVariableInput);
@@ -25,12 +25,6 @@
    }
  }

  &.focused {
    .variable-value {
      outline: 1px solid var(--bg-robin-400);
    }
  }

  .variable-value {
    display: flex;
    min-width: 120px;

@@ -48,6 +42,11 @@
    font-style: normal;
    font-weight: 400;
    line-height: 16px; /* 133.333% */

    &:hover,
    &:focus-within {
      outline: 1px solid var(--bg-robin-400);
    }
  }

  .variable-select {

@@ -99,12 +98,6 @@

.lightMode {
  .variable-item {
    &.focused {
      .variable-value {
        border: 1px solid var(--bg-robin-400);
      }
    }

    .variable-name {
      border: 1px solid var(--bg-vanilla-300);
      background: var(--bg-vanilla-100);

@@ -115,6 +108,11 @@
      border: 1px solid var(--bg-vanilla-300);
      background: var(--bg-vanilla-100);
      color: var(--bg-ink-400);

      &:hover,
      &:focus-within {
        outline: 1px solid var(--bg-robin-400);
      }
    }
  }
}

@@ -124,3 +122,9 @@
  padding: 4px 12px;
  font-size: 12px;
}

.dashboard-variables-selection-container {
  display: flex;
  flex-wrap: wrap;
  gap: 12px;
}
@@ -1,10 +1,12 @@
import { memo, useEffect, useState } from 'react';
import { memo, useCallback, useEffect, useMemo } from 'react';
import { useSelector } from 'react-redux';
import { Row } from 'antd';
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import {
  useDashboardVariables,
  useDashboardVariablesSelector,
} from 'hooks/dashboard/useDashboardVariables';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import { AppState } from 'store/reducers';

@@ -12,20 +14,13 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';

import DynamicVariableSelection from './DynamicVariableSelection';
import {
  buildDependencies,
  buildDependencyGraph,
  buildParentDependencyGraph,
  IDependencyData,
  onUpdateVariableNode,
} from './util';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';

import './DashboardVariableSelection.styles.scss';

function DashboardVariableSelection(): JSX.Element | null {
  const {
    selectedDashboard,
    setSelectedDashboard,
    updateLocalStorageDashboardVariables,
    variablesToGetUpdated,

@@ -35,11 +30,11 @@ function DashboardVariableSelection(): JSX.Element | null {
  const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();

  const { dashboardVariables } = useDashboardVariables();

  const [variablesTableData, setVariablesTableData] = useState<any>([]);

  const [dependencyData, setDependencyData] = useState<IDependencyData | null>(
    null,
  const sortedVariablesArray = useDashboardVariablesSelector(
    (state) => state.sortedVariablesArray,
  );
  const dependencyData = useDashboardVariablesSelector(
    (state) => state.dependencyData,
  );

  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(

@@ -47,24 +42,6 @@ function DashboardVariableSelection(): JSX.Element | null {
  );

  useEffect(() => {
    const tableRowData = [];

    // eslint-disable-next-line no-restricted-syntax
    for (const [key, value] of Object.entries(dashboardVariables)) {
      const { id } = value;

      tableRowData.push({
        key,
        name: key,
        ...dashboardVariables[key],
        id,
      });
    }

    tableRowData.sort((a, b) => a.order - b.order);

    setVariablesTableData(tableRowData);

    // Initialize variables with default values if not in URL
    initializeDefaultVariables(
      dashboardVariables,

@@ -73,58 +50,36 @@ function DashboardVariableSelection(): JSX.Element | null {
    );
  }, [getUrlVariables, updateUrlVariable, dashboardVariables]);

  useEffect(() => {
    if (variablesTableData.length > 0) {
      const depGrp = buildDependencies(variablesTableData);
      const { order, graph, hasCycle, cycleNodes } = buildDependencyGraph(depGrp);
      const parentDependencyGraph = buildParentDependencyGraph(graph);

      // cleanup order to only include variables that are of type 'QUERY'
      const cleanedOrder = order.filter((variable) => {
        const variableData = variablesTableData.find(
          (v: IDashboardVariable) => v.name === variable,
        );
        return variableData?.type === 'QUERY';
      });

      setDependencyData({
        order: cleanedOrder,
        graph,
        parentDependencyGraph,
        hasCycle,
        cycleNodes,
      });
    }
  }, [dashboardVariables, variablesTableData]);

  // this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
  // also trigger when the global time changes
  useEffect(
    () => {
      if (!isEmpty(dependencyData?.order)) {
        setVariablesToGetUpdated(dependencyData?.order || []);
      }
    },
    // eslint-disable-next-line react-hooks/exhaustive-deps
    [JSON.stringify(dependencyData?.order), minTime, maxTime],
  // Memoize the order key to avoid unnecessary triggers
  const dependencyOrderKey = useMemo(
    () => dependencyData?.order?.join(',') ?? '',
    [dependencyData?.order],
  );

  // Trigger refetch when dependency order changes or global time changes
  useEffect(() => {
    if (dependencyData?.order && dependencyData.order.length > 0) {
      setVariablesToGetUpdated(dependencyData?.order || []);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [dependencyOrderKey, minTime, maxTime]);

  // Performance optimization: For dynamic variables with allSelected=true, we don't store
  // individual values in localStorage since we can always derive them from available options.
  // This makes localStorage much lighter and more efficient.
  const onValueUpdate = (
    name: string,
    id: string,
    value: IDashboardVariable['selectedValue'],
    allSelected: boolean,
    haveCustomValuesSelected?: boolean,
    // eslint-disable-next-line sonarjs/cognitive-complexity
  ): void => {
    if (id) {
  const onValueUpdate = useCallback(
    (
      name: string,
      id: string,
      value: IDashboardVariable['selectedValue'],
      allSelected: boolean,
      haveCustomValuesSelected?: boolean,
      // eslint-disable-next-line sonarjs/cognitive-complexity
    ): void => {
      // For dynamic variables, only store in localStorage when NOT allSelected
      // This makes localStorage much lighter by avoiding storing all individual values
      const variable = dashboardVariables?.[id] || dashboardVariables?.[name];
      const isDynamic = variable?.type === 'DYNAMIC';
      const variable = dashboardVariables[id] || dashboardVariables[name];
      const isDynamic = variable.type === 'DYNAMIC';
      updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);

      if (allSelected) {

@@ -133,41 +88,39 @@ function DashboardVariableSelection(): JSX.Element | null {
      updateUrlVariable(name || id, value);
    }

    if (selectedDashboard) {
      setSelectedDashboard((prev) => {
        if (prev) {
          const oldVariables = prev?.data.variables;
          // this is added to handle case where we have two different
          // schemas for variable response
          if (oldVariables?.[id]) {
            oldVariables[id] = {
              ...oldVariables[id],
              selectedValue: value,
              allSelected,
              haveCustomValuesSelected,
            };
          }
          if (oldVariables?.[name]) {
            oldVariables[name] = {
              ...oldVariables[name],
              selectedValue: value,
              allSelected,
              haveCustomValuesSelected,
            };
          }
          return {
            ...prev,
            data: {
              ...prev?.data,
              variables: {
                ...oldVariables,
              },
            },
      setSelectedDashboard((prev) => {
        if (prev) {
          const oldVariables = { ...prev?.data.variables };
          // this is added to handle case where we have two different
          // schemas for variable response
          if (oldVariables?.[id]) {
            oldVariables[id] = {
              ...oldVariables[id],
              selectedValue: value,
              allSelected,
              haveCustomValuesSelected,
            };
          }
          return prev;
        });
      }
          if (oldVariables?.[name]) {
            oldVariables[name] = {
              ...oldVariables[name],
              selectedValue: value,
              allSelected,
              haveCustomValuesSelected,
            };
          }
          return {
            ...prev,
            data: {
              ...prev?.data,
              variables: {
                ...oldVariables,
              },
            },
          };
        }
        return prev;
      });

    if (dependencyData) {
      const updatedVariables: string[] = [];

@@ -183,48 +136,42 @@ function DashboardVariableSelection(): JSX.Element | null {
      } else {
        setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
      }
    }
  };

  if (!dashboardVariables) {
    return null;
  }

  const orderBasedSortedVariables = variablesTableData.sort(
    (a: { order: number }, b: { order: number }) => a.order - b.order,
    },
    [
      // This can be removed
      dashboardVariables,
      updateLocalStorageDashboardVariables,
      dependencyData,
      updateUrlVariable,
      setSelectedDashboard,
      setVariablesToGetUpdated,
    ],
  );

  return (
    <Row style={{ display: 'flex', gap: '12px' }}>
      {orderBasedSortedVariables &&
        Array.isArray(orderBasedSortedVariables) &&
        orderBasedSortedVariables.length > 0 &&
        orderBasedSortedVariables.map((variable) =>
          variable.type === 'DYNAMIC' ? (
            <DynamicVariableSelection
              key={`${variable.name}${variable.id}${variable.order}`}
              existingVariables={dashboardVariables}
              variableData={{
                name: variable.name,
                ...variable,
              }}
              onValueUpdate={onValueUpdate}
            />
          ) : (
            <VariableItem
              key={`${variable.name}${variable.id}}${variable.order}`}
              existingVariables={dashboardVariables}
              variableData={{
                name: variable.name,
                ...variable,
              }}
              onValueUpdate={onValueUpdate}
              variablesToGetUpdated={variablesToGetUpdated}
              setVariablesToGetUpdated={setVariablesToGetUpdated}
              dependencyData={dependencyData}
            />
          ),
        )}
    <Row className="dashboard-variables-selection-container">
      {sortedVariablesArray.map((variable) => {
        const key = `${variable.name}${variable.id}${variable.order}`;

        return variable.type === 'DYNAMIC' ? (
          <DynamicVariableSelection
            key={key}
            existingVariables={dashboardVariables}
            variableData={variable}
            onValueUpdate={onValueUpdate}
          />
        ) : (
          <VariableItem
            key={key}
            existingVariables={dashboardVariables}
            variableData={variable}
            onValueUpdate={onValueUpdate}
            variablesToGetUpdated={variablesToGetUpdated}
            setVariablesToGetUpdated={setVariablesToGetUpdated}
            dependencyData={dependencyData}
          />
        );
      })}
    </Row>
  );
}
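The `dependencyOrderKey` memo replaces `JSON.stringify(dependencyData?.order)` in the effect's dependency list: the array is collapsed to a stable string so the effect compares by content rather than by reference. The same pattern in isolation (the hook name is hypothetical):

```typescript
import { useMemo } from 'react';

// Returns the same string for arrays with equal content, so an effect
// keyed on it will not re-run when only the array identity changes.
function useStableListKey(list: string[] | undefined): string {
  return useMemo(() => list?.join(',') ?? '', [list]);
}
```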
@@ -23,9 +23,9 @@ import { SelectItemStyle } from './styles';
import {
  areArraysEqual,
  getOptionsForDynamicVariable,
  getSelectValue,
  uniqueValues,
} from './util';
import { getSelectValue } from './VariableItem';

import './DashboardVariableSelection.styles.scss';
@@ -0,0 +1,229 @@
|
||||
import { memo, useCallback, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
|
||||
import { isArray, isString } from 'lodash-es';
|
||||
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { variablePropsToPayloadVariables } from '../utils';
|
||||
import SelectVariableInput from './SelectVariableInput';
|
||||
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
|
||||
import { areArraysEqual, checkAPIInvocation } from './util';
|
||||
|
||||
interface QueryVariableInputProps {
|
||||
variableData: IDashboardVariable;
|
||||
existingVariables: Record<string, IDashboardVariable>;
|
||||
onValueUpdate: (
|
||||
name: string,
|
||||
id: string,
|
||||
value: IDashboardVariable['selectedValue'],
|
||||
allSelected: boolean,
|
||||
) => void;
|
||||
variablesToGetUpdated: string[];
|
||||
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
|
||||
dependencyData: IDependencyData | null;
|
||||
}
|
||||
|
||||
function QueryVariableInput({
|
||||
variableData,
|
||||
existingVariables,
|
||||
variablesToGetUpdated,
|
||||
setVariablesToGetUpdated,
|
||||
dependencyData,
|
||||
onValueUpdate,
|
||||
}: QueryVariableInputProps): JSX.Element {
|
||||
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
|
||||
[],
|
||||
);
|
||||
const [errorMessage, setErrorMessage] = useState<null | string>(null);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const {
|
||||
tempSelection,
|
		setTempSelection,
		value,
		defaultValue,
		enableSelectAll,
		onChange,
		onDropdownVisibleChange,
		handleClear,
	} = useDashboardVariableSelectHelper({
		variableData,
		optionsData,
		onValueUpdate,
	});

	const validVariableUpdate = (): boolean => {
		if (!variableData.name) {
			return false;
		}
		return Boolean(
			variablesToGetUpdated.length &&
				variablesToGetUpdated[0] === variableData.name,
		);
	};

	// eslint-disable-next-line sonarjs/cognitive-complexity
	const getOptions = (variablesRes: VariableResponseProps | null): void => {
		try {
			setErrorMessage(null);

			if (
				variablesRes?.variableValues &&
				Array.isArray(variablesRes?.variableValues)
			) {
				const newOptionsData = sortValues(
					variablesRes?.variableValues,
					variableData.sort,
				);

				const oldOptionsData = sortValues(optionsData, variableData.sort) as never;

				if (!areArraysEqual(newOptionsData, oldOptionsData)) {
					let valueNotInList = false;

					if (isArray(variableData.selectedValue)) {
						variableData.selectedValue.forEach((val) => {
							if (!newOptionsData.includes(val)) {
								valueNotInList = true;
							}
						});
					} else if (
						isString(variableData.selectedValue) &&
						!newOptionsData.includes(variableData.selectedValue)
					) {
						valueNotInList = true;
					}

					// variableData.allSelected is checked so that local storage is also
					// updated when the options change
					if (
						variableData.name &&
						(validVariableUpdate() || valueNotInList || variableData.allSelected)
					) {
						if (
							variableData.allSelected &&
							variableData.multiSelect &&
							variableData.showALLOption
						) {
							onValueUpdate(variableData.name, variableData.id, newOptionsData, true);

							// Update tempSelection to maintain ALL state when dropdown is open
							if (tempSelection !== undefined) {
								setTempSelection(newOptionsData.map((option) => option.toString()));
							}
						} else {
							const value = variableData.selectedValue;
							let allSelected = false;

							if (variableData.multiSelect) {
								const { selectedValue } = variableData;
								allSelected =
									newOptionsData.length > 0 &&
									Array.isArray(selectedValue) &&
									newOptionsData.every((option) => selectedValue.includes(option));
							}

							if (variableData.name && variableData.id) {
								onValueUpdate(variableData.name, variableData.id, value, allSelected);
							}
						}
					}

					setOptionsData(newOptionsData);
				} else {
					setVariablesToGetUpdated((prev) =>
						prev.filter((name) => name !== variableData.name),
					);
				}
			}
		} catch (e) {
			console.error(e);
		}
	};

	const { isLoading, refetch } = useQuery(
		[
			REACT_QUERY_KEY.DASHBOARD_BY_ID,
			variableData.name || '',
			`${minTime}`,
			`${maxTime}`,
			JSON.stringify(dependencyData?.order),
		],
		{
			enabled:
				variableData &&
				variableData.type === 'QUERY' &&
				checkAPIInvocation(
					variablesToGetUpdated,
					variableData,
					dependencyData?.parentDependencyGraph,
				),
			queryFn: () =>
				dashboardVariablesQuery({
					query: variableData.queryValue || '',
					variables: variablePropsToPayloadVariables(existingVariables),
				}),
			refetchOnWindowFocus: false,
			onSuccess: (response) => {
				getOptions(response.payload);
				setVariablesToGetUpdated((prev) =>
					prev.filter((v) => v !== variableData.name),
				);
			},
			onError: (error: {
				details: {
					error: string;
				};
			}) => {
				const { details } = error;

				if (details.error) {
					let message = details.error;
					if ((details.error ?? '').toString().includes('Syntax error:')) {
						message =
							'Please make sure query is valid and dependent variables are selected';
					}
					setErrorMessage(message);
				}
				setVariablesToGetUpdated((prev) =>
					prev.filter((v) => v !== variableData.name),
				);
			},
		},
	);

	const handleRetry = useCallback((): void => {
		setErrorMessage(null);
		refetch();
	}, [refetch]);

	return (
		<SelectVariableInput
			variableId={variableData.id}
			options={optionsData}
			value={value}
			onChange={onChange}
			onDropdownVisibleChange={onDropdownVisibleChange}
			onClear={handleClear}
			enableSelectAll={enableSelectAll}
			defaultValue={defaultValue}
			isMultiSelect={variableData.multiSelect}
			// query variable specific, API related props
			loading={isLoading}
			errorMessage={errorMessage}
			onRetry={handleRetry}
		/>
	);
}

export default memo(QueryVariableInput);
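For orientation, this is roughly how a parent would mount the component above; the queue props gate when each QUERY variable is allowed to fetch. A minimal sketch — the prop value names here are illustrative, not taken from the diff:

	<QueryVariableInput
		variableData={variable}
		existingVariables={allVariables}
		onValueUpdate={updateVariable}
		variablesToGetUpdated={updateQueue}
		setVariablesToGetUpdated={setUpdateQueue}
		dependencyData={dependencyData}
	/>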
@@ -0,0 +1,134 @@
import { memo, useMemo } from 'react';
import { orange } from '@ant-design/colors';
import { WarningOutlined } from '@ant-design/icons';
import { Popover, Tooltip, Typography } from 'antd';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { popupContainer } from 'utils/selectPopupContainer';

import { ALL_SELECT_VALUE } from '../utils';
import { SelectItemStyle } from './styles';

const errorIconStyle = { margin: '0 0.5rem' };

interface SelectVariableInputProps {
	variableId: string;
	options: (string | number | boolean)[];
	value: string | string[] | undefined;
	enableSelectAll: boolean;
	isMultiSelect: boolean;
	onChange: (value: string | string[]) => void;
	onClear: () => void;
	defaultValue?: string | string[];
	onDropdownVisibleChange?: (visible: boolean) => void;
	loading?: boolean;
	errorMessage?: string | null;
	onRetry?: () => void;
}

const MAX_TAG_DISPLAY_VALUES = 10;

function maxTagPlaceholder(
	omittedValues: { label?: React.ReactNode; value?: string | number }[],
): JSX.Element {
	const valuesToShow = omittedValues.slice(0, MAX_TAG_DISPLAY_VALUES);
	const hasMore = omittedValues.length > MAX_TAG_DISPLAY_VALUES;
	const tooltipText =
		valuesToShow.map(({ value: v }) => v ?? '').join(', ') +
		(hasMore ? ` + ${omittedValues.length - MAX_TAG_DISPLAY_VALUES} more` : '');

	return (
		<Tooltip title={tooltipText}>
			<span>+ {omittedValues.length} </span>
		</Tooltip>
	);
}

function SelectVariableInput({
	variableId,
	options,
	value,
	onChange,
	onDropdownVisibleChange,
	onClear,
	loading,
	errorMessage,
	onRetry,
	enableSelectAll,
	isMultiSelect,
	defaultValue,
}: SelectVariableInputProps): JSX.Element {
	const selectOptions = useMemo(
		() =>
			options.map((option) => ({
				label: option.toString(),
				value: option.toString(),
			})),
		[options],
	);

	const commonProps = useMemo(
		() => ({
			// main props
			key: variableId,
			value,
			defaultValue,

			// setup props
			placeholder: 'Select value',
			className: 'variable-select',
			popupClassName: 'dropdown-styles',
			getPopupContainer: popupContainer,
			style: SelectItemStyle,
			showSearch: true,
			bordered: false,

			// dynamic props
			'data-testid': 'variable-select',
			onChange,
			loading,
			options: selectOptions,
			errorMessage,
			onRetry,
		}),
		[
			variableId,
			defaultValue,
			onChange,
			loading,
			selectOptions,
			value,
			errorMessage,
			onRetry,
		],
	);

	return (
		<>
			{isMultiSelect ? (
				<CustomMultiSelect
					{...commonProps}
					placement="bottomLeft"
					maxTagCount={2}
					onDropdownVisibleChange={onDropdownVisibleChange}
					maxTagPlaceholder={maxTagPlaceholder}
					onClear={onClear}
					enableAllSelection={enableSelectAll}
					maxTagTextLength={30}
					allowClear={value !== ALL_SELECT_VALUE && value !== 'ALL'}
				/>
			) : (
				<CustomSelect {...commonProps} />
			)}

			{errorMessage && (
				<span style={errorIconStyle}>
					<Popover placement="top" content={<Typography>{errorMessage}</Typography>}>
						<WarningOutlined style={{ color: orange[5] }} />
					</Popover>
				</span>
			)}
		</>
	);
}

export default memo(SelectVariableInput);
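To make the tag-overflow behavior concrete, a worked pass through maxTagPlaceholder with assumed inputs:

	// omittedValues = [{ value: 'v1' }, …, { value: 'v12' }] (12 entries)
	// valuesToShow keeps the first 10 and hasMore is true, so the tooltip text is
	// 'v1, v2, v3, v4, v5, v6, v7, v8, v9, v10 + 2 more',
	// while the visible tag itself renders as '+ 12'.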
@@ -0,0 +1,85 @@
import { memo, useCallback, useRef, useState } from 'react';
import { Input, InputRef } from 'antd';

import { VariableItemProps } from './VariableItem';

type TextboxVariableInputProps = Pick<
	VariableItemProps,
	'variableData' | 'onValueUpdate'
>;

function TextboxVariableInput({
	variableData,
	onValueUpdate,
}: TextboxVariableInputProps): JSX.Element {
	const handleChange = useCallback(
		(inputValue: string | string[]): void => {
			if (inputValue === variableData.selectedValue) {
				return;
			}
			if (variableData.name) {
				onValueUpdate(variableData.name, variableData.id, inputValue, false);
			}
		},
		[
			onValueUpdate,
			variableData.id,
			variableData.name,
			variableData.selectedValue,
		],
	);

	const textboxInputRef = useRef<InputRef>(null);
	const [textboxInputValue, setTextboxInputValue] = useState<string>(
		(variableData.selectedValue?.toString() ||
			variableData.defaultValue?.toString()) ??
			'',
	);

	const handleInputOnChange = useCallback(
		(event: React.ChangeEvent<HTMLInputElement>) => {
			setTextboxInputValue(event.target.value);
		},
		[setTextboxInputValue],
	);

	const handleInputOnBlur = useCallback(
		(event: React.FocusEvent<HTMLInputElement>): void => {
			const value = event.target.value.trim();
			// If empty, reset to default value
			if (!value && variableData.defaultValue) {
				setTextboxInputValue(variableData.defaultValue.toString());
				handleChange(variableData.defaultValue.toString());
			} else {
				handleChange(value);
			}
		},
		[handleChange, variableData.defaultValue],
	);

	const handleInputOnKeyDown = useCallback(
		(event: React.KeyboardEvent<HTMLInputElement>): void => {
			if (event.key === 'Enter') {
				textboxInputRef.current?.blur();
			}
		},
		[],
	);

	return (
		<Input
			key={variableData.id}
			ref={textboxInputRef}
			placeholder="Enter value"
			data-testid={`variable-textbox-${variableData.id}`}
			bordered={false}
			value={textboxInputValue}
			title={textboxInputValue}
			onChange={handleInputOnChange}
			onBlur={handleInputOnBlur}
			onKeyDown={handleInputOnKeyDown}
		/>
	);
}

export default memo(TextboxVariableInput);
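A usage sketch (prop values assumed): typing only updates local state; pressing Enter blurs the input, and the blur handler trims and commits the value, falling back to the default when the field is left empty.

	<TextboxVariableInput variableData={variable} onValueUpdate={updateVariable} />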
@@ -1,34 +1,16 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable jsx-a11y/click-events-have-key-events */
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable react/jsx-props-no-spreading */
/* eslint-disable no-nested-ternary */
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { orange } from '@ant-design/colors';
import { InfoCircleOutlined, WarningOutlined } from '@ant-design/icons';
import { Input, InputRef, Popover, Tooltip, Typography } from 'antd';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { CustomMultiSelect, CustomSelect } from 'components/NewSelect';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
import sortValues from 'lib/dashbaordVariables/sortVariableValues';
import { debounce, isArray, isEmpty, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import { popupContainer } from 'utils/selectPopupContainer';

import { ALL_SELECT_VALUE, variablePropsToPayloadVariables } from '../utils';
import { SelectItemStyle } from './styles';
import { areArraysEqual, checkAPIInvocation, IDependencyData } from './util';
import CustomVariableInput from './CustomVariableInput';
import QueryVariableInput from './QueryVariableInput';
import TextboxVariableInput from './TextboxVariableInput';

import './DashboardVariableSelection.styles.scss';

interface VariableItemProps {
export interface VariableItemProps {
	variableData: IDashboardVariable;
	existingVariables: Record<string, IDashboardVariable>;
	onValueUpdate: (
@@ -42,488 +24,49 @@ interface VariableItemProps {
	dependencyData: IDependencyData | null;
}

export const getSelectValue = (
	selectedValue: IDashboardVariable['selectedValue'],
	variableData: IDashboardVariable,
): string | string[] | undefined => {
	if (Array.isArray(selectedValue)) {
		if (!variableData.multiSelect && selectedValue.length === 1) {
			return selectedValue[0]?.toString();
		}
		return selectedValue.map((item) => item.toString());
	}
	return selectedValue?.toString();
};

// eslint-disable-next-line sonarjs/cognitive-complexity
function VariableItem({
	variableData,
	existingVariables,
	onValueUpdate,
	existingVariables,
	variablesToGetUpdated,
	setVariablesToGetUpdated,
	dependencyData,
}: VariableItemProps): JSX.Element {
	const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
		[],
	);
	const [tempSelection, setTempSelection] = useState<
		string | string[] | undefined
	>(undefined);

	// Local state for textbox input to ensure a smooth editing experience
	const [textboxInputValue, setTextboxInputValue] = useState<string>(
		(variableData.selectedValue?.toString() ||
			variableData.defaultValue?.toString()) ??
			'',
	);
	const [isTextboxFocused, setIsTextboxFocused] = useState<boolean>(false);
	const textboxInputRef = useRef<InputRef>(null);

	const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
		(state) => state.globalTime,
	);

	const validVariableUpdate = (): boolean => {
		if (!variableData.name) {
			return false;
		}

		// variableData.name is the top element or next in the variablesToGetUpdated queue
		return Boolean(
			variablesToGetUpdated.length &&
				variablesToGetUpdated[0] === variableData.name,
		);
	};

	const [errorMessage, setErrorMessage] = useState<null | string>(null);

	// eslint-disable-next-line sonarjs/cognitive-complexity
	const getOptions = (variablesRes: VariableResponseProps | null): void => {
		if (variablesRes && variableData.type === 'QUERY') {
			try {
				setErrorMessage(null);

				if (
					variablesRes?.variableValues &&
					Array.isArray(variablesRes?.variableValues)
				) {
					const newOptionsData = sortValues(
						variablesRes?.variableValues,
						variableData.sort,
					);

					const oldOptionsData = sortValues(optionsData, variableData.sort) as never;

					if (!areArraysEqual(newOptionsData, oldOptionsData)) {
						/* eslint-disable no-useless-escape */

						let valueNotInList = false;

						if (isArray(variableData.selectedValue)) {
							variableData.selectedValue.forEach((val) => {
								const isUsed = newOptionsData.includes(val);

								if (!isUsed) {
									valueNotInList = true;
								}
							});
						} else if (isString(variableData.selectedValue)) {
							const isUsed = newOptionsData.includes(variableData.selectedValue);

							if (!isUsed) {
								valueNotInList = true;
							}
						}

						// variableData.allSelected is checked so that local storage is also
						// updated when the options change
						if (
							variableData.type === 'QUERY' &&
							variableData.name &&
							(validVariableUpdate() || valueNotInList || variableData.allSelected)
						) {
							if (
								variableData.allSelected &&
								variableData.multiSelect &&
								variableData.showALLOption
							) {
								onValueUpdate(variableData.name, variableData.id, newOptionsData, true);

								// Update tempSelection to maintain ALL state when dropdown is open
								if (tempSelection !== undefined) {
									setTempSelection(newOptionsData.map((option) => option.toString()));
								}
							} else {
								const value = variableData.selectedValue;
								let allSelected = false;

								if (variableData.multiSelect) {
									const { selectedValue } = variableData;
									allSelected =
										newOptionsData.length > 0 &&
										Array.isArray(selectedValue) &&
										newOptionsData.every((option) => selectedValue.includes(option));
								}

								if (variableData && variableData?.name && variableData?.id) {
									onValueUpdate(variableData.name, variableData.id, value, allSelected);
								}
							}
						}

						setOptionsData(newOptionsData);
					} else {
						setVariablesToGetUpdated((prev) =>
							prev.filter((name) => name !== variableData.name),
						);
					}
				}
			} catch (e) {
				console.error(e);
			}
		} else if (variableData.type === 'CUSTOM') {
			const optionsData = sortValues(
				commaValuesParser(variableData.customValue || ''),
				variableData.sort,
			) as never;

			setOptionsData(optionsData);
		}
	};

	const { isLoading, refetch } = useQuery(
		[
			REACT_QUERY_KEY.DASHBOARD_BY_ID,
			variableData.name || '',
			`${minTime}`,
			`${maxTime}`,
			JSON.stringify(dependencyData?.order),
		],
		{
			enabled:
				variableData &&
				variableData.type === 'QUERY' &&
				checkAPIInvocation(
					variablesToGetUpdated,
					variableData,
					dependencyData?.parentDependencyGraph,
				),
			queryFn: () =>
				dashboardVariablesQuery({
					query: variableData.queryValue || '',
					variables: variablePropsToPayloadVariables(existingVariables),
				}),
			refetchOnWindowFocus: false,
			onSuccess: (response) => {
				getOptions(response.payload);
				setVariablesToGetUpdated((prev) =>
					prev.filter((v) => v !== variableData.name),
				);
			},
			onError: (error: {
				details: {
					error: string;
				};
			}) => {
				const { details } = error;

				if (details.error) {
					let message = details.error;
					if ((details.error ?? '').toString().includes('Syntax error:')) {
						message =
							'Please make sure query is valid and dependent variables are selected';
					}
					setErrorMessage(message);
				}
				setVariablesToGetUpdated((prev) =>
					prev.filter((v) => v !== variableData.name),
				);
			},
		},
	);

	const handleChange = useCallback(
		(inputValue: string | string[]): void => {
			const value = variableData.multiSelect && !inputValue ? [] : inputValue;

			if (
				value === variableData.selectedValue ||
				(Array.isArray(value) &&
					Array.isArray(variableData.selectedValue) &&
					areArraysEqual(value, variableData.selectedValue))
			) {
				return;
			}
			if (variableData.name) {
				// Check if ALL is effectively selected by comparing with available options
				const isAllSelected =
					Array.isArray(value) &&
					value.length > 0 &&
					optionsData.every((option) => value.includes(option.toString()));

				if (isAllSelected && variableData.showALLOption) {
					// For ALL selection, pass null to avoid storing values
					onValueUpdate(variableData.name, variableData.id, optionsData, true);
				} else {
					onValueUpdate(variableData.name, variableData.id, value, false);
				}
			}
		},
		[
			variableData.multiSelect,
			variableData.selectedValue,
			variableData.name,
			variableData.id,
			onValueUpdate,
			optionsData,
			variableData.showALLOption,
		],
	);

	// Handler for tracking temporary selection changes
	const handleTempChange = (inputValue: string | string[]): void => {
		// Store the selection in temporary state while the dropdown is open
		const value = variableData.multiSelect && !inputValue ? [] : inputValue;
		setTempSelection(value);
	};

	// Handle dropdown visibility changes
	const handleDropdownVisibleChange = (visible: boolean): void => {
		// Initialize temp selection when opening the dropdown
		if (visible) {
			setTempSelection(getSelectValue(variableData.selectedValue, variableData));
		}
		// Apply changes when closing the dropdown
		else if (!visible && tempSelection !== undefined) {
			// Call handleChange with the temporarily stored selection
			handleChange(tempSelection);
			setTempSelection(undefined);
		}
	};

	// Debounced variant used only by the textbox input below; select variables
	// call handleChange directly and do not need debouncing
	const debouncedHandleChange = debounce(handleChange, 500);

	const { selectedValue } = variableData;
	const selectedValueStringified = useMemo(
		() => getSelectValue(selectedValue, variableData),
		[selectedValue, variableData],
	);

	const enableSelectAll = variableData.multiSelect && variableData.showALLOption;

	const selectValue =
		variableData.allSelected && enableSelectAll
			? 'ALL'
			: selectedValueStringified;

	// Apply default value on first render if no selection exists
	// eslint-disable-next-line sonarjs/cognitive-complexity
	const finalSelectedValues = useMemo(() => {
		if (variableData.multiSelect) {
			let value = tempSelection || selectedValue;
			if (isEmpty(value)) {
				if (variableData.showALLOption) {
					if (variableData.defaultValue) {
						value = variableData.defaultValue;
					} else {
						value = optionsData;
					}
				} else if (variableData.defaultValue) {
					value = variableData.defaultValue;
				} else {
					value = optionsData?.[0];
				}
			}

			return value;
		}
		if (isEmpty(selectedValue)) {
			if (variableData.defaultValue) {
				return variableData.defaultValue;
			}
			return optionsData[0]?.toString();
		}

		return selectedValue;
	}, [
		variableData.multiSelect,
		variableData.showALLOption,
		variableData.defaultValue,
		selectedValue,
		tempSelection,
		optionsData,
	]);

	useEffect(() => {
		if (
			(variableData.multiSelect && !(tempSelection || selectValue)) ||
			isEmpty(selectValue)
		) {
			handleChange(finalSelectedValues as string[] | string);
		}
	}, [
		finalSelectedValues,
		handleChange,
		selectValue,
		tempSelection,
		variableData.multiSelect,
	]);

	useEffect(() => {
		// Fetch options for CUSTOM type
		if (variableData.type === 'CUSTOM') {
			getOptions(null);
		}
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [variableData.type, variableData.customValue]);
	const { name, description, type: variableType } = variableData;

	return (
		<div className={`variable-item${isTextboxFocused ? ' focused' : ''}`}>
		<div className="variable-item">
			<Typography.Text className="variable-name" ellipsis>
				${variableData.name}
				{variableData.description && (
					<Tooltip title={variableData.description}>
				${name}
				{description && (
					<Tooltip title={description}>
						<InfoCircleOutlined className="info-icon" />
					</Tooltip>
				)}
			</Typography.Text>

			<div className="variable-value">
				{variableData.type === 'TEXTBOX' ? (
					<Input
						ref={textboxInputRef}
						placeholder="Enter value"
						data-testid={`variable-textbox-${variableData.id}`}
						bordered={false}
						value={textboxInputValue}
						title={textboxInputValue}
						onChange={(e): void => {
							setTextboxInputValue(e.target.value);
						}}
						onFocus={(): void => {
							setIsTextboxFocused(true);
						}}
						onBlur={(e): void => {
							setIsTextboxFocused(false);
							const value = e.target.value.trim();
							// If empty, reset to the default value
							if (!value && variableData.defaultValue) {
								setTextboxInputValue(variableData.defaultValue.toString());
								debouncedHandleChange(variableData.defaultValue.toString());
							} else {
								debouncedHandleChange(value);
							}
						}}
						onKeyDown={(e): void => {
							if (e.key === 'Enter') {
								const value = textboxInputValue.trim();
								if (!value && variableData.defaultValue) {
									setTextboxInputValue(variableData.defaultValue.toString());
									debouncedHandleChange(variableData.defaultValue.toString());
								} else {
									debouncedHandleChange(value);
								}
								textboxInputRef.current?.blur();
							}
						}}
				{variableType === 'TEXTBOX' && (
					<TextboxVariableInput
						variableData={variableData}
						onValueUpdate={onValueUpdate}
					/>
				) : (
					optionsData &&
					(variableData.multiSelect ? (
						<CustomMultiSelect
							key={
								selectValue && Array.isArray(selectValue)
									? selectValue.join(' ')
									: selectValue || variableData.id
							}
							options={optionsData.map((option) => ({
								label: option.toString(),
								value: option.toString(),
							}))}
							defaultValue={variableData.defaultValue || selectValue}
							onChange={handleTempChange}
							bordered={false}
							placeholder="Select value"
							placement="bottomLeft"
							style={SelectItemStyle}
							loading={isLoading}
							showSearch
							data-testid="variable-select"
							className="variable-select"
							popupClassName="dropdown-styles"
							maxTagCount={2}
							getPopupContainer={popupContainer}
							value={tempSelection || selectValue}
							onDropdownVisibleChange={handleDropdownVisibleChange}
							errorMessage={errorMessage}
							// eslint-disable-next-line react/no-unstable-nested-components
							maxTagPlaceholder={(omittedValues): JSX.Element => {
								const maxDisplayValues = 10;
								const valuesToShow = omittedValues.slice(0, maxDisplayValues);
								const hasMore = omittedValues.length > maxDisplayValues;
								const tooltipText =
									valuesToShow.map(({ value }) => value).join(', ') +
									(hasMore ? ` + ${omittedValues.length - maxDisplayValues} more` : '');

								return (
									<Tooltip title={tooltipText}>
										<span>+ {omittedValues.length} </span>
									</Tooltip>
								);
							}}
							onClear={(): void => {
								handleChange([]);
							}}
							enableAllSelection={enableSelectAll}
							maxTagTextLength={30}
							allowClear={selectValue !== ALL_SELECT_VALUE && selectValue !== 'ALL'}
							onRetry={(): void => {
								setErrorMessage(null);
								refetch();
							}}
						/>
					) : (
						<CustomSelect
							key={
								selectValue && Array.isArray(selectValue)
									? selectValue.join(' ')
									: selectValue || variableData.id
							}
							defaultValue={variableData.defaultValue || selectValue}
							onChange={handleChange}
							bordered={false}
							placeholder="Select value"
							style={SelectItemStyle}
							loading={isLoading}
							showSearch
							data-testid="variable-select"
							className="variable-select"
							popupClassName="dropdown-styles"
							getPopupContainer={popupContainer}
							options={optionsData.map((option) => ({
								label: option.toString(),
								value: option.toString(),
							}))}
							value={selectValue}
							errorMessage={errorMessage}
							onRetry={(): void => {
								setErrorMessage(null);
								refetch();
							}}
						/>
					))
				)}
				{variableData.type !== 'TEXTBOX' && errorMessage && (
					<span style={{ margin: '0 0.5rem' }}>
						<Popover
							placement="top"
							content={<Typography>{errorMessage}</Typography>}
						>
							<WarningOutlined style={{ color: orange[5] }} />
						</Popover>
					</span>
				{variableType === 'CUSTOM' && (
					<CustomVariableInput
						variableData={variableData}
						onValueUpdate={onValueUpdate}
					/>
				)}
				{variableType === 'QUERY' && (
					<QueryVariableInput
						variableData={variableData}
						onValueUpdate={onValueUpdate}
						existingVariables={existingVariables}
						variablesToGetUpdated={variablesToGetUpdated}
						setVariablesToGetUpdated={setVariablesToGetUpdated}
						dependencyData={dependencyData}
					/>
				)}
			</div>
		</div>

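The net effect of the hunk above is that VariableItem no longer owns fetching, debouncing, or select state; it renders the variable name plus a per-type input. A condensed sketch of the resulting render branch (not a verbatim excerpt; queryVariableProps stands in for the full prop list shown above):

	{variableType === 'TEXTBOX' && (
		<TextboxVariableInput variableData={variableData} onValueUpdate={onValueUpdate} />
	)}
	{variableType === 'CUSTOM' && (
		<CustomVariableInput variableData={variableData} onValueUpdate={onValueUpdate} />
	)}
	{variableType === 'QUERY' && <QueryVariableInput {...queryVariableProps} />}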
@@ -0,0 +1,201 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { isEmpty } from 'lodash-es';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { areArraysEqual, getSelectValue } from './util';

interface UseDashboardVariableSelectHelperParams {
	variableData: IDashboardVariable;
	optionsData: (string | number | boolean)[];
	onValueUpdate: (
		name: string,
		id: string,
		value: IDashboardVariable['selectedValue'],
		allSelected: boolean,
	) => void;
}

interface UseDashboardVariableSelectHelperReturn {
	// State
	tempSelection: string | string[] | undefined;
	setTempSelection: React.Dispatch<
		React.SetStateAction<string | string[] | undefined>
	>;
	value: string | string[] | undefined;
	defaultValue: string | string[] | undefined;

	// Derived values
	enableSelectAll: boolean;

	// Handlers
	onChange: (value: string | string[]) => void;
	onDropdownVisibleChange: (visible: boolean) => void;
	handleClear: () => void;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
export function useDashboardVariableSelectHelper({
	variableData,
	optionsData,
	onValueUpdate,
}: UseDashboardVariableSelectHelperParams): UseDashboardVariableSelectHelperReturn {
	const { selectedValue } = variableData;

	const [tempSelection, setTempSelection] = useState<
		string | string[] | undefined
	>(undefined);

	const selectedValueStringified = useMemo(
		() => getSelectValue(selectedValue, variableData),
		[selectedValue, variableData],
	);

	const enableSelectAll = variableData.multiSelect && variableData.showALLOption;

	const selectValue =
		variableData.allSelected && enableSelectAll
			? 'ALL'
			: selectedValueStringified;

	const handleChange = useCallback(
		(inputValue: string | string[]): void => {
			const value = variableData.multiSelect && !inputValue ? [] : inputValue;

			if (
				value === variableData.selectedValue ||
				(Array.isArray(value) &&
					Array.isArray(variableData.selectedValue) &&
					areArraysEqual(value, variableData.selectedValue))
			) {
				return;
			}
			if (variableData.name) {
				// Check if ALL is effectively selected by comparing with available options
				const isAllSelected =
					Array.isArray(value) &&
					value.length > 0 &&
					optionsData.every((option) => value.includes(option.toString()));

				if (isAllSelected && variableData.showALLOption) {
					// For ALL selection, pass optionsData as the value and set allSelected to true
					onValueUpdate(variableData.name, variableData.id, optionsData, true);
				} else {
					onValueUpdate(variableData.name, variableData.id, value, false);
				}
			}
		},
		[
			variableData.multiSelect,
			variableData.selectedValue,
			variableData.name,
			variableData.id,
			variableData.showALLOption,
			onValueUpdate,
			optionsData,
		],
	);

	const handleTempChange = useCallback(
		(inputValue: string | string[]): void => {
			// Store the selection in temporary state while dropdown is open
			const value = variableData.multiSelect && !inputValue ? [] : inputValue;
			setTempSelection(value);
		},
		[variableData.multiSelect],
	);

	// Apply default value on first render if no selection exists
	const finalSelectedValues = useMemo(() => {
		if (variableData.multiSelect) {
			let value = tempSelection || selectedValue;
			if (isEmpty(value)) {
				if (variableData.showALLOption) {
					if (variableData.defaultValue) {
						value = variableData.defaultValue;
					} else {
						value = optionsData;
					}
				} else if (variableData.defaultValue) {
					value = variableData.defaultValue;
				} else {
					value = optionsData?.[0];
				}
			}

			return value;
		}
		if (isEmpty(selectedValue)) {
			if (variableData.defaultValue) {
				return variableData.defaultValue;
			}
			return optionsData[0]?.toString();
		}

		return selectedValue;
	}, [
		variableData.multiSelect,
		variableData.showALLOption,
		variableData.defaultValue,
		selectedValue,
		tempSelection,
		optionsData,
	]);

	// Apply default values when needed
	useEffect(() => {
		if (
			(variableData.multiSelect && !(tempSelection || selectValue)) ||
			isEmpty(selectValue)
		) {
			handleChange(finalSelectedValues as string[] | string);
		}
	}, [
		finalSelectedValues,
		handleChange,
		selectValue,
		tempSelection,
		variableData.multiSelect,
	]);

	// Handle dropdown visibility changes
	const onDropdownVisibleChange = useCallback(
		(visible: boolean): void => {
			// Initialize temp selection when opening dropdown
			if (visible) {
				setTempSelection(getSelectValue(variableData.selectedValue, variableData));
			}
			// Apply changes when closing dropdown
			else if (!visible && tempSelection !== undefined) {
				// Call handleChange with the temporarily stored selection
				handleChange(tempSelection);
				setTempSelection(undefined);
			}
		},
		[variableData, tempSelection, handleChange],
	);

	const handleClear = useCallback((): void => {
		handleChange([]);
	}, [handleChange]);

	const value = variableData.multiSelect
		? tempSelection || selectValue
		: selectValue;

	const defaultValue = variableData.defaultValue || selectValue;

	const onChange = useMemo(() => {
		return variableData.multiSelect ? handleTempChange : handleChange;
	}, [variableData.multiSelect, handleTempChange, handleChange]);

	return {
		tempSelection,
		setTempSelection,
		enableSelectAll,
		onDropdownVisibleChange,
		handleClear,
		value,
		defaultValue,
		onChange,
	};
}
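A worked example of the ALL detection in handleChange, under assumed options (illustrative values only):

	// optionsData = ['frontend', 'backend', 'worker']
	// Selecting ['backend', 'frontend', 'worker'] covers every option, so the hook
	// calls onValueUpdate(name, id, optionsData, /* allSelected */ true);
	// selecting ['frontend'] alone calls onValueUpdate(name, id, ['frontend'], false).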
@@ -3,7 +3,7 @@ import { useCallback } from 'react';
import { useAddDynamicVariableToPanels } from 'hooks/dashboard/useAddDynamicVariableToPanels';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { v4 as uuidv4 } from 'uuid';

@@ -1,6 +1,9 @@
import { OptionData } from 'components/NewSelect/types';
import { isEmpty, isNull } from 'lodash-es';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import {
	IDashboardVariables,
	IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

export function areArraysEqual(
@@ -97,14 +100,6 @@ export const buildDependencies = (
	return graph;
};

export interface IDependencyData {
	order: string[];
	graph: VariableGraph;
	parentDependencyGraph: VariableGraph;
	hasCycle: boolean;
	cycleNodes?: string[];
}

export const buildParentDependencyGraph = (
	graph: VariableGraph,
): VariableGraph => {
@@ -386,3 +381,16 @@ export const uniqueValues = (values: string[] | string): string[] | string => {

	return values;
};

export const getSelectValue = (
	selectedValue: IDashboardVariable['selectedValue'],
	variableData: IDashboardVariable,
): string | string[] | undefined => {
	if (Array.isArray(selectedValue)) {
		if (!variableData.multiSelect && selectedValue.length === 1) {
			return selectedValue[0]?.toString();
		}
		return selectedValue.map((item) => item.toString());
	}
	return selectedValue?.toString();
};
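A few worked calls to the relocated helper, with illustrative inputs:

	// multi-select keeps arrays as string arrays:
	getSelectValue(['a', 'b'], { multiSelect: true } as IDashboardVariable); // ['a', 'b']
	// single-select unwraps a one-element array:
	getSelectValue(['a'], { multiSelect: false } as IDashboardVariable); // 'a'
	// scalars are stringified:
	getSelectValue(42, { multiSelect: false } as IDashboardVariable); // '42'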
@@ -0,0 +1,104 @@
import { useCallback, useRef } from 'react';
import ChartLayout from 'container/DashboardContainer/visualization/layout/ChartLayout/ChartLayout';
import Legend from 'lib/uPlotV2/components/Legend/Legend';
import Tooltip from 'lib/uPlotV2/components/Tooltip/Tooltip';
import {
	LegendPosition,
	TooltipRenderArgs,
} from 'lib/uPlotV2/components/types';
import UPlotChart from 'lib/uPlotV2/components/UPlotChart';
import { PlotContextProvider } from 'lib/uPlotV2/context/PlotContext';
import TooltipPlugin from 'lib/uPlotV2/plugins/TooltipPlugin/TooltipPlugin';
import _noop from 'lodash-es/noop';
import uPlot from 'uplot';

import { ChartProps } from '../types';

const TOOLTIP_WIDTH_PADDING = 60;
const TOOLTIP_MIN_WIDTH = 200;

export default function TimeSeries({
	legendConfig = { position: LegendPosition.BOTTOM },
	config,
	data,
	width: containerWidth,
	height: containerHeight,
	disableTooltip = false,
	canPinTooltip = false,
	timezone,
	yAxisUnit,
	decimalPrecision,
	syncMode,
	syncKey,
	onDestroy = _noop,
	children,
	layoutChildren,
	'data-testid': testId,
}: ChartProps): JSX.Element {
	const plotInstanceRef = useRef<uPlot | null>(null);

	const legendComponent = useCallback(
		(averageLegendWidth: number): React.ReactNode => {
			return (
				<Legend
					config={config}
					position={legendConfig.position}
					averageLegendWidth={averageLegendWidth}
				/>
			);
		},
		[config, legendConfig.position],
	);

	return (
		<PlotContextProvider>
			<ChartLayout
				config={config}
				containerWidth={containerWidth}
				containerHeight={containerHeight}
				legendConfig={legendConfig}
				legendComponent={legendComponent}
				layoutChildren={layoutChildren}
			>
				{({ chartWidth, chartHeight, averageLegendWidth }): JSX.Element => (
					<UPlotChart
						config={config}
						data={data}
						width={chartWidth}
						height={chartHeight}
						plotRef={(plot): void => {
							plotInstanceRef.current = plot;
						}}
						onDestroy={(plot: uPlot): void => {
							plotInstanceRef.current = null;
							onDestroy(plot);
						}}
						data-testid={testId}
					>
						{children}
						{!disableTooltip && (
							<TooltipPlugin
								config={config}
								canPinTooltip={canPinTooltip}
								syncMode={syncMode}
								maxWidth={Math.max(
									TOOLTIP_MIN_WIDTH,
									averageLegendWidth + TOOLTIP_WIDTH_PADDING,
								)}
								syncKey={syncKey}
								render={(props: TooltipRenderArgs): React.ReactNode => (
									<Tooltip
										{...props}
										timezone={timezone}
										yAxisUnit={yAxisUnit}
										decimalPrecision={decimalPrecision}
									/>
								)}
							/>
						)}
					</UPlotChart>
				)}
			</ChartLayout>
		</PlotContextProvider>
	);
}
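The tooltip width above is derived from the measured legend width with a fixed floor; two worked cases:

	// averageLegendWidth = 180 → maxWidth = Math.max(200, 180 + 60) = 240
	// averageLegendWidth = 90  → maxWidth = Math.max(200, 90 + 60) = 200 (the floor wins)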
@@ -0,0 +1,29 @@
import { PrecisionOption } from 'components/Graph/types';
import { LegendConfig } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { DashboardCursorSync } from 'lib/uPlotV2/plugins/TooltipPlugin/types';

interface BaseChartProps {
	width: number;
	height: number;
	disableTooltip?: boolean;
	timezone: string;
	syncMode?: DashboardCursorSync;
	syncKey?: string;
	canPinTooltip?: boolean;
	yAxisUnit?: string;
	decimalPrecision?: PrecisionOption;
}

interface TimeSeriesChartProps extends BaseChartProps {
	config: UPlotConfigBuilder;
	legendConfig: LegendConfig;
	data: uPlot.AlignedData;
	plotRef?: (plot: uPlot | null) => void;
	onDestroy?: (plot: uPlot) => void;
	children?: React.ReactNode;
	layoutChildren?: React.ReactNode;
	'data-testid'?: string;
}

export type ChartProps = TimeSeriesChartProps;
@@ -5,25 +5,32 @@ export interface ChartDimensions {
	height: number;
	legendWidth: number;
	legendHeight: number;
	legendsPerSet: number;
	averageLegendWidth: number;
}

const AVG_CHAR_WIDTH = 8;
const LEGEND_WIDTH_PERCENTILE = 0.85;
const DEFAULT_AVG_LABEL_LENGTH = 15;
const LEGEND_GAP = 16;
const BASE_LEGEND_WIDTH = 16;
const LEGEND_PADDING = 12;
const LEGEND_LINE_HEIGHT = 36;
const LEGEND_LINE_HEIGHT = 28;

/**
 * Average text width from series labels (for legendsPerSet).
 * Calculates the average width of the legend items based on the labels of the series.
 * @param legends - The labels of the series.
 * @returns The average width of the legend items.
 */
export function calculateAverageLegendWidth(legends: string[]): number {
	if (legends.length === 0) {
		return DEFAULT_AVG_LABEL_LENGTH;
		return DEFAULT_AVG_LABEL_LENGTH * AVG_CHAR_WIDTH;
	}
	const averageLabelLength =
		legends.reduce((sum, l) => sum + l.length, 0) / legends.length;
	return averageLabelLength * AVG_CHAR_WIDTH;

	const lengths = legends.map((l) => l.length).sort((a, b) => a - b);

	const index = Math.ceil(LEGEND_WIDTH_PERCENTILE * lengths.length) - 1;
	const percentileLength = lengths[Math.max(0, index)];

	return BASE_LEGEND_WIDTH + percentileLength * AVG_CHAR_WIDTH;
}
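A worked example of the new percentile-based sizing (label lengths assumed):

	// sorted label lengths: [4, 6, 8, 10, 30]
	// index = Math.ceil(0.85 * 5) - 1 = 4 → percentileLength = 30
	// width = BASE_LEGEND_WIDTH + 30 * AVG_CHAR_WIDTH = 16 + 240 = 256px
	// The removed mean-based width would have been ((4+6+8+10+30)/5) * 8 ≈ 93px,
	// far too narrow for the longest labels.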

/**
@@ -64,7 +71,7 @@ export function calculateChartDimensions({
			height: 0,
			legendWidth: 0,
			legendHeight: 0,
			legendsPerSet: 0,
			averageLegendWidth: 0,
		};
	}

@@ -85,13 +92,15 @@ export function calculateChartDimensions({
			legendWidth: rightLegendWidth,
			legendHeight: containerHeight,
			// Single vertical list on the right.
			legendsPerSet: 1,
			averageLegendWidth: rightLegendWidth,
		};
	}

	const legendRowHeight = LEGEND_LINE_HEIGHT + LEGEND_PADDING;

	const legendItemWidth = Math.min(approxLegendItemWidth, 400);
	const legendItemWidth = Math.ceil(
		Math.min(approxLegendItemWidth, MAX_LEGEND_WIDTH),
	);
	const legendItemsPerRow = Math.max(
		1,
		Math.floor((containerWidth - LEGEND_PADDING * 2) / legendItemWidth),
@@ -114,17 +123,11 @@ export function calculateChartDimensions({
		maxAllowedLegendHeight,
	);

	// How many legend items per row in the Legend component.
	const legendsPerSet = Math.ceil(
		(containerWidth + LEGEND_GAP) /
			(Math.min(MAX_LEGEND_WIDTH, approxLegendItemWidth) + LEGEND_GAP),
	);

	return {
		width: containerWidth,
		height: Math.max(0, containerHeight - bottomLegendHeight),
		legendWidth: containerWidth,
		legendHeight: bottomLegendHeight,
		legendsPerSet,
		averageLegendWidth: legendItemWidth,
	};
}
@@ -0,0 +1,21 @@
.chart-manager-container {
	width: 100%;
	max-height: calc(40% - 40px);
	display: flex;
	flex-direction: column;
	gap: 16px;

	.chart-manager-header {
		display: flex;
		justify-content: space-between;
		align-items: center;
		gap: 16px;

		.chart-manager-actions-container {
			display: flex;
			justify-content: flex-end;
			align-items: center;
			gap: 8px;
		}
	}
}
@@ -0,0 +1,147 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { Button, Input } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
import { useNotifications } from 'hooks/useNotifications';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { useDashboard } from 'providers/Dashboard/Dashboard';

import './ChartManager.styles.scss';

interface ChartManagerProps {
	config: UPlotConfigBuilder;
	alignedData: uPlot.AlignedData;
	yAxisUnit?: string;
	onCancel?: () => void;
}

/**
 * ChartManager provides a tabular view to manage the visibility of
 * individual series on a uPlot chart.
 *
 * It syncs with the legend state coming from the plot context and
 * allows users to:
 * - filter series by label
 * - toggle individual series on/off
 * - persist the visibility configuration to local storage.
 *
 * @param config - `UPlotConfigBuilder` instance used to derive chart options.
 * @param alignedData - uPlot aligned data used to build the initial table dataset.
 * @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
 * @param onCancel - Optional callback invoked when the user cancels the dialog.
 */
export default function ChartManager({
	config,
	alignedData,
	yAxisUnit,
	onCancel,
}: ChartManagerProps): JSX.Element {
	const { notifications } = useNotifications();
	const { legendItemsMap } = useLegendsSync({
		config,
		subscribeToFocusChange: false,
	});
	const {
		onToggleSeriesOnOff,
		onToggleSeriesVisibility,
		syncSeriesVisibilityToLocalStorage,
	} = usePlotContext();
	const { isDashboardLocked } = useDashboard();

	const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
		getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
	);

	const graphVisibilityState = useMemo(
		() =>
			Object.entries(legendItemsMap).reduce<boolean[]>((acc, [key, item]) => {
				acc[Number(key)] = item.show;
				return acc;
			}, []),
		[legendItemsMap],
	);

	useEffect(() => {
		setTableDataSet(
			getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
		);
	}, [alignedData, config]);

	const filterHandler = useCallback(
		(event: React.ChangeEvent<HTMLInputElement>): void => {
			const value = event.target.value.toString().toLowerCase();
			const updatedDataSet = tableDataSet.map((item) => {
				if (item.label?.toLocaleLowerCase().includes(value)) {
					return { ...item, show: true };
				}
				return { ...item, show: false };
			});
			setTableDataSet(updatedDataSet);
		},
		[tableDataSet],
	);

	const dataSource = useMemo(
		() =>
			tableDataSet.filter(
				(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
			),
		[tableDataSet],
	);

	const columns = useMemo(
		() =>
			getGraphManagerTableColumns({
				tableDataSet,
				checkBoxOnChangeHandler: (_e, index) => {
					onToggleSeriesOnOff(index);
				},
				graphVisibilityState,
				labelClickedHandler: onToggleSeriesVisibility,
				yAxisUnit,
				isGraphDisabled: isDashboardLocked,
			}),
		// eslint-disable-next-line react-hooks/exhaustive-deps
		[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
	);

	const handleSave = useCallback((): void => {
		syncSeriesVisibilityToLocalStorage();
		notifications.success({
			message: 'The updated graphs & legends are saved',
		});
		if (onCancel) {
			onCancel();
		}
	}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);

	return (
		<div className="chart-manager-container">
			<div className="chart-manager-header">
				<Input onChange={filterHandler} placeholder="Filter Series" />
				<div className="chart-manager-actions-container">
					<Button type="default" onClick={onCancel}>
						Cancel
					</Button>
					<Button type="primary" onClick={handleSave}>
						Save
					</Button>
				</div>
			</div>
			<div className="chart-manager-table-container">
				<ResizeTable
					columns={columns}
					dataSource={dataSource}
					virtual
					rowKey="index"
					scroll={{ y: 200 }}
					pagination={false}
				/>
			</div>
		</div>
	);
}
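A hypothetical mount point for the component, e.g. inside a full-view modal (all prop values here are assumptions):

	<ChartManager
		config={configBuilder}
		alignedData={alignedData}
		yAxisUnit="ms"
		onCancel={closeModal}
	/>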
@@ -0,0 +1,120 @@
import { useCallback } from 'react';
import { UseQueryResult } from 'react-query';
import {
	getTimeRangeFromStepInterval,
	isApmMetric,
} from 'container/PanelWrapper/utils';
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
import {
	PopoverPosition,
	useCoordinates,
} from 'periscope/components/ContextMenu';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';

interface UseTimeSeriesContextMenuParams {
	widget: Widgets;
	queryResponse: UseQueryResult<
		SuccessResponse<MetricRangePayloadProps, unknown>,
		Error
	>;
}

export const usePanelContextMenu = ({
	widget,
	queryResponse,
}: UseTimeSeriesContextMenuParams): {
	coordinates: { x: number; y: number } | null;
	popoverPosition: PopoverPosition | null;
	onClose: () => void;
	menuItemsConfig: {
		header?: string | React.ReactNode;
		items?: React.ReactNode;
	};
	clickHandlerWithContextMenu: (...args: any[]) => void;
} => {
	const {
		coordinates,
		popoverPosition,
		clickedData,
		onClose,
		subMenu,
		onClick,
		setSubMenu,
	} = useCoordinates();

	const { menuItemsConfig } = useGraphContextMenu({
		widgetId: widget.id || '',
		query: widget.query,
		graphData: clickedData,
		onClose,
		coordinates,
		subMenu,
		setSubMenu,
		contextLinks: widget.contextLinks,
		panelType: widget.panelTypes,
		queryRange: queryResponse,
	});

	const clickHandlerWithContextMenu = useCallback(
		(...args: any[]) => {
			const [
				xValue,
				_yvalue,
				_mouseX,
				_mouseY,
				metric,
				queryData,
				absoluteMouseX,
				absoluteMouseY,
				axesData,
				focusedSeries,
			] = args;

			const data = getUplotClickData({
				metric,
				queryData,
				absoluteMouseX,
				absoluteMouseY,
				focusedSeries,
			});

			let timeRange;

			if (axesData && queryData?.queryName) {
				const compositeQuery = (queryResponse?.data?.params as any)?.compositeQuery;

				if (compositeQuery?.queries) {
					const specificQuery = compositeQuery.queries.find(
						(query: any) => query.spec?.name === queryData.queryName,
					);

					const stepInterval = specificQuery?.spec?.stepInterval || 60;

					timeRange = getTimeRangeFromStepInterval(
						stepInterval,
						metric?.clickedTimestamp || xValue,
						specificQuery?.spec?.signal === DataSource.METRICS &&
							isApmMetric(specificQuery?.spec?.aggregations[0]?.metricName),
					);
				}
			}

			if (data && data?.record?.queryName) {
				onClick(data.coord, { ...data.record, label: data.label, timeRange });
			}
		},
		[onClick, queryResponse],
	);

	return {
		coordinates,
		popoverPosition,
		onClose,
		menuItemsConfig,
		clickHandlerWithContextMenu,
	};
};
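For reference, a minimal consumer of the hook; the real wiring appears in TimeSeriesPanel further below:

	const {
		coordinates,
		popoverPosition,
		onClose,
		menuItemsConfig,
		clickHandlerWithContextMenu,
	} = usePanelContextMenu({ widget, queryResponse });
	// Pass clickHandlerWithContextMenu as the chart config's onClick handler,
	// then render <ContextMenu /> from the remaining values.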
@@ -11,6 +11,7 @@ export interface ChartLayoutProps {
	children: (props: {
		chartWidth: number;
		chartHeight: number;
		averageLegendWidth: number;
	}) => React.ReactNode;
	layoutChildren?: React.ReactNode;
	containerWidth: number;
@@ -56,6 +57,7 @@ export default function ChartLayout({
				{children({
					chartWidth: chartDimensions.width,
					chartHeight: chartDimensions.height,
					averageLegendWidth: chartDimensions.averageLegendWidth,
				})}
			</div>
			<div
@@ -65,7 +67,7 @@ export default function ChartLayout({
					width: chartDimensions.legendWidth,
				}}
			>
				{legendComponent(chartDimensions.legendsPerSet)}
				{legendComponent(chartDimensions.averageLegendWidth)}
			</div>
		</div>
		{layoutChildren}

@@ -0,0 +1,4 @@
.panel-container {
	height: 100%;
	width: 100%;
}
@@ -0,0 +1,176 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { LineInterpolation } from 'lib/uPlotV2/config/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { getTimeRange } from 'utils/getTimeRange';

import { prepareChartData, prepareUPlotConfig } from '../TimeSeriesPanel/utils';

import '../Panel.styles.scss';

function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
	const {
		panelMode,
		queryResponse,
		widget,
		onDragSelect,
		isFullViewMode,
		onToggleModelHandler,
	} = props;
	const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
	const graphRef = useRef<HTMLDivElement>(null);
	const [minTimeScale, setMinTimeScale] = useState<number>();
	const [maxTimeScale, setMaxTimeScale] = useState<number>();
	const containerDimensions = useResizeObserver(graphRef);

	const isDarkMode = useIsDarkMode();
	const { timezone } = useTimezone();

	useEffect(() => {
		if (toScrollWidgetId === widget.id) {
			graphRef.current?.scrollIntoView({
				behavior: 'smooth',
				block: 'center',
			});
			graphRef.current?.focus();
			setToScrollWidgetId('');
		}
	}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);

	useEffect((): void => {
		const { startTime, endTime } = getTimeRange(queryResponse);

		setMinTimeScale(startTime);
		setMaxTimeScale(endTime);
	}, [queryResponse]);

	const {
		coordinates,
		popoverPosition,
		onClose,
		menuItemsConfig,
		clickHandlerWithContextMenu,
	} = usePanelContextMenu({
		widget,
		queryResponse,
	});

	const chartData = useMemo(() => {
		if (!queryResponse?.data?.payload) {
			return [];
		}
		return prepareChartData(queryResponse?.data?.payload);
	}, [queryResponse?.data?.payload]);

	const config = useMemo(() => {
		const tzDate = (timestamp: number): Date =>
			uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);

		return prepareUPlotConfig({
			widgetId: widget.id || '',
			apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
			tzDate,
			minTimeScale: minTimeScale,
			maxTimeScale: maxTimeScale,
			isLogScale: widget?.isLogScale ?? false,
			thresholds: {
				scaleKey: 'y',
				thresholds: (widget.thresholds || []).map((threshold) => ({
					thresholdValue: threshold.thresholdValue ?? 0,
					thresholdColor: threshold.thresholdColor,
					thresholdUnit: threshold.thresholdUnit,
					thresholdLabel: threshold.thresholdLabel,
				})),
				yAxisUnit: widget.yAxisUnit,
			},
			yAxisUnit: widget.yAxisUnit || '',
			softMin: widget.softMin === undefined ? null : widget.softMin,
			softMax: widget.softMax === undefined ? null : widget.softMax,
			spanGaps: false,
			colorMapping: widget.customLegendColors ?? {},
			lineInterpolation: LineInterpolation.Spline,
			isDarkMode,
			onClick: clickHandlerWithContextMenu,
			onDragSelect,
			currentQuery: widget.query,
			panelMode,
		});
	}, [
		widget.id,
		maxTimeScale,
		minTimeScale,
		timezone.value,
		widget.customLegendColors,
		widget.isLogScale,
		widget.softMax,
		widget.softMin,
		isDarkMode,
		queryResponse?.data?.payload,
		widget.query,
		widget.thresholds,
		widget.yAxisUnit,
		panelMode,
		clickHandlerWithContextMenu,
		onDragSelect,
	]);

	const layoutChildren = useMemo(() => {
		if (!isFullViewMode) {
			return null;
		}
		return (
			<ChartManager
				config={config}
				alignedData={chartData}
				yAxisUnit={widget.yAxisUnit}
				onCancel={onToggleModelHandler}
			/>
		);
	}, [
		isFullViewMode,
		config,
		chartData,
		widget.yAxisUnit,
		onToggleModelHandler,
	]);

	return (
		<div className="panel-container" ref={graphRef}>
			{containerDimensions.width > 0 && containerDimensions.height > 0 && (
				<TimeSeries
					config={config}
					legendConfig={{
						position: widget?.legendPosition ?? LegendPosition.BOTTOM,
					}}
					yAxisUnit={widget.yAxisUnit}
					decimalPrecision={widget.decimalPrecision}
					timezone={timezone.value}
					data={chartData as uPlot.AlignedData}
					width={containerDimensions.width}
					height={containerDimensions.height}
					layoutChildren={layoutChildren}
				>
					<ContextMenu
						coordinates={coordinates}
						popoverPosition={popoverPosition}
						title={menuItemsConfig.header as string}
						items={menuItemsConfig.items}
						onClose={onClose}
					/>
				</TimeSeries>
			)}
		</div>
	);
}

export default TimeSeriesPanel;
@@ -0,0 +1,170 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
	fillMissingXAxisTimestamps,
	getXAxisTimestamps,
} from 'container/DashboardContainer/visualization/panels/utils';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import onClickPlugin, {
	OnClickPluginOpts,
} from 'lib/uPlotLib/plugins/onClickPlugin';
import {
	DistributionType,
	DrawStyle,
	LineInterpolation,
	LineStyle,
	SelectionPreferencesSource,
	VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import uPlot from 'uplot';

import { PanelMode } from '../types';

export const prepareChartData = (
	apiResponse: MetricRangePayloadProps,
): uPlot.AlignedData => {
	const seriesList = apiResponse?.data?.result || [];
	const timestampArr = getXAxisTimestamps(seriesList);
	const yAxisValuesArr = fillMissingXAxisTimestamps(timestampArr, seriesList);

	return [timestampArr, ...yAxisValuesArr];
};

export const prepareUPlotConfig = ({
	widgetId,
	apiResponse,
	tzDate,
	minTimeScale,
	maxTimeScale,
	isLogScale,
	thresholds,
	softMin,
	softMax,
	spanGaps,
	colorMapping,
	lineInterpolation,
	isDarkMode,
	currentQuery,
	onDragSelect,
	onClick,
	yAxisUnit,
	panelMode,
}: {
	widgetId: string;
	apiResponse: MetricRangePayloadProps;
	tzDate: uPlot.LocalDateFromUnix;
	minTimeScale: number | undefined;
	maxTimeScale: number | undefined;
	isLogScale: boolean;
	softMin: number | null;
	softMax: number | null;
	spanGaps: boolean;
	colorMapping: Record<string, string>;
	lineInterpolation: LineInterpolation;
	isDarkMode: boolean;
	thresholds: ThresholdsDrawHookOptions;
	currentQuery: Query;
	yAxisUnit: string;
	onDragSelect: (startTime: number, endTime: number) => void;
	onClick?: OnClickPluginOpts['onClick'];
	panelMode: PanelMode;
}): UPlotConfigBuilder => {
	const builder = new UPlotConfigBuilder({
		onDragSelect,
		widgetId,
		tzDate,
		shouldSaveSelectionPreference: panelMode === PanelMode.DASHBOARD_VIEW,
		selectionPreferencesSource: [
			PanelMode.DASHBOARD_VIEW,
			PanelMode.STANDALONE_VIEW,
		].includes(panelMode)
			? SelectionPreferencesSource.LOCAL_STORAGE
			: SelectionPreferencesSource.IN_MEMORY,
	});

	// X scale – time axis
	builder.addScale({
		scaleKey: 'x',
		time: true,
		min: minTimeScale,
		max: maxTimeScale,
		logBase: isLogScale ? 10 : undefined,
		distribution: isLogScale
			? DistributionType.Logarithmic
			: DistributionType.Linear,
	});

	// Y scale – value axis, driven primarily by softMin/softMax and data
	builder.addScale({
		scaleKey: 'y',
		time: false,
		min: undefined,
		max: undefined,
		softMin: softMin ?? undefined,
		softMax: softMax ?? undefined,
		thresholds,
		logBase: isLogScale ? 10 : undefined,
		distribution: isLogScale
			? DistributionType.Logarithmic
			: DistributionType.Linear,
	});

	builder.addThresholds(thresholds);

	if (typeof onClick === 'function') {
		builder.addPlugin(
			onClickPlugin({
				onClick,
				apiResponse,
			}),
		);
	}

	builder.addAxis({
		scaleKey: 'x',
		show: true,
		side: 2,
		isDarkMode,
		isLogScale: false,
		panelType: PANEL_TYPES.TIME_SERIES,
	});

	builder.addAxis({
		scaleKey: 'y',
		show: true,
		side: 3,
		isDarkMode,
		isLogScale: false,
		yAxisUnit,
		panelType: PANEL_TYPES.TIME_SERIES,
	});

	apiResponse.data?.result?.forEach((series) => {
		const baseLabelName = getLabelName(
			series.metric,
			series.queryName || '', // query
			series.legend || '',
		);

		const label = currentQuery
			? getLegend(series, currentQuery, baseLabelName)
			: baseLabelName;

		builder.addSeries({
			scaleKey: 'y',
			drawStyle: DrawStyle.Line,
			label,
			colorMapping,
			spanGaps,
			lineStyle: LineStyle.Solid,
			lineInterpolation,
			showPoints: VisibilityMode.Never,
			pointSize: 5,
			isDarkMode,
		});
	});
	return builder;
};
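For reviewers trying these two helpers outside the React panel, here is a minimal sketch of how they compose. It assumes that getConfig() on the returned builder yields a uPlot.Options-compatible object minus width/height (as the builder tests later in this diff suggest), and it treats the API response and query objects as opaque placeholders rather than real repo data:

import uPlot from 'uplot';

import { LineInterpolation } from 'lib/uPlotV2/config/types';
import { prepareChartData, prepareUPlotConfig } from './utils';
import { PanelMode } from '../types';

// Placeholders for real objects from the query-range API and query builder;
// both are assumptions for this sketch, not repo code.
declare const apiResponse: import('types/api/metrics/getQueryRange').MetricRangePayloadProps;
declare const currentQuery: import('types/api/queryBuilder/queryBuilderData').Query;

const data = prepareChartData(apiResponse); // [timestamps, ...seriesValues]

const builder = prepareUPlotConfig({
	widgetId: 'demo-widget',
	apiResponse,
	tzDate: (ts: number): Date => uPlot.tzDate(new Date(ts * 1e3), 'UTC'),
	minTimeScale: undefined,
	maxTimeScale: undefined,
	isLogScale: false,
	thresholds: { scaleKey: 'y', thresholds: [] },
	softMin: null,
	softMax: null,
	spanGaps: false,
	colorMapping: {},
	lineInterpolation: LineInterpolation.Spline,
	isDarkMode: true,
	currentQuery,
	yAxisUnit: 'ms',
	onDragSelect: (start, end): void => console.log(start, end),
	panelMode: PanelMode.DASHBOARD_VIEW,
});

// In the real panel, width/height come from the container's resize observer.
const plot = new uPlot(
	{ ...builder.getConfig(), width: 600, height: 300 } as uPlot.Options,
	data,
	document.body,
);
console.log(plot.width);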
@@ -0,0 +1,54 @@
import { normalizePlotValue } from 'lib/uPlotV2/utils/dataUtils';
import { QueryData } from 'types/api/widgets/getQuery';

export function getXAxisTimestamps(seriesList: QueryData[]): number[] {
	const timestamps = new Set<number>();

	seriesList.forEach((series: { values?: [number, string][] }) => {
		if (series?.values) {
			series.values.forEach((value) => {
				timestamps.add(value[0]);
			});
		}
	});

	const timestampsArr = Array.from(timestamps);
	timestampsArr.sort((a, b) => a - b);

	return timestampsArr;
}

export function fillMissingXAxisTimestamps(
	timestampArr: number[],
	data: Array<{ values?: [number, string][] }>,
): (number | null)[][] {
	// Ensure we work with a sorted, de-duplicated list of x-axis timestamps
	const canonicalTimestamps = Array.from(new Set(timestampArr)).sort(
		(a, b) => a - b,
	);

	return data.map(({ values }) =>
		buildSeriesYValues(canonicalTimestamps, values),
	);
}

function buildSeriesYValues(
	timestamps: number[],
	values?: [number, string][],
): (number | null)[] {
	if (!values?.length) {
		return [];
	}

	const valueByTimestamp = new Map<number, number | null>();

	for (let i = 0; i < values.length; i++) {
		const [timestamp, rawValue] = values[i];
		valueByTimestamp.set(timestamp, normalizePlotValue(rawValue));
	}

	return timestamps.map((timestamp) => {
		const value = valueByTimestamp.get(timestamp);
		return value !== undefined ? value : null;
	});
}
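A tiny worked example makes the alignment contract of these two utilities concrete. This is a sketch; it assumes normalizePlotValue parses numeric strings to numbers, which is all the gap-filling logic relies on:

import {
	fillMissingXAxisTimestamps,
	getXAxisTimestamps,
} from 'container/DashboardContainer/visualization/panels/utils';
import { QueryData } from 'types/api/widgets/getQuery';

// Two series sampled at partially overlapping timestamps.
const seriesList = [
	{ values: [[100, '1'], [200, '2']] },
	{ values: [[200, '5'], [300, '6']] },
] as unknown as QueryData[];

// Union of all timestamps, sorted ascending: [100, 200, 300]
const timestamps = getXAxisTimestamps(seriesList);

// Each series is padded with null where it has no sample, so every row
// lines up with the shared x-axis:
//   [[1, 2, null], [null, 5, 6]]
const yValues = fillMissingXAxisTimestamps(timestamps, seriesList);

console.log(timestamps, yValues);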
@@ -4,7 +4,7 @@ import { ToggleGraphProps } from 'components/Graph/types';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';

@@ -12,7 +12,7 @@ import {
	initialQueriesMap,
	initialQueryBuilderFormValues,
} from 'constants/queryBuilder';
import { IUseDashboardVariablesReturn } from 'hooks/dashboard/useDashboardVariables';
import { IUseDashboardVariablesReturn } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';

@@ -1,6 +1,6 @@
import React from 'react';
import { renderHook } from '@testing-library/react';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';

import useGetResolvedText from '../useGetResolvedText';
@@ -1,21 +1,40 @@
import { useSyncExternalStore } from 'react';

import { useCallback, useRef, useSyncExternalStore } from 'react';
import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import {
	dashboardVariablesStore,
	IDashboardVariables,
} from '../../providers/Dashboard/store/dashboardVariablesStore';
	IDashboardVariablesStoreState,
	IUseDashboardVariablesReturn,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';

export interface IUseDashboardVariablesReturn {
	dashboardVariables: IDashboardVariables;
}
/**
 * Generic selector hook for dashboard variables store.
 * Allows granular subscriptions to any part of the store state.
 *
 * @example
 * // Select top-level field
 * const variables = useDashboardVariablesSelector(s => s.variables);
 *
 * // Select specific variable
 * const fooVar = useDashboardVariablesSelector(s => s.variables['foo']);
 *
 * // Select derived value
 * const hasVariables = useDashboardVariablesSelector(s => Object.keys(s.variables).length > 0);
 */
export const useDashboardVariablesSelector = <T>(
	selector: (state: IDashboardVariablesStoreState) => T,
): T => {
	const selectorRef = useRef(selector);
	selectorRef.current = selector;

export const useDashboardVariables = (): IUseDashboardVariablesReturn => {
	const dashboardVariables = useSyncExternalStore(
		dashboardVariablesStore.subscribe,
		dashboardVariablesStore.getSnapshot,
	const getSnapshot = useCallback(
		() => selectorRef.current(dashboardVariablesStore.getSnapshot()),
		[],
	);

	return {
		dashboardVariables,
	};
	return useSyncExternalStore(dashboardVariablesStore.subscribe, getSnapshot);
};

export const useDashboardVariables = (): IUseDashboardVariablesReturn => {
	const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);

	return { dashboardVariables };
};
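A short usage sketch for the new selector hook. The import path is inferred from the hunk above (the file previously exported IUseDashboardVariablesReturn from 'hooks/dashboard/useDashboardVariables'), and the store shape is assumed to follow IDashboardVariablesStoreState with a variables record, as the JSDoc examples show:

import React from 'react';

import { useDashboardVariablesSelector } from 'hooks/dashboard/useDashboardVariables';

function EnvironmentBadge(): JSX.Element {
	// Subscribes only to the `environment` variable: updates to any other
	// part of the store do not re-render this component, which is the point
	// of the granular selector over the old whole-snapshot hook.
	const environment = useDashboardVariablesSelector(
		(s) => s.variables['environment'],
	);

	return <span>{String(environment ?? 'all')}</span>;
}

export default EnvironmentBadge;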
@@ -1,5 +1,5 @@
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import store from 'store';

export const getDashboardVariables = (
@@ -1,7 +1,18 @@
.legend-search-container {
	flex-shrink: 0;
	width: 100%;
	padding-right: 8px;

	.legend-search-input {
		font-size: 12px;
	}
}

.legend-container {
	display: flex;
	flex-direction: column;
	align-items: center;
	gap: 12px;
	height: 100%;
	width: 100%;

@@ -13,10 +24,50 @@
		opacity: 1;
	}

	.legend-empty-state {
		font-size: 12px;
		color: var(--bg-vanilla-400);
		text-align: center;
		padding: 12px;
		padding: 2rem 0;
	}

	.legend-virtuoso-container {
		height: 100%;
		width: 100%;

		.virtuoso-grid-list {
			min-width: 0;
			display: grid;
			grid-auto-flow: row;
			grid-template-columns: repeat(
				auto-fill,
				minmax(var(--legend-average-width, 240px), 1fr)
			);
			row-gap: 4px;
			column-gap: 12px;
		}

		.virtuoso-grid-item {
			min-width: 0;
		}

		&.legend-virtuoso-container-right {
			.virtuoso-grid-list {
				grid-template-columns: 1fr;
			}
		}

		&.legend-virtuoso-container-single-row {
			.virtuoso-grid-list {
				grid-template-columns: repeat(
					auto-fit,
					minmax(var(--legend-average-width, 240px), max-content)
				);
				justify-content: center;
			}
		}

		&::-webkit-scrollbar {
			width: 0.3rem;
		}

@@ -58,9 +109,15 @@
		align-items: center;
		gap: 6px;
		padding: 4px 8px;
		max-width: 100%;
		overflow: hidden;
		border-radius: 4px;
		cursor: pointer;

		&.legend-item-right {
			width: 100%;
		}

		&.legend-item-off {
			opacity: 0.3;
			text-decoration: line-through;
@@ -1,23 +1,21 @@
import { useCallback, useMemo, useRef } from 'react';
import { Virtuoso } from 'react-virtuoso';
import { Tooltip as AntdTooltip } from 'antd';
import { useCallback, useMemo, useRef, useState } from 'react';
import { VirtuosoGrid } from 'react-virtuoso';
import { Input, Tooltip as AntdTooltip } from 'antd';
import cx from 'classnames';
import { LegendItem } from 'lib/uPlotV2/config/types';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
import { LegendPosition } from 'types/api/dashboard/getAll';

import { LegendProps } from '../types';
import { LegendPosition, LegendProps } from '../types';
import { useLegendActions } from './useLegendActions';

import './Legend.styles.scss';

export const MAX_LEGEND_WIDTH = 320;
const LEGENDS_PER_SET_DEFAULT = 5;
export const MAX_LEGEND_WIDTH = 240;

export default function Legend({
	position = LegendPosition.BOTTOM,
	config,
	legendsPerSet = LEGENDS_PER_SET_DEFAULT,
	averageLegendWidth = MAX_LEGEND_WIDTH,
}: LegendProps): JSX.Element {
	const {
		legendItemsMap,

@@ -33,56 +31,63 @@ export default function Legend({
		focusedSeriesIndex,
	});
	const legendContainerRef = useRef<HTMLDivElement | null>(null);
	const [legendSearchQuery, setLegendSearchQuery] = useState('');

	// Chunk legend items into rows of LEGENDS_PER_ROW items each
	const legendRows = useMemo(() => {
		const legendItems = Object.values(legendItemsMap);
	const legendItems = useMemo(() => Object.values(legendItemsMap), [
		legendItemsMap,
	]);

		return legendItems.reduce((acc: LegendItem[][], curr, i) => {
			if (i % legendsPerSet === 0) {
				acc.push([]);
			}
			acc[acc.length - 1].push(curr);
			return acc;
		}, [] as LegendItem[][]);
	}, [legendItemsMap, legendsPerSet]);
	const isSingleRow = useMemo(() => {
		if (!legendContainerRef.current || position !== LegendPosition.BOTTOM) {
			return false;
		}
		const containerWidth = legendContainerRef.current.clientWidth;

	const renderLegendRow = useCallback(
		(rowIndex: number, row: LegendItem[]): JSX.Element => (
			<div
				key={rowIndex}
				className={cx(
					'legend-row',
					`legend-row-${position.toLowerCase()}`,
					legendRows.length === 1 && position === LegendPosition.BOTTOM
						? 'legend-single-row'
						: '',
				)}
			>
				{row.map((item) => (
					<AntdTooltip key={item.seriesIndex} title={item.label}>
						<div
							data-legend-item-id={item.seriesIndex}
							className={cx('legend-item', {
								'legend-item-off': !item.show,
								'legend-item-focused': focusedSeriesIndex === item.seriesIndex,
							})}
							style={{ maxWidth: `min(${MAX_LEGEND_WIDTH}px, 100%)` }}
						>
							<div
								className="legend-marker"
								style={{ borderColor: String(item.color) }}
								data-is-legend-marker={true}
							/>
							<span className="legend-label">{item.label}</span>
						</div>
					</AntdTooltip>
				))}
			</div>
		const totalLegendWidth = legendItems.length * (averageLegendWidth + 16);
		const totalRows = Math.ceil(totalLegendWidth / containerWidth);
		return totalRows <= 1;
	}, [averageLegendWidth, legendContainerRef, legendItems.length, position]);

	const visibleLegendItems = useMemo(() => {
		if (position !== LegendPosition.RIGHT || !legendSearchQuery.trim()) {
			return legendItems;
		}

		const query = legendSearchQuery.trim().toLowerCase();
		return legendItems.filter((item) =>
			item.label?.toLowerCase().includes(query),
		);
	}, [position, legendSearchQuery, legendItems]);

	const renderLegendItem = useCallback(
		(item: LegendItem): JSX.Element => (
			<AntdTooltip key={item.seriesIndex} title={item.label}>
				<div
					data-legend-item-id={item.seriesIndex}
					className={cx('legend-item', `legend-item-${position.toLowerCase()}`, {
						'legend-item-off': !item.show,
						'legend-item-focused': focusedSeriesIndex === item.seriesIndex,
					})}
				>
					<div
						className="legend-marker"
						style={{ borderColor: String(item.color) }}
						data-is-legend-marker={true}
					/>
					<span className="legend-label">{item.label}</span>
				</div>
			</AntdTooltip>
		),
		[focusedSeriesIndex, position, legendRows],
		[focusedSeriesIndex, position],
	);

	const isEmptyState = useMemo(() => {
		if (position !== LegendPosition.RIGHT || !legendSearchQuery.trim()) {
			return false;
		}
		return visibleLegendItems.length === 0;
	}, [position, legendSearchQuery, visibleLegendItems]);

	return (
		<div
			ref={legendContainerRef}

@@ -90,12 +95,36 @@ export default function Legend({
			onClick={onLegendClick}
			onMouseMove={onLegendMouseMove}
			onMouseLeave={onLegendMouseLeave}
			style={{
				['--legend-average-width' as string]: `${averageLegendWidth + 16}px`, // 16px is the marker width
			}}
		>
			<Virtuoso
				className="legend-virtuoso-container"
				data={legendRows}
				itemContent={(index, row): JSX.Element => renderLegendRow(index, row)}
			/>
			{position === LegendPosition.RIGHT && (
				<div className="legend-search-container">
					<Input
						allowClear
						placeholder="Search..."
						value={legendSearchQuery}
						onChange={(e): void => setLegendSearchQuery(e.target.value)}
						className="legend-search-input"
					/>
				</div>
			)}
			{isEmptyState ? (
				<div className="legend-empty-state">
					No series found matching "{legendSearchQuery}"
				</div>
			) : (
				<VirtuosoGrid
					className={cx(
						'legend-virtuoso-container',
						`legend-virtuoso-container-${position.toLowerCase()}`,
						{ 'legend-virtuoso-container-single-row': isSingleRow },
					)}
					data={visibleLegendItems}
					itemContent={(_, item): JSX.Element => renderLegendItem(item)}
				/>
			)}
		</div>
	);
}
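The isSingleRow check above is easiest to see with concrete numbers. A sketch of the same arithmetic (not repo code; the 16px marker allowance matches the `--legend-average-width` comment in the diff):

// 4 legend items, averageLegendWidth = 240; each entry reserves an extra
// 16px for the marker.
const legendItemCount = 4;
const averageLegendWidth = 240;
const containerWidth = 1200;

const totalLegendWidth = legendItemCount * (averageLegendWidth + 16); // 1024
const totalRows = Math.ceil(totalLegendWidth / containerWidth); // 1

// One row fits, so the grid switches to the centered single-row variant.
const isSingleRow = totalRows <= 1; // true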
@@ -5,7 +5,7 @@
	-webkit-font-smoothing: antialiased;
	color: var(--bg-vanilla-100);
	border-radius: 6px;
	padding: 1rem 1rem 0.5rem 1rem;
	padding: 1rem 0.5rem 0.5rem 1rem;
	border: 1px solid var(--bg-ink-100);
	display: flex;
	flex-direction: column;

@@ -15,6 +15,12 @@
		background: var(--bg-vanilla-100);
		color: var(--bg-ink-500);
		border: 1px solid var(--bg-vanilla-300);

		.uplot-tooltip-list {
			&::-webkit-scrollbar-thumb {
				background: var(--bg-vanilla-400);
			}
		}
	}

	.uplot-tooltip-header {

@@ -22,18 +28,18 @@
		font-weight: 500;
	}

	.uplot-tooltip-list-container {
		height: 100%;
		.uplot-tooltip-list {
			&::-webkit-scrollbar {
				width: 0.3rem;
			}
			&::-webkit-scrollbar-corner {
				background: transparent;
			}
			&::-webkit-scrollbar-thumb {
				background: rgb(136, 136, 136);
			}
	.uplot-tooltip-list {
		&::-webkit-scrollbar {
			width: 0.3rem;
		}

		&::-webkit-scrollbar-track {
			background: transparent;
		}

		&::-webkit-scrollbar-thumb {
			background: var(--bg-slate-100);
			border-radius: 0.5rem;
		}
	}
@@ -75,7 +75,7 @@ export interface LegendConfig {
export interface LegendProps {
	position?: LegendPosition;
	config: UPlotConfigBuilder;
	legendsPerSet?: number;
	averageLegendWidth?: number;
}

export interface TooltipContentItem {
@@ -0,0 +1,356 @@
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { uPlotXAxisValuesFormat } from 'lib/uPlotLib/utils/constants';
import getGridColor from 'lib/uPlotLib/utils/getGridColor';
import type uPlot from 'uplot';

import type { AxisProps } from '../types';
import { UPlotAxisBuilder } from '../UPlotAxisBuilder';

jest.mock('components/Graph/yAxisConfig', () => ({
	getToolTipValue: jest.fn(),
}));

const createAxisProps = (overrides: Partial<AxisProps> = {}): AxisProps => ({
	scaleKey: 'x',
	label: 'Time',
	isDarkMode: false,
	show: true,
	...overrides,
});

describe('UPlotAxisBuilder', () => {
	beforeEach(() => {
		jest.clearAllMocks();
	});

	it('builds basic axis config with defaults', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'x',
				label: 'Time',
			}),
		);

		const config = builder.getConfig();

		expect(config.scale).toBe('x');
		expect(config.label).toBe('Time');
		expect(config.show).toBe(true);
		expect(config.side).toBe(2);
		expect(config.gap).toBe(5);

		// Default grid and ticks are created
		expect(config.grid).toEqual({
			stroke: getGridColor(false),
			width: 0.2,
			show: true,
		});
		expect(config.ticks).toEqual({
			width: 0.3,
			show: true,
		});
	});

	it('merges custom grid config over defaults and respects isDarkMode and isLogScale', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({
				isDarkMode: true,
				isLogScale: true,
				grid: {
					width: 1,
				},
			}),
		);

		const config = builder.getConfig();

		expect(config.grid).toEqual({
			// stroke falls back to theme-based default when not provided
			stroke: getGridColor(true),
			// provided width overrides default
			width: 1,
			// show falls back to default when not provided
			show: true,
		});
	});

	it('uses provided ticks config when present and falls back to defaults otherwise', () => {
		const customTicks = { width: 1, show: false };
		const withTicks = new UPlotAxisBuilder(
			createAxisProps({
				ticks: customTicks,
			}),
		);
		const withoutTicks = new UPlotAxisBuilder(createAxisProps());

		expect(withTicks.getConfig().ticks).toBe(customTicks);
		expect(withoutTicks.getConfig().ticks).toEqual({
			width: 0.3,
			show: true,
		});
	});

	it('uses time-based X-axis values formatter for time-series like panels', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'x',
				panelType: PANEL_TYPES.TIME_SERIES,
			}),
		);

		const config = builder.getConfig();

		expect(config.values).toBe(uPlotXAxisValuesFormat);
	});

	it('does not attach X-axis datetime formatter when panel type is not supported', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'x',
				panelType: PANEL_TYPES.LIST, // not in PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT
			}),
		);

		const config = builder.getConfig();

		expect(config.values).toBeUndefined();
	});

	it('builds Y-axis values formatter that delegates to getToolTipValue', () => {
		const yBuilder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'y',
				yAxisUnit: 'ms',
				decimalPrecision: 3,
			}),
		);

		const config = yBuilder.getConfig();
		expect(typeof config.values).toBe('function');

		(getToolTipValue as jest.Mock).mockImplementation(
			(value: string, unit?: string, precision?: unknown) =>
				`formatted:${value}:${unit}:${precision}`,
		);

		// Simulate uPlot calling the values formatter
		const valuesFn = (config.values as unknown) as (
			self: uPlot,
			vals: unknown[],
		) => string[];
		const result = valuesFn({} as uPlot, [1, null, 2, Number.NaN]);

		expect(getToolTipValue).toHaveBeenCalledTimes(2);
		expect(getToolTipValue).toHaveBeenNthCalledWith(1, '1', 'ms', 3);
		expect(getToolTipValue).toHaveBeenNthCalledWith(2, '2', 'ms', 3);

		// Null/NaN values should map to empty strings
		expect(result).toEqual(['formatted:1:ms:3', '', 'formatted:2:ms:3', '']);
	});

	it('adds dynamic size calculator only for Y-axis when size is not provided', () => {
		const yBuilder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'y',
			}),
		);
		const xBuilder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'x',
			}),
		);

		const yConfig = yBuilder.getConfig();
		const xConfig = xBuilder.getConfig();

		expect(typeof yConfig.size).toBe('function');
		expect(xConfig.size).toBeUndefined();
	});

	it('uses explicit size function when provided', () => {
		const sizeFn: uPlot.Axis.Size = jest.fn(() => 100) as uPlot.Axis.Size;

		const builder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'y',
				size: sizeFn,
			}),
		);

		const config = builder.getConfig();
		expect(config.size).toBe(sizeFn);
	});

	it('builds stroke color based on stroke and isDarkMode', () => {
		const explicitStroke = new UPlotAxisBuilder(
			createAxisProps({
				stroke: '#ff0000',
			}),
		);
		const darkStroke = new UPlotAxisBuilder(
			createAxisProps({
				stroke: undefined,
				isDarkMode: true,
			}),
		);
		const lightStroke = new UPlotAxisBuilder(
			createAxisProps({
				stroke: undefined,
				isDarkMode: false,
			}),
		);

		expect(explicitStroke.getConfig().stroke).toBe('#ff0000');
		expect(darkStroke.getConfig().stroke).toBe('white');
		expect(lightStroke.getConfig().stroke).toBe('black');
	});

	it('uses explicit values formatter when provided', () => {
		const customValues: uPlot.Axis.Values = jest.fn(() => ['a', 'b', 'c']);

		const builder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'y',
				values: customValues,
			}),
		);

		const config = builder.getConfig();

		expect(config.values).toBe(customValues);
	});

	it('returns undefined values for scaleKey neither x nor y', () => {
		const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'custom' }));

		const config = builder.getConfig();

		expect(config.values).toBeUndefined();
	});

	it('omits stroke when stroke and isDarkMode are both undefined', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'custom',
				stroke: undefined,
				isDarkMode: undefined,
			}),
		);

		const config = builder.getConfig();

		expect(config.stroke).toBeUndefined();
	});

	it('includes space in config when provided', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({ scaleKey: 'y', space: 50 }),
		);

		const config = builder.getConfig();

		expect(config.space).toBe(50);
	});

	it('includes PANEL_TYPES.BAR and PANEL_TYPES.PIE in X-axis datetime formatter', () => {
		const barBuilder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'x',
				panelType: PANEL_TYPES.BAR,
			}),
		);
		const pieBuilder = new UPlotAxisBuilder(
			createAxisProps({
				scaleKey: 'x',
				panelType: PANEL_TYPES.PIE,
			}),
		);

		expect(barBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
		expect(pieBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
	});

	it('invokes Y-axis size calculator and delegates to getExistingAxisSize when cycleNum > 1', () => {
		const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'y' }));

		const config = builder.getConfig();
		const sizeFn = config.size;
		expect(typeof sizeFn).toBe('function');

		const mockAxis = {
			_size: 80,
			ticks: { size: 10 },
			font: ['12px sans-serif'],
		};
		const mockSelf = ({
			axes: [mockAxis],
			ctx: { measureText: jest.fn(() => ({ width: 60 })), font: '' },
		} as unknown) as uPlot;

		const result = (sizeFn as (
			s: uPlot,
			v: string[],
			a: number,
			c: number,
		) => number)(
			mockSelf,
			['100', '200'],
			0,
			2, // cycleNum > 1
		);

		expect(result).toBe(80);
	});

	it('invokes Y-axis size calculator and computes from text width when cycleNum <= 1', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({ scaleKey: 'y', gap: 8 }),
		);

		const config = builder.getConfig();
		const sizeFn = config.size;
		expect(typeof sizeFn).toBe('function');

		const mockAxis = {
			ticks: { size: 12 },
			font: ['12px sans-serif'],
		};
		const measureText = jest.fn(() => ({ width: 48 }));
		const mockSelf = ({
			axes: [mockAxis],
			ctx: {
				measureText,
				get font() {
					return '';
				},
				set font(_v: string) {
					/* noop */
				},
			},
		} as unknown) as uPlot;

		const result = (sizeFn as (
			s: uPlot,
			v: string[],
			a: number,
			c: number,
		) => number)(mockSelf, ['10', '2000ms'], 0, 0);

		expect(measureText).toHaveBeenCalledWith('2000ms');
		expect(result).toBeGreaterThanOrEqual(12 + 8);
	});

	it('merge updates axis props', () => {
		const builder = new UPlotAxisBuilder(
			createAxisProps({ scaleKey: 'y', label: 'Original' }),
		);

		builder.merge({ label: 'Merged', yAxisUnit: 'bytes' });

		const config = builder.getConfig();

		expect(config.label).toBe('Merged');
		expect(config.values).toBeDefined();
	});
});
@@ -0,0 +1,331 @@
import uPlot from 'uplot';

import type { SeriesProps } from '../types';
import { DrawStyle, SelectionPreferencesSource } from '../types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder';

// Mock only the real boundary that hits localStorage
jest.mock(
	'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
	() => ({
		getStoredSeriesVisibility: jest.fn(),
	}),
);

const getStoredSeriesVisibilityMock = jest.requireMock(
	'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
) as {
	getStoredSeriesVisibility: jest.Mock;
};

describe('UPlotConfigBuilder', () => {
	beforeEach(() => {
		jest.clearAllMocks();
	});

	const createSeriesProps = (
		overrides: Partial<SeriesProps> = {},
	): SeriesProps => ({
		scaleKey: 'y',
		label: 'Requests',
		colorMapping: {},
		drawStyle: DrawStyle.Line,
		...overrides,
	});

	it('returns correct save selection preference flag from constructor args', () => {
		const builder = new UPlotConfigBuilder({
			shouldSaveSelectionPreference: true,
		});

		expect(builder.getShouldSaveSelectionPreference()).toBe(true);
	});

	it('returns widgetId from constructor args', () => {
		const builder = new UPlotConfigBuilder({ widgetId: 'widget-123' });

		expect(builder.getWidgetId()).toBe('widget-123');
	});

	it('sets tzDate from constructor and includes it in config', () => {
		const tzDate = (ts: number): Date => new Date(ts);
		const builder = new UPlotConfigBuilder({ tzDate });

		const config = builder.getConfig();

		expect(config.tzDate).toBe(tzDate);
	});

	it('does not call onDragSelect for click without drag (width === 0)', () => {
		const onDragSelect = jest.fn();
		const builder = new UPlotConfigBuilder({ onDragSelect });

		const config = builder.getConfig();
		const setSelectHooks = config.hooks?.setSelect ?? [];
		expect(setSelectHooks.length).toBe(1);

		const uplotInstance = ({
			select: { left: 10, width: 0 },
			posToVal: jest.fn(),
		} as unknown) as uPlot;

		// Simulate uPlot calling the hook
		const setSelectHook = setSelectHooks[0];
		expect(setSelectHook).toBeDefined();
		if (!setSelectHook) {
			throw new Error('Expected setSelect hook to be registered');
		}
		setSelectHook(uplotInstance);

		expect(onDragSelect).not.toHaveBeenCalled();
	});

	it('calls onDragSelect with start and end times in milliseconds for a drag selection', () => {
		const onDragSelect = jest.fn();
		const builder = new UPlotConfigBuilder({ onDragSelect });

		const config = builder.getConfig();
		const setSelectHooks = config.hooks?.setSelect ?? [];
		expect(setSelectHooks.length).toBe(1);

		const posToVal = jest
			.fn()
			// left position
			.mockReturnValueOnce(100)
			// left + width
			.mockReturnValueOnce(110);

		const uplotInstance = ({
			select: { left: 50, width: 20 },
			posToVal,
		} as unknown) as uPlot;

		const setSelectHook = setSelectHooks[0];
		expect(setSelectHook).toBeDefined();
		if (!setSelectHook) {
			throw new Error('Expected setSelect hook to be registered');
		}
		setSelectHook(uplotInstance);

		expect(onDragSelect).toHaveBeenCalledTimes(1);
		// 100 and 110 seconds converted to milliseconds
		expect(onDragSelect).toHaveBeenCalledWith(100_000, 110_000);
	});

	it('adds and removes hooks via addHook, and exposes them through getConfig', () => {
		const builder = new UPlotConfigBuilder();
		const drawHook = jest.fn();

		const remove = builder.addHook('draw', drawHook as uPlot.Hooks.Defs['draw']);

		let config = builder.getConfig();
		expect(config.hooks?.draw).toContain(drawHook);

		// Remove and ensure it no longer appears in config
		remove();
		config = builder.getConfig();
		expect(config.hooks?.draw ?? []).not.toContain(drawHook);
	});

	it('adds axes, scales, and series and wires them into the final config', () => {
		const builder = new UPlotConfigBuilder();

		// Add axis and scale
		builder.addAxis({ scaleKey: 'y', label: 'Requests' });
		builder.addScale({ scaleKey: 'y' });

		// Add two series – legend indices should start from 1 (0 is the timestamp series)
		builder.addSeries(createSeriesProps({ label: 'Requests' }));
		builder.addSeries(createSeriesProps({ label: 'Errors' }));

		const config = builder.getConfig();

		// Axes
		expect(config.axes).toHaveLength(1);
		expect(config.axes?.[0].scale).toBe('y');

		// Scales are returned as an object keyed by scaleKey
		expect(config.scales).toBeDefined();
		expect(Object.keys(config.scales ?? {})).toContain('y');

		// Series: base timestamp + 2 data series
		expect(config.series).toHaveLength(3);
		// Base series (index 0) has a value formatter that returns empty string
		const baseSeries = config.series?.[0] as { value?: () => string };
		expect(typeof baseSeries?.value).toBe('function');
		expect(baseSeries?.value?.()).toBe('');

		// Legend items align with series and carry label and color from series config
		const legendItems = builder.getLegendItems();
		expect(Object.keys(legendItems)).toEqual(['1', '2']);
		expect(legendItems[1].seriesIndex).toBe(1);
		expect(legendItems[1].label).toBe('Requests');
		expect(legendItems[2].label).toBe('Errors');
	});

	it('merges axis when addAxis is called twice with same scaleKey', () => {
		const builder = new UPlotConfigBuilder();

		builder.addAxis({ scaleKey: 'y', label: 'Requests' });
		builder.addAxis({ scaleKey: 'y', label: 'Updated Label', show: false });

		const config = builder.getConfig();

		expect(config.axes).toHaveLength(1);
		expect(config.axes?.[0].label).toBe('Updated Label');
		expect(config.axes?.[0].show).toBe(false);
	});

	it('merges scale when addScale is called twice with same scaleKey', () => {
		const builder = new UPlotConfigBuilder();

		builder.addScale({ scaleKey: 'y', min: 0 });
		builder.addScale({ scaleKey: 'y', max: 100 });

		const config = builder.getConfig();

		// Only one scale entry for 'y' (merge path used, no duplicate added)
		expect(config.scales).toBeDefined();
		expect(Object.keys(config.scales ?? {})).toEqual(['y']);
		expect(config.scales?.y?.range).toBeDefined();
	});

	it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
		const visibilityMap = new Map<string, boolean>([
			['Requests', true],
			['Errors', false],
		]);

		getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue(
			visibilityMap,
		);

		const builder = new UPlotConfigBuilder({
			widgetId: 'widget-1',
			selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
		});

		builder.addSeries(createSeriesProps({ label: 'Requests' }));
		builder.addSeries(createSeriesProps({ label: 'Errors' }));

		const legendItems = builder.getLegendItems();

		// When any series is hidden, legend visibility is driven by the stored map
		expect(legendItems[1].show).toBe(true);
		expect(legendItems[2].show).toBe(false);

		const config = builder.getConfig();
		const [, firstSeries, secondSeries] = config.series ?? [];

		expect(firstSeries?.show).toBe(true);
		expect(secondSeries?.show).toBe(false);
	});

	it('does not attempt to read stored visibility when using in-memory preferences', () => {
		const builder = new UPlotConfigBuilder({
			widgetId: 'widget-1',
			selectionPreferencesSource: SelectionPreferencesSource.IN_MEMORY,
		});

		builder.addSeries(createSeriesProps({ label: 'Requests' }));

		builder.getLegendItems();
		builder.getConfig();

		expect(
			getStoredSeriesVisibilityMock.getStoredSeriesVisibility,
		).not.toHaveBeenCalled();
	});

	it('adds thresholds only once per scale key', () => {
		const builder = new UPlotConfigBuilder();

		const thresholdsOptions = {
			scaleKey: 'y',
			thresholds: [{ thresholdValue: 100 }],
		};

		builder.addThresholds(thresholdsOptions);
		builder.addThresholds(thresholdsOptions);

		const config = builder.getConfig();
		const drawHooks = config.hooks?.draw ?? [];

		// Only a single draw hook should be registered for the same scaleKey
		expect(drawHooks.length).toBe(1);
	});

	it('merges cursor configuration with defaults instead of replacing them', () => {
		const builder = new UPlotConfigBuilder();

		builder.setCursor({
			drag: { setScale: false },
		});

		const config = builder.getConfig();

		expect(config.cursor?.drag?.setScale).toBe(false);
		// Points configuration from DEFAULT_CURSOR_CONFIG should still be present
		expect(config.cursor?.points).toBeDefined();
	});

	it('adds plugins and includes them in config', () => {
		const builder = new UPlotConfigBuilder();
		const plugin: uPlot.Plugin = {
			opts: (): void => {},
			hooks: {},
		};

		builder.addPlugin(plugin);

		const config = builder.getConfig();

		expect(config.plugins).toContain(plugin);
	});

	it('sets bands and includes them in config', () => {
		const builder = new UPlotConfigBuilder();
		const bands: uPlot.Band[] = [{ series: [1, 2], fill: (): string => '#000' }];

		builder.setBands(bands);

		const config = builder.getConfig();

		expect(config.bands).toEqual(bands);
	});

	it('sets padding, legend, focus, select, tzDate and includes them in config', () => {
		const tzDate = (ts: number): Date => new Date(ts);
		const builder = new UPlotConfigBuilder();

		builder.setPadding([10, 20, 30, 40]);
		builder.setLegend({ show: true, live: true });
		builder.setFocus({ alpha: 0.5 });
		builder.setSelect({ left: 0, width: 0, top: 0, height: 0 });
		builder.setTzDate(tzDate);

		const config = builder.getConfig();

		expect(config.padding).toEqual([10, 20, 30, 40]);
		expect(config.legend).toEqual({ show: true, live: true });
		expect(config.focus).toEqual({ alpha: 0.5 });
		expect(config.select).toEqual({ left: 0, width: 0, top: 0, height: 0 });
		expect(config.tzDate).toBe(tzDate);
	});

	it('does not include plugins when none added', () => {
		const builder = new UPlotConfigBuilder();

		const config = builder.getConfig();

		expect(config.plugins).toBeUndefined();
	});

	it('does not include bands when empty', () => {
		const builder = new UPlotConfigBuilder();

		const config = builder.getConfig();

		expect(config.bands).toBeUndefined();
	});
});
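The tests above exercise the builder piecemeal; as an end-to-end sketch of the same fluent surface (assuming, as the config test does, that getConfig() returns everything uPlot needs except width and height; the colors and data here are hypothetical):

import uPlot from 'uplot';

import { DrawStyle } from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

const builder = new UPlotConfigBuilder({ widgetId: 'demo' });

builder.addScale({ scaleKey: 'x', time: true });
builder.addScale({ scaleKey: 'y' });
builder.addAxis({ scaleKey: 'x', isDarkMode: false });
builder.addAxis({ scaleKey: 'y', label: 'Requests', isDarkMode: false });
builder.addSeries({
	scaleKey: 'y',
	label: 'Requests',
	colorMapping: { Requests: '#4e74f8' }, // hypothetical color mapping
	drawStyle: DrawStyle.Line,
});

const data: uPlot.AlignedData = [
	[1_700_000_000, 1_700_000_060, 1_700_000_120], // unix seconds
	[12, 17, 9],
];

const plot = new uPlot(
	{ ...builder.getConfig(), width: 600, height: 300 } as uPlot.Options,
	data,
	document.getElementById('chart') as HTMLElement,
);
console.log(plot.series.length); // base timestamp series + 1 data series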
@@ -0,0 +1,235 @@
import type uPlot from 'uplot';

import * as scaleUtils from '../../utils/scale';
import type { ScaleProps } from '../types';
import { DistributionType } from '../types';
import { UPlotScaleBuilder } from '../UPlotScaleBuilder';

const createScaleProps = (overrides: Partial<ScaleProps> = {}): ScaleProps => ({
	scaleKey: 'y',
	time: false,
	auto: undefined,
	min: undefined,
	max: undefined,
	softMin: undefined,
	softMax: undefined,
	distribution: DistributionType.Linear,
	...overrides,
});

describe('UPlotScaleBuilder', () => {
	const getFallbackMinMaxSpy = jest.spyOn(
		scaleUtils,
		'getFallbackMinMaxTimeStamp',
	);

	beforeEach(() => {
		jest.clearAllMocks();
	});

	it('initializes softMin/softMax correctly when both are 0 (treated as unset)', () => {
		const builder = new UPlotScaleBuilder(
			createScaleProps({
				softMin: 0,
				softMax: 0,
			}),
		);

		// Non-time scale so config path uses thresholds pipeline; we just care that
		// adjustSoftLimitsWithThresholds receives null soft limits instead of 0/0.
		const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');

		builder.getConfig();

		expect(adjustSpy).toHaveBeenCalledWith(null, null, undefined, undefined);
	});

	it('handles time scales using explicit min/max and rounds max down to the previous minute', () => {
		const min = 1_700_000_000; // seconds
		const max = 1_700_000_600; // seconds

		const builder = new UPlotScaleBuilder(
			createScaleProps({
				scaleKey: 'x',
				time: true,
				min,
				max,
			}),
		);

		const config = builder.getConfig();
		const xScale = config.x;

		expect(xScale.time).toBe(true);
		expect(xScale.auto).toBe(false);
		expect(Array.isArray(xScale.range)).toBe(true);

		const [resolvedMin, resolvedMax] = xScale.range as [number, number];

		// min is passed through
		expect(resolvedMin).toBe(min);

		// max is coerced to "endTime - 1 minute" and rounded down to minute precision
		const oneMinuteAgoTimestamp = (max - 60) * 1000;
		const currentDate = new Date(oneMinuteAgoTimestamp);
		currentDate.setSeconds(0);
		currentDate.setMilliseconds(0);
		const expectedMax = Math.floor(currentDate.getTime() / 1000);

		expect(resolvedMax).toBe(expectedMax);
	});

	it('falls back to getFallbackMinMaxTimeStamp when time scale has no min/max', () => {
		getFallbackMinMaxSpy.mockReturnValue({
			fallbackMin: 100,
			fallbackMax: 200,
		});

		const builder = new UPlotScaleBuilder(
			createScaleProps({
				scaleKey: 'x',
				time: true,
				min: undefined,
				max: undefined,
			}),
		);

		const config = builder.getConfig();
		const [resolvedMin, resolvedMax] = config.x.range as [number, number];

		expect(getFallbackMinMaxSpy).toHaveBeenCalled();
		expect(resolvedMin).toBe(100);
		// max is aligned to "fallbackMax - 60 seconds" minute boundary
		expect(resolvedMax).toBeLessThanOrEqual(200);
		expect(resolvedMax).toBeGreaterThan(100);
	});

	it('pipes limits through soft-limit adjustment and log-scale normalization before range config', () => {
		const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
		const normalizeSpy = jest.spyOn(scaleUtils, 'normalizeLogScaleLimits');
		const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');

		const thresholds = {
			scaleKey: 'y',
			thresholds: [{ thresholdValue: 10 }],
			yAxisUnit: 'ms',
		};

		const builder = new UPlotScaleBuilder(
			createScaleProps({
				softMin: 1,
				softMax: 5,
				min: 0,
				max: 100,
				distribution: DistributionType.Logarithmic,
				thresholds,
				logBase: 2,
				padMinBy: 0.1,
				padMaxBy: 0.2,
			}),
		);

		builder.getConfig();

		expect(adjustSpy).toHaveBeenCalledWith(1, 5, thresholds.thresholds, 'ms');
		expect(normalizeSpy).toHaveBeenCalledWith({
			distr: DistributionType.Logarithmic,
			logBase: 2,
			limits: {
				min: 0,
				max: 100,
				softMin: expect.anything(),
				softMax: expect.anything(),
			},
		});
		expect(getRangeConfigSpy).toHaveBeenCalled();
	});

	it('computes distribution config for non-time scales and wires range function when range is not provided', () => {
		const createRangeFnSpy = jest.spyOn(scaleUtils, 'createRangeFunction');

		const builder = new UPlotScaleBuilder(
			createScaleProps({
				scaleKey: 'y',
				time: false,
				distribution: DistributionType.Linear,
			}),
		);

		const config = builder.getConfig();
		const yScale = config.y;

		expect(createRangeFnSpy).toHaveBeenCalled();

		// range should be a function when not provided explicitly
		expect(typeof yScale.range).toBe('function');

		// distribution config should be applied
		expect(yScale.distr).toBeDefined();
		expect(yScale.log).toBeDefined();
	});

	it('respects explicit range function when provided on props', () => {
		const explicitRange: uPlot.Scale.Range = jest.fn(() => [
			0,
			10,
		]) as uPlot.Scale.Range;

		const builder = new UPlotScaleBuilder(
			createScaleProps({
				scaleKey: 'y',
				range: explicitRange,
			}),
		);

		const config = builder.getConfig();
		const yScale = config.y;

		expect(yScale.range).toBe(explicitRange);
	});

	it('derives auto flag when not explicitly provided, based on hasFixedRange and time', () => {
		const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');

		const builder = new UPlotScaleBuilder(
			createScaleProps({
				min: 0,
				max: 100,
				time: false,
			}),
		);

		const config = builder.getConfig();
		const yScale = config.y;

		expect(getRangeConfigSpy).toHaveBeenCalled();
		// For non-time scale with fixed min/max, hasFixedRange is true → auto should remain false
		expect(yScale.auto).toBe(false);
	});

	it('merge updates internal min/max/soft limits while preserving other props', () => {
		const builder = new UPlotScaleBuilder(
			createScaleProps({
				scaleKey: 'y',
				min: 0,
				max: 10,
				softMin: 1,
				softMax: 9,
				time: false,
			}),
		);

		builder.merge({
			min: 2,
			softMax: undefined,
		});

		const config = builder.getConfig();
		const yScale = config.y;

		// We can't read private fields directly, but we can assert that rangeConfig
		// has been recomputed using the merged values by checking that createRangeFunction
		// is still called without throwing and returns a working range function.
		expect(typeof yScale.range).toBe('function');
	});
});
@@ -0,0 +1,309 @@
import { themeColors } from 'constants/theme';
import uPlot from 'uplot';

import type { SeriesProps } from '../types';
import {
	DrawStyle,
	LineInterpolation,
	LineStyle,
	VisibilityMode,
} from '../types';
import { UPlotSeriesBuilder } from '../UPlotSeriesBuilder';

const createBaseProps = (
	overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
	scaleKey: 'y',
	label: 'Requests',
	colorMapping: {},
	drawStyle: DrawStyle.Line,
	isDarkMode: false,
	...overrides,
});

describe('UPlotSeriesBuilder', () => {
	it('maps basic props into uPlot series config', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				label: 'Latency',
				spanGaps: true,
				show: false,
			}),
		);

		const config = builder.getConfig();

		expect(config.scale).toBe('y');
		expect(config.label).toBe('Latency');
		expect(config.spanGaps).toBe(true);
		expect(config.show).toBe(false);
		expect(config.pxAlign).toBe(true);
		expect(typeof config.value).toBe('function');
	});

	it('uses explicit lineColor when provided, regardless of mapping', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				lineColor: '#ff00ff',
				colorMapping: { Requests: '#00ff00' },
			}),
		);

		const config = builder.getConfig();

		expect(config.stroke).toBe('#ff00ff');
	});

	it('falls back to theme colors when no label is provided', () => {
		const darkBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				label: undefined,
				isDarkMode: true,
				lineColor: undefined,
			}),
		);
		const lightBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				label: undefined,
				isDarkMode: false,
				lineColor: undefined,
			}),
		);

		const darkConfig = darkBuilder.getConfig();
		const lightConfig = lightBuilder.getConfig();

		expect(darkConfig.stroke).toBe(themeColors.white);
		expect(lightConfig.stroke).toBe(themeColors.black);
	});

	it('uses colorMapping when available and no explicit lineColor is provided', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				label: 'Requests',
				colorMapping: { Requests: '#123456' },
				lineColor: undefined,
			}),
		);

		const config = builder.getConfig();

		expect(config.stroke).toBe('#123456');
	});

	it('passes through a custom pathBuilder when provided', () => {
		const customPaths = (jest.fn() as unknown) as uPlot.Series.PathBuilder;

		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				pathBuilder: customPaths,
			}),
		);

		const config = builder.getConfig();

		expect(config.paths).toBe(customPaths);
	});

	it('disables line paths when drawStyle is Points, but still renders points', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Points,
				pointSize: 4,
				lineWidth: 2,
				lineColor: '#aa00aa',
			}),
		);

		const config = builder.getConfig();

		expect(typeof config.paths).toBe('function');
		expect(config.paths && config.paths({} as uPlot, 1, 0, 10)).toBeNull();

		expect(config.points).toBeDefined();
		expect(config.points?.stroke).toBe('#aa00aa');
		expect(config.points?.fill).toBe('#aa00aa');
		expect(config.points?.show).toBe(true);
		expect(config.points?.size).toBe(4);
	});

	it('derives point size based on lineWidth and pointSize', () => {
		const smallPointsBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				lineWidth: 4,
				pointSize: 2,
			}),
		);
		const largePointsBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				lineWidth: 2,
				pointSize: 4,
			}),
		);

		const smallConfig = smallPointsBuilder.getConfig();
		const largeConfig = largePointsBuilder.getConfig();

		expect(smallConfig.points?.size).toBeUndefined();
		expect(largeConfig.points?.size).toBe(4);
	});

	it('uses pointsBuilder when provided instead of default visibility logic', () => {
		const pointsBuilder: uPlot.Series.Points.Show = jest.fn(
			() => true,
		) as uPlot.Series.Points.Show;

		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				pointsBuilder,
				drawStyle: DrawStyle.Line,
			}),
		);

		const config = builder.getConfig();

		expect(config.points?.show).toBe(pointsBuilder);
	});

	it('respects VisibilityMode for point visibility when no custom pointsBuilder is given', () => {
		const neverPointsBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				showPoints: VisibilityMode.Never,
			}),
		);
		const alwaysPointsBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				showPoints: VisibilityMode.Always,
			}),
		);

		const neverConfig = neverPointsBuilder.getConfig();
		const alwaysConfig = alwaysPointsBuilder.getConfig();

		expect(neverConfig.points?.show).toBe(false);
		expect(alwaysConfig.points?.show).toBe(true);
	});

	it('applies LineStyle.Dashed and lineCap to line config', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				lineStyle: LineStyle.Dashed,
				lineCap: 'round' as CanvasLineCap,
			}),
		);

		const config = builder.getConfig();

		expect(config.dash).toEqual([10, 10]);
		expect(config.cap).toBe('round');
	});

	it('builds default paths for Line drawStyle and invokes the path builder', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				lineInterpolation: LineInterpolation.Linear,
			}),
		);

		const config = builder.getConfig();

		expect(typeof config.paths).toBe('function');
		const result = config.paths?.({} as uPlot, 1, 0, 10);
		expect(result).toBeDefined();
	});

	it('uses StepBefore and StepAfter interpolation for line paths', () => {
		const stepBeforeBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				lineInterpolation: LineInterpolation.StepBefore,
			}),
		);
		const stepAfterBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				lineInterpolation: LineInterpolation.StepAfter,
			}),
		);

		const stepBeforeConfig = stepBeforeBuilder.getConfig();
		const stepAfterConfig = stepAfterBuilder.getConfig();

		expect(typeof stepBeforeConfig.paths).toBe('function');
		expect(typeof stepAfterConfig.paths).toBe('function');
		expect(stepBeforeConfig.paths?.({} as uPlot, 1, 0, 5)).toBeDefined();
		expect(stepAfterConfig.paths?.({} as uPlot, 1, 0, 5)).toBeDefined();
	});

	it('defaults to spline interpolation when lineInterpolation is Spline or undefined', () => {
		const splineBuilder = new UPlotSeriesBuilder(
			createBaseProps({
				drawStyle: DrawStyle.Line,
				lineInterpolation: LineInterpolation.Spline,
			}),
		);
		const defaultBuilder = new UPlotSeriesBuilder(
			createBaseProps({ drawStyle: DrawStyle.Line }),
		);

		const splineConfig = splineBuilder.getConfig();
		const defaultConfig = defaultBuilder.getConfig();

		expect(typeof splineConfig.paths).toBe('function');
		expect(typeof defaultConfig.paths).toBe('function');
	});

	it('coerces non-boolean spanGaps to false', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({ spanGaps: undefined }),
		);

		const config = builder.getConfig();

		expect(config.spanGaps).toBe(false);
	});

	it('preserves spanGaps true when provided as boolean', () => {
		const builder = new UPlotSeriesBuilder(createBaseProps({ spanGaps: true }));

		const config = builder.getConfig();

		expect(config.spanGaps).toBe(true);
	});

	it('uses generateColor when label has no colorMapping and no lineColor', () => {
		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				label: 'CustomSeries',
				colorMapping: {},
				lineColor: undefined,
			}),
		);

		const config = builder.getConfig();

		expect(config.stroke).toBeDefined();
		expect(typeof config.stroke).toBe('string');
		expect((config.stroke as string).length).toBeGreaterThan(0);
	});

	it('passes through pointsFilter when provided', () => {
		const pointsFilter: uPlot.Series.Points.Filter = jest.fn(
			(_self, _seriesIdx, _show) => null,
		);

		const builder = new UPlotSeriesBuilder(
			createBaseProps({
				pointsFilter,
				drawStyle: DrawStyle.Line,
			}),
		);

		const config = builder.getConfig();

		expect(config.points?.filter).toBe(pointsFilter);
	});
});
@@ -45,8 +45,11 @@ import APIError from 'types/api/error';
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as generateUUID } from 'uuid';

import { useDashboardVariables } from '../../hooks/dashboard/useDashboardVariables';
import { setDashboardVariablesStore } from './store/dashboardVariablesStore';
import { useDashboardVariablesSelector } from '../../hooks/dashboard/useDashboardVariables';
import {
	setDashboardVariablesStore,
	updateDashboardVariablesStore,
} from './store/dashboardVariables/dashboardVariablesStore';
import {
	DashboardSortOrder,
	IDashboardContext,
@@ -198,14 +201,23 @@ export function DashboardProvider({
		: isDashboardWidgetPage?.params.dashboardId) || '';

	const [selectedDashboard, setSelectedDashboard] = useState<Dashboard>();
	const dashboardVariables = useDashboardVariables();
	const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
	const savedDashboardId = useDashboardVariablesSelector((s) => s.dashboardId);

	useEffect(() => {
		const existingVariables = dashboardVariables;
		const updatedVariables = selectedDashboard?.data.variables || {};

		if (!isEqual(existingVariables, updatedVariables)) {
			setDashboardVariablesStore(updatedVariables);
		if (savedDashboardId !== dashboardId) {
			setDashboardVariablesStore({
				dashboardId,
				variables: updatedVariables,
			});
		} else if (!isEqual(existingVariables, updatedVariables)) {
			updateDashboardVariablesStore({
				dashboardId,
				variables: updatedVariables,
			});
		}
	}, [selectedDashboard]);
@@ -0,0 +1,57 @@
import createStore from '../store';
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
import {
	computeDerivedValues,
	updateDerivedValues,
} from './dashboardVariablesStoreUtils';

const initialState: IDashboardVariablesStoreState = {
	dashboardId: '',
	variables: {},
	sortedVariablesArray: [],
	dependencyData: null,
};

export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
	initialState,
);

/**
 * Set dashboard variables (replaces all variables)
 */
export function setDashboardVariablesStore({
	dashboardId,
	variables,
}: {
	dashboardId: string;
	variables: IDashboardVariablesStoreState['variables'];
}): void {
	dashboardVariablesStore.set(() => {
		return {
			dashboardId,
			variables,
			...computeDerivedValues(variables),
		} as IDashboardVariablesStoreState;
	});
}

/**
 * Update specific dashboard variables (merges with existing)
 */
export function updateDashboardVariablesStore({
	dashboardId,
	variables,
}: {
	dashboardId: string;
	variables: IDashboardVariablesStoreState['variables'];
}): void {
	dashboardVariablesStore.update((draft) => {
		if (draft.dashboardId !== dashboardId) {
			// If dashboardId doesn't match, we replace the entire state
			draft.dashboardId = dashboardId;
		}
		draft.variables = variables;

		updateDerivedValues(draft);
	});
}
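A minimal usage sketch of the two actions above (hedged: the variable objects are abbreviated to the fields this store actually reads, so the cast below is purely illustrative):

import type { IDashboardVariables } from './dashboardVariablesStoreTypes';
import {
	setDashboardVariablesStore,
	updateDashboardVariablesStore,
} from './dashboardVariablesStore';

// Hypothetical, abbreviated variables; a real IDashboardVariable has more fields.
const variables = ({
	namespace: { name: 'namespace', order: 0, type: 'QUERY' },
	pod: { name: 'pod', order: 1, type: 'QUERY' },
} as unknown) as IDashboardVariables;

// Switching dashboards replaces the whole state and recomputes derived values.
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });

// A later edit on the same dashboard goes through update(), which mutates the
// immer draft in place and refreshes the derived values.
updateDashboardVariablesStore({ dashboardId: 'dash-1', variables });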
@@ -0,0 +1,31 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

export type VariableGraph = Record<string, string[]>;

export interface IDependencyData {
	order: string[];
	graph: VariableGraph;
	parentDependencyGraph: VariableGraph;
	hasCycle: boolean;
	cycleNodes?: string[];
}

export type IDashboardVariables = Record<string, IDashboardVariable>;

export interface IDashboardVariablesStoreState {
	// dashboard id
	dashboardId: string;

	// Raw variables keyed by id/name
	variables: IDashboardVariables;

	// Derived: sorted array of variables by order
	sortedVariablesArray: IDashboardVariable[];

	// Derived: dependency data for QUERY variables
	dependencyData: IDependencyData | null;
}

export interface IUseDashboardVariablesReturn {
	dashboardVariables: IDashboardVariablesStoreState['variables'];
}
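To make the derived shape concrete: a hypothetical two-variable chain in which pod depends on namespace would yield dependency data along these lines (illustrative values, not taken from this diff):

import type { IDependencyData } from './dashboardVariablesStoreTypes';

const example: IDependencyData = {
	// topological order, filtered down to QUERY variables
	order: ['namespace', 'pod'],
	// variable -> children that must refetch when it changes
	graph: { namespace: ['pod'], pod: [] },
	// variable -> parents it waits on before fetching
	parentDependencyGraph: { namespace: [], pod: ['namespace'] },
	hasCycle: false,
};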
@@ -0,0 +1,118 @@
import {
	buildDependencies,
	buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { initializeVariableFetchStore } from '../variableFetchStore';
import {
	IDashboardVariables,
	IDashboardVariablesStoreState,
	IDependencyData,
} from './dashboardVariablesStoreTypes';

/**
 * Build a sorted array of variables by their order property
 */
export function buildSortedVariablesArray(
	variables: IDashboardVariables,
): IDashboardVariable[] {
	const sortedVariablesArray: IDashboardVariable[] = [];

	Object.values(variables).forEach((value) => {
		sortedVariablesArray.push({ ...value });
	});

	sortedVariablesArray.sort((a, b) => a.order - b.order);

	return sortedVariablesArray;
}

/**
 * Build dependency data from sorted variables array
 * This includes the dependency graph, topological order, and cycle detection
 */
export function buildDependencyData(
	sortedVariablesArray: IDashboardVariable[],
): IDependencyData | null {
	if (sortedVariablesArray.length === 0) {
		return null;
	}

	const dependencies = buildDependencies(sortedVariablesArray);
	const {
		order,
		graph,
		parentDependencyGraph,
		hasCycle,
		cycleNodes,
	} = buildDependencyGraph(dependencies);

	// Filter order to only include QUERY type variables
	const queryVariableOrder = order.filter((variable: string) => {
		const variableData = sortedVariablesArray.find((v) => v.name === variable);
		return variableData?.type === 'QUERY';
	});

	return {
		order: queryVariableOrder,
		graph,
		parentDependencyGraph,
		hasCycle,
		cycleNodes,
	};
}

/**
 * Initialize the variable fetch store with the computed dependency data
 */
function initializeFetchStore(
	sortedVariablesArray: IDashboardVariable[],
	dependencyData: IDependencyData | null,
): void {
	if (dependencyData) {
		const allVariableNames = sortedVariablesArray
			.map((v) => v.name)
			.filter((name): name is string => !!name);

		initializeVariableFetchStore(
			allVariableNames,
			dependencyData.graph,
			dependencyData.parentDependencyGraph,
		);
	}
}

/**
 * Compute derived values from variables
 * This is a composition of buildSortedVariablesArray and buildDependencyData
 * Also initializes the variable fetch store with the new dependency data
 */
export function computeDerivedValues(
	variables: IDashboardVariablesStoreState['variables'],
): Pick<
	IDashboardVariablesStoreState,
	'sortedVariablesArray' | 'dependencyData'
> {
	const sortedVariablesArray = buildSortedVariablesArray(variables);
	const dependencyData = buildDependencyData(sortedVariablesArray);

	// Initialize the variable fetch store when dependency data is computed
	initializeFetchStore(sortedVariablesArray, dependencyData);

	return { sortedVariablesArray, dependencyData };
}

/**
 * Update derived values in the store state (for use with immer)
 * Also initializes the variable fetch store with the new dependency data
 */
export function updateDerivedValues(
	draft: IDashboardVariablesStoreState,
): void {
	draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
	draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);

	// Initialize the variable fetch store when dependency data is updated
	initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
}
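A short sketch of how the composition behaves on a toy input (same abbreviated variable shape as before; the cast is illustrative only):

import type { IDashboardVariables } from './dashboardVariablesStoreTypes';
import { computeDerivedValues } from './dashboardVariablesStoreUtils';

const variables = ({
	pod: { name: 'pod', order: 1, type: 'QUERY' },
	namespace: { name: 'namespace', order: 0, type: 'QUERY' },
} as unknown) as IDashboardVariables;

const { sortedVariablesArray, dependencyData } = computeDerivedValues(variables);
// sortedVariablesArray is sorted by `order`: [namespace, pod]
// dependencyData carries the QUERY-variable order plus both graphs
// side effect: the variable fetch store is (re)initialized with those graphs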
@@ -1,14 +0,0 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import createStore from './store';

// export type IDashboardVariables = DashboardData['variables'];
export type IDashboardVariables = Record<string, IDashboardVariable>;

export const dashboardVariablesStore = createStore<IDashboardVariables>({});

export function setDashboardVariablesStore(
	variables: Partial<IDashboardVariables>,
): void {
	dashboardVariablesStore.set(() => ({ ...variables }));
}
frontend/src/providers/Dashboard/store/variableFetchStore.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';

import createStore from './store';

// Fetch state for each variable
export type VariableFetchState =
	| 'idle' // stable state - initial or complete
	| 'loading' // actively fetching data (first time)
	| 'revalidating' // refetching existing data
	| 'waiting' // blocked on parent dependencies
	| 'error';

export interface IVariableFetchStoreState {
	// Per-variable fetch state
	states: Record<string, VariableFetchState>;

	// Dependency graphs (set once when variables change)
	dependencyGraph: VariableGraph; // variable -> children that depend on it
	parentGraph: VariableGraph; // variable -> parents it depends on

	// Track last update timestamp per variable to trigger re-fetches
	lastUpdated: Record<string, number>;
}

const initialState: IVariableFetchStoreState = {
	states: {},
	dependencyGraph: {},
	parentGraph: {},
	lastUpdated: {},
};

export const variableFetchStore = createStore<IVariableFetchStoreState>(
	initialState,
);

// ============== Actions ==============

/**
 * Initialize the store with dependency graphs and set initial states
 */
export function initializeVariableFetchStore(
	variableNames: string[],
	dependencyGraph: VariableGraph,
	parentGraph: VariableGraph,
): void {
	variableFetchStore.update((draft) => {
		draft.dependencyGraph = dependencyGraph;
		draft.parentGraph = parentGraph;

		// Initialize all variables to idle, preserving existing ready states
		variableNames.forEach((name) => {
			if (!draft.states[name]) {
				draft.states[name] = 'idle';
			}
		});
	});
}
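A hedged sketch of initialization, reusing the hypothetical namespace -> pod graphs from earlier; in the real flow this is invoked by initializeFetchStore in dashboardVariablesStoreUtils rather than called by hand:

import { initializeVariableFetchStore } from './variableFetchStore';

initializeVariableFetchStore(
	['namespace', 'pod'],
	{ namespace: ['pod'], pod: [] }, // dependencyGraph: variable -> children
	{ namespace: [], pod: ['namespace'] }, // parentGraph: variable -> parents
);
// Afterwards every listed variable has a fetch state, defaulting to 'idle';
// entries that already exist (e.g. one mid-'loading') are left untouched.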
@@ -569,8 +569,8 @@ func (d *Dispatcher) getOrCreateRoute(receiver string) *dispatch.Route {
	route := &dispatch.Route{
		RouteOpts: dispatch.RouteOpts{
			Receiver:      receiver,
			GroupWait:     30 * time.Second,
			GroupInterval: 5 * time.Minute,
			GroupWait:     d.route.RouteOpts.GroupWait,
			GroupInterval: d.route.RouteOpts.GroupInterval,
			GroupByAll:    false,
		},
		Matchers: labels.Matchers{{
@@ -1183,8 +1183,8 @@ func TestDispatcherRaceOnFirstAlertNotDeliveredWhenGroupWaitIsZero(t *testing.T)

route:
  group_by: ['alertname']
  group_wait: 1h
  group_interval: 1h
  group_wait: 30s
  group_interval: 5m
  receiver: 'slack'`
	conf, err := config.Load(confData)
	if err != nil {
@@ -1308,3 +1308,95 @@ func TestDispatcher_DoMaintenance(t *testing.T) {
	require.False(t, isMuted)
	require.Empty(t, mutedBy)
}

func TestDispatcher_GetOrCreateRoute(t *testing.T) {
	testCases := []struct {
		name                  string
		confData              string
		expectedReceiver      string
		expectedGroupWait     time.Duration
		expectedGroupInterval time.Duration
		expectedGroupByAll    bool
		expectedMatchersLen   int
		expectedMatcherName   string
		expectedMatcherValue  string
	}{
		{
			name: "create route for slack receiver",
			confData: `receivers:
- name: 'slack'
- name: 'email'
- name: 'pagerduty'

route:
  group_by: ['alertname']
  group_wait: 1m
  group_interval: 1m
  receiver: 'slack'`,
			expectedReceiver:      "slack",
			expectedGroupWait:     1 * time.Minute,
			expectedGroupInterval: 1 * time.Minute,
			expectedGroupByAll:    false,
			expectedMatchersLen:   1,
			expectedMatcherName:   "__receiver__",
			expectedMatcherValue:  "slack",
		},
		{
			name: "no group_wait and group_interval use default values",
			confData: `receivers:
- name: 'slack'

route:
  group_by: ['alertname']
  receiver: 'slack'`,
			expectedReceiver:      "slack",
			expectedGroupWait:     30 * time.Second,
			expectedGroupInterval: 5 * time.Minute,
			expectedGroupByAll:    false,
			expectedMatchersLen:   1,
			expectedMatcherName:   "__receiver__",
			expectedMatcherValue:  "slack",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			conf, err := config.Load(tc.confData)
			if err != nil {
				t.Fatal(err)
			}

			providerSettings := createTestProviderSettings()
			logger := providerSettings.Logger
			route := dispatch.NewRoute(conf.Route, nil)
			marker := alertmanagertypes.NewMarker(prometheus.NewRegistry())
			alerts, err := mem.NewAlerts(context.Background(), marker, time.Hour, nil, logger, nil)
			if err != nil {
				t.Fatal(err)
			}
			defer alerts.Close()

			timeout := func(d time.Duration) time.Duration { return time.Duration(0) }
			recorder := &recordStage{alerts: make(map[string]map[model.Fingerprint]*alertmanagertypes.Alert)}
			metrics := NewDispatcherMetrics(false, prometheus.NewRegistry())
			store := nfroutingstoretest.NewMockSQLRouteStore()
			store.MatchExpectationsInOrder(false)
			nfManager, err := rulebasednotification.New(context.Background(), providerSettings, nfmanager.Config{}, store)
			if err != nil {
				t.Fatal(err)
			}
			d := NewDispatcher(alerts, route, recorder, marker, timeout, nil, logger, metrics, nfManager, "test-org")
			// setup the dispatcher for tests
			d.receiverRoutes = map[string]*dispatch.Route{}

			newRoute := d.getOrCreateRoute(tc.expectedReceiver)
			require.Equal(t, tc.expectedReceiver, newRoute.RouteOpts.Receiver)
			require.Equal(t, tc.expectedGroupWait, newRoute.RouteOpts.GroupWait)
			require.Equal(t, tc.expectedGroupInterval, newRoute.RouteOpts.GroupInterval)
			require.Equal(t, tc.expectedGroupByAll, newRoute.RouteOpts.GroupByAll)
			require.Equal(t, tc.expectedMatchersLen, len(newRoute.Matchers))
			require.Equal(t, tc.expectedMatcherName, newRoute.Matchers[0].Name)
			require.Equal(t, tc.expectedMatcherValue, newRoute.Matchers[0].Value)
		})
	}
}
@@ -44,7 +44,6 @@ func NewAPI(
		telemetrylogs.LogResourceKeysTblName,
		telemetrymetadata.DBName,
		telemetrymetadata.AttributesMetadataLocalTableName,
		telemetrymetadata.ColumnEvolutionMetadataTableName,
	)

	return &API{
@@ -325,7 +325,7 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
		Response:            nil,
		ResponseContentType: "",
		SuccessStatusCode:   http.StatusNoContent,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
		ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusUnprocessableEntity},
		Deprecated:          false,
		SecuritySchemes:     []handler.OpenAPISecurityScheme{},
	})).Methods(http.MethodPost).GetError(); err != nil {
@@ -610,18 +610,18 @@ func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter,
	}

	opts := querybuilder.FilterExprVisitorOpts{
		Context:          ctx,
		Logger:           m.logger,
		FieldMapper:      m.fieldMapper,
		ConditionBuilder: m.condBuilder,
		FullTextColumn: &telemetrytypes.TelemetryFieldKey{
			Name: "labels"},
		FieldKeys: keys,
		StartNs:   querybuilder.ToNanoSecs(uint64(startMillis)),
		EndNs:     querybuilder.ToNanoSecs(uint64(endMillis)),
	}

	whereClause, err := querybuilder.PrepareWhereClause(expression, opts)
	startNs := querybuilder.ToNanoSecs(uint64(startMillis))
	endNs := querybuilder.ToNanoSecs(uint64(endMillis))

	whereClause, err := querybuilder.PrepareWhereClause(expression, opts, startNs, endNs)
	if err != nil {
		return nil, err
	}

@@ -369,7 +369,7 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID

func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error {
	if !module.config.Password.Reset.AllowSelf {
		return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "users are not allowed to reset their password themselves, please contact an admin to reset your password")
		return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
	}

	user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)

@@ -66,7 +66,6 @@ func newProvider(
		telemetrylogs.LogResourceKeysTblName,
		telemetrymetadata.DBName,
		telemetrymetadata.AttributesMetadataLocalTableName,
		telemetrymetadata.ColumnEvolutionMetadataTableName,
	)

	// Create trace statement builder
@@ -71,7 +71,18 @@ func Parse(filters *v3.FilterSet) (string, error) {
		// accommodate log filters like `body.log.message EXISTS` in the EXPR language,
		// where the user is checking for keys present in the JSON log body
		if strings.HasPrefix(v.Key.Key, "body.") {
			filter = fmt.Sprintf("%s %s %s", exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")
			// if body is a string and is valid JSON, check whether the key exists in the JSON
			filter = fmt.Sprintf(`((type(body) == "string" && isJSON(body)) && %s %s %s)`, exprFormattedValue(strings.TrimPrefix(v.Key.Key, "body.")), logOperatorsToExpr[v.Operator], "fromJSON(body)")

			// if body is a map, check whether the key exists in the map
			operator := v3.FilterOperatorNotEqual
			if v.Operator == v3.FilterOperatorNotExists {
				operator = v3.FilterOperatorEqual
			}
			nilCheckFilter := fmt.Sprintf("%s %s nil", v.Key.Key, logOperatorsToExpr[operator])

			// join the two filters with OR
			filter = fmt.Sprintf(`(%s or (type(body) == "map" && (%s)))`, filter, nilCheckFilter)
		} else if typ := getTypeName(v.Key.Type); typ != "" {
			filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], typ)
		} else {
@@ -3,203 +3,538 @@ package queryBuilderToExpr
import (
	"testing"

	signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	. "github.com/smartystreets/goconvey/convey"
	"github.com/expr-lang/expr/vm"
	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
	"github.com/stretchr/testify/assert"
)

var testCases = []struct {
	Name        string
	Query       *v3.FilterSet
	Expr        string
	ExpectError bool
}{
	{
		Name: "equal",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
		}},
		Expr: `attributes["key"] == "checkbody"`,
	},
	{
		Name: "not equal",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
		}},
		Expr: `attributes["key"] != "checkbody"`,
	},
	{
		Name: "less than",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] < 10`,
	},
	{
		Name: "greater than",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] > 10`,
	},
	{
		Name: "less than equal",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
	},
	{
		Name: "greater than equal",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
	},
	// case sensitive
	{
		Name: "body contains",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
		}},
		Expr: `body != nil && lower(body) contains lower("checkbody")`,
	},
	{
		Name: "body.log.message exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
		}},
		Expr: `"log.message" in fromJSON(body)`,
	},
	{
		Name: "body.log.message not exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
		}},
		Expr: `"log.message" not in fromJSON(body)`,
	},
	{
		Name: "body ncontains",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
		}},
		Expr: `body != nil && lower(body) not contains lower("checkbody")`,
	},
	{
		Name: "body regex",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
		}},
		Expr: `body != nil && body matches "[0-1]+regex$"`,
	},
	{
		Name: "body not regex",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
		}},
		Expr: `body != nil && body not matches "[0-1]+regex$"`,
	},
	{
		Name: "regex with escape characters",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
		}},
		Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
	},
	{
		Name: "invalid regex",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
		}},
		Expr:        `body != nil && lower(body) not matches "[0-9]++"`,
		ExpectError: true,
	},
	{
		Name: "in",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{1, 2, 3, 4}, Operator: "in"},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
	},
	{
		Name: "not in",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []interface{}{"1", "2"}, Operator: "nin"},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
	},
	{
		Name: "exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
		}},
		Expr: `"key" in attributes`,
	},
	{
		Name: "not exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
		}},
		Expr: `"key" not in attributes`,
	},
	{
		Name: "trace_id not exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
		}},
		Expr: `trace_id == nil`,
	},
	{
		Name: "trace_id exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
		}},
		Expr: `trace_id != nil`,
	},
	{
		Name: "span_id not exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
		}},
		Expr: `span_id == nil`,
	},
	{
		Name: "span_id exists",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
		}},
		Expr: `span_id != nil`,
	},
	{
		Name: "Multi filter",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
		}},
		Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
	},
	{
		Name: "incorrect multi filter",
		Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
			{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
			{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
		}},
		Expr:        `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
		ExpectError: true,
	},
}
func TestParseExpression(t *testing.T) {
	var testCases = []struct {
		Name        string
		Query       *v3.FilterSet
		Expr        string
		ExpectError bool
	}{
		{
			Name: "equal",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "="},
			}},
			Expr: `attributes["key"] == "checkbody"`,
		},
		{
			Name: "not equal",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "checkbody", Operator: "!="},
			}},
			Expr: `attributes["key"] != "checkbody"`,
		},
		{
			Name: "less than",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<"},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] < 10`,
		},
		{
			Name: "greater than",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">"},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] > 10`,
		},
		{
			Name: "less than equal",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] <= 10`,
		},
		{
			Name: "greater than equal",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: ">="},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] >= 10`,
		},
		// case sensitive
		{
			Name: "body contains",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
			}},
			Expr: `body != nil && lower(body) contains lower("checkbody")`,
		},
		{
			Name: "body.log.message exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" in fromJSON(body)) or (type(body) == "map" && (body.log.message != nil)))`,
		},
		{
			Name: "body.log.message not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body.log.message", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			Expr: `(((type(body) == "string" && isJSON(body)) && "log.message" not in fromJSON(body)) or (type(body) == "map" && (body.log.message == nil)))`,
		},
		{
			Name: "body ncontains",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
			}},
			Expr: `body != nil && lower(body) not contains lower("checkbody")`,
		},
		{
			Name: "body regex",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "regex"},
			}},
			Expr: `body != nil && body matches "[0-1]+regex$"`,
		},
		{
			Name: "body not regex",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
			}},
			Expr: `body != nil && body not matches "[0-1]+regex$"`,
		},
		{
			Name: "regex with escape characters",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: `^Executing \[\S+@\S+:[0-9]+\] \S+".*`, Operator: "regex"},
			}},
			Expr: `body != nil && body matches "^Executing \\[\\S+@\\S+:[0-9]+\\] \\S+\".*"`,
		},
		{
			Name: "invalid regex",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
			}},
			Expr:        `body != nil && lower(body) not matches "[0-9]++"`,
			ExpectError: true,
		},
		{
			Name: "in",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{1, 2, 3, 4}, Operator: "in"},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] in [1,2,3,4]`,
		},
		{
			Name: "not in",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"1", "2"}, Operator: "nin"},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] not in ['1','2']`,
		},
		{
			Name: "exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
			}},
			Expr: `"key" in attributes`,
		},
		{
			Name: "not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
			}},
			Expr: `"key" not in attributes`,
		},
		{
			Name: "trace_id not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			Expr: `trace_id == nil`,
		},
		{
			Name: "trace_id exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			Expr: `trace_id != nil`,
		},
		{
			Name: "span_id not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			Expr: `span_id == nil`,
		},
		{
			Name: "span_id exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			Expr: `span_id != nil`,
		},
		{
			Name: "Multi filter",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex$", Operator: "nregex"},
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
			}},
			Expr: `attributes["key"] != nil && attributes["key"] <= 10 and body != nil && body not matches "[0-1]+regex$" and "key" not in attributes`,
		},
		{
			Name: "incorrect multi filter",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: 10, Operator: "<="},
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-9]++", Operator: "nregex"},
				{Key: v3.AttributeKey{Key: "key", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
			}},
			Expr:        `attributes["key"] != nil && attributes["key"] <= 10 and body not matches "[0-9]++" and "key" not in attributes`,
			ExpectError: true,
		},
	}

func TestParse(t *testing.T) {
	for _, tt := range testCases {
		Convey(tt.Name, t, func() {
		t.Run(tt.Name, func(t *testing.T) {
			x, err := Parse(tt.Query)
			if tt.ExpectError {
				So(err, ShouldNotBeNil)
				assert.Error(t, err)
			} else {
				So(err, ShouldBeNil)
				So(x, ShouldEqual, tt.Expr)
				assert.NoError(t, err)
				assert.Equal(t, tt.Expr, x)
			}
		})
	}
}

type EntryComposite struct {
	ID int
	*entry.Entry
}

// makeEntry creates an EntryComposite for tests. Pass nil for traceID/spanID to mean "not set".
func makeEntry(id int, body any, attributes, resource map[string]any, traceID, spanID []byte) EntryComposite {
	e := entry.New()
	e.Body = body
	if attributes != nil {
		e.Attributes = attributes
	} else {
		e.Attributes = make(map[string]any)
	}
	if resource != nil {
		e.Resource = resource
	} else {
		e.Resource = make(map[string]any)
	}
	if traceID != nil {
		e.TraceID = traceID
	}
	if spanID != nil {
		e.SpanID = spanID
	}
	return EntryComposite{ID: id, Entry: e}
}

func TestExpressionVSEntry(t *testing.T) {
	// Dataset: entries with varied body (JSON and plain text), attributes, trace_id, span_id for filter testing.
	// IDs 0..12: JSON bodies (body.msg / body.log etc. work). IDs 13..17: simple text log bodies.
	dataset := []EntryComposite{
		// JSON body entries (0-12)
		makeEntry(0, `{"msg":"hello world"}`, map[string]any{"level": "info"}, map[string]any{"env": "prod", "host": "node-0"}, nil, nil),
		makeEntry(1, `{"msg":"error occurred", "missing": "value"}`, map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-1"}, []byte("trace1"), []byte("span1")),
		makeEntry(2, `{"msg":"checkbody substring"}`, map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-2"}, []byte("trace2"), nil),
		makeEntry(3, `{"msg":"no match here"}`, map[string]any{"level": "debug"}, map[string]any{"env": "staging", "host": "node-3"}, nil, []byte("span3")),
		makeEntry(4, `{"msg":"101regex suffix"}`, map[string]any{"code": "200", "count": int64(5)}, map[string]any{"env": "prod", "host": "node-4"}, nil, nil),
		makeEntry(5, `{"msg":"plain text only"}`, map[string]any{"code": "404", "count": int64(10)}, map[string]any{"env": "prod", "host": "node-5"}, []byte("trace5"), []byte("span5")),
		makeEntry(6, `{"log":{"message":"user login"}}`, map[string]any{"service": "auth"}, map[string]any{"env": "dev", "host": "node-6"}, nil, nil),
		makeEntry(7, `{"log":{"message":"user logout"}}`, map[string]any{"service": "auth", "user_id": "u1"}, map[string]any{"env": "dev", "host": "node-7"}, []byte("trace7"), nil),
		makeEntry(8, `{"event":"click"}`, map[string]any{"service": "api"}, map[string]any{"env": "dev", "host": "node-8"}, nil, nil),
		makeEntry(9, `{"msg":"checkbody"}`, map[string]any{"tag": "exact", "num": int64(9)}, map[string]any{"env": "prod", "host": "node-9"}, nil, nil),
		makeEntry(10, `{"msg":"CHECKBODY case"}`, map[string]any{"tag": "case", "num": int64(10)}, map[string]any{"env": "staging", "host": "node-10"}, nil, nil),
		makeEntry(11, `{"msg":"foo"}`, map[string]any{"status": "active", "score": int64(100)}, map[string]any{"env": "prod", "host": "node-11"}, nil, nil),
		makeEntry(12, `{"msg":"bar"}`, map[string]any{"status": "inactive", "score": int64(50)}, map[string]any{"env": "staging", "host": "node-12"}, []byte("trace12"), []byte("span12")),
		// Plain text log body entries (13-17)
		makeEntry(13, "Server started on port 8080", map[string]any{"component": "server"}, map[string]any{"env": "prod", "host": "node-13"}, nil, nil),
		makeEntry(14, "Connection refused to 10.0.0.1:5432", map[string]any{"level": "error"}, map[string]any{"env": "prod", "host": "node-14"}, nil, nil),
		makeEntry(15, "User login failed for admin", map[string]any{"service": "auth", "level": "warn"}, map[string]any{"env": "dev", "host": "node-15"}, []byte("trace15"), nil),
		makeEntry(16, "checkbody in text log", map[string]any{"level": "info"}, map[string]any{"env": "staging", "host": "node-16"}, nil, nil),
		makeEntry(17, "WARN: disk full on /var", map[string]any{"level": "warn"}, map[string]any{"env": "prod", "host": "node-17"}, nil, []byte("span17")),
		// Body as map (not string) entries (18-20)
		makeEntry(18, map[string]any{"msg": "checkbody substring", "level": "info"}, map[string]any{"source": "map"}, map[string]any{"env": "prod", "host": "node-18"}, nil, nil),
		makeEntry(19, map[string]any{"log": map[string]any{"message": "nested value in map body"}, "missing": true}, map[string]any{"source": "map"}, map[string]any{"env": "staging", "host": "node-19"}, []byte("trace19"), nil),
		makeEntry(20, map[string]any{"event": "deploy", "version": "1.2.0"}, map[string]any{"source": "map", "level": "info"}, map[string]any{"env": "dev", "host": "node-20"}, nil, []byte("span20")),
	}

	var testCases = []struct {
		Name            string
		Query           *v3.FilterSet
		ExpectedMatches []int
	}{
		{
			Name: "resource equal (env)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
			}},
			ExpectedMatches: []int{0, 1, 4, 5, 9, 11, 13, 14, 17, 18},
		},
		{
			Name: "resource not equal (env)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "!="},
			}},
			ExpectedMatches: []int{2, 3, 6, 7, 8, 10, 12, 15, 16, 19, 20},
		},
		{
			Name: "attribute less than (numeric)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: "<"},
			}},
			ExpectedMatches: []int{4},
		},
		{
			Name: "attribute greater than (numeric)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "count", DataType: v3.AttributeKeyDataTypeInt64, Type: v3.AttributeKeyTypeTag}, Value: 8, Operator: ">"},
			}},
			ExpectedMatches: []int{5},
		},
		{
			Name: "body contains (case insensitive)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
			}},
			ExpectedMatches: []int{2, 9, 10, 16},
		},
		{
			Name: "body ncontains",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "ncontains"},
			}},
			ExpectedMatches: []int{0, 1, 3, 4, 5, 6, 7, 8, 11, 12, 13, 14, 15, 17},
		},
		{
			Name: "body.msg (case insensitive)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: false}, Value: "checkbody", Operator: "contains"},
			}},
			ExpectedMatches: []int{2, 9, 10, 18},
		},
		{
			Name: "body regex",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
			}},
			ExpectedMatches: []int{4},
		},
		{
			Name: "body not regex",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "nregex"},
			}},
			ExpectedMatches: []int{0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17},
		},
		// body.log.message exists/nexists: expr checks "log.message" in fromJSON(body); nested key
		// semantics depend on signoz stanza helper. Omitted here to avoid coupling to env shape.
		{
			Name: "body top-level key exists (body.msg)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body.msg", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 18},
		},
		{
			Name: "body top-level key not exists (body.missing)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body.missing", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			ExpectedMatches: []int{0, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 18, 20},
		},
		{
			Name: "attribute exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "exists"},
			}},
			ExpectedMatches: []int{6, 7, 8, 15},
		},
		{
			Name: "attribute not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
			}},
			ExpectedMatches: []int{0, 1, 2, 3, 4, 5, 9, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20},
		},
		{
			Name: "trace_id exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			ExpectedMatches: []int{1, 2, 5, 7, 12, 15, 19},
		},
		{
			Name: "trace_id not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			ExpectedMatches: []int{0, 3, 4, 6, 8, 9, 10, 11, 13, 14, 16, 17, 18, 20},
		},
		{
			Name: "span_id exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
			}},
			ExpectedMatches: []int{1, 3, 5, 12, 17, 20},
		},
		{
			Name: "span_id not exists",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
			}},
			ExpectedMatches: []int{0, 2, 4, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 18, 19},
		},
		{
			Name: "in (attribute in list)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"info", "error"}, Operator: "in"},
			}},
			ExpectedMatches: []int{0, 1, 2, 14, 16, 20},
		},
		{
			Name: "not in (attribute not in list)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: []any{"error", "warn"}, Operator: "nin"},
			}},
			ExpectedMatches: []int{0, 2, 3, 16, 20},
		},
		{
			Name: "multi filter AND",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
			}},
			ExpectedMatches: []int{2, 16},
		},
		{
			Name: "multi filter AND (two attributes)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Operator: "nexists"},
			}},
			ExpectedMatches: []int{6, 7},
		},
		// Multi-filter variations: body + attribute, three conditions, trace/span + attribute
		{
			Name: "multi filter AND body contains + attribute",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "Connection", Operator: "contains"},
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
			}},
			ExpectedMatches: []int{14},
		},
		{
			Name: "multi filter AND body contains + service",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "login", Operator: "contains"},
				{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
			}},
			ExpectedMatches: []int{6, 15},
		},
		{
			Name: "multi filter AND env + level (prod error)",
			Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
				{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
				{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "error", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{1, 14},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND three conditions (staging + checkbody + info)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "staging", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "info", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{2, 16},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND trace_id exists + body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "checkbody", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{2},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND span_id nexists + service auth",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
{Key: v3.AttributeKey{Key: "service", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "auth", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{6, 7, 15},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND body regex + attribute",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "[0-1]+regex", Operator: "regex"},
|
||||
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "200", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{4},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND no trace_id + no span_id + env prod",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
{Key: v3.AttributeKey{Key: "span_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "nexists"},
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "prod", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{0, 4, 9, 11, 13, 14, 18},
|
||||
},
|
||||
{
|
||||
Name: "multi filter AND level warn + body contains",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "level", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "warn", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "body", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "disk", Operator: "contains"},
|
||||
}},
|
||||
ExpectedMatches: []int{17},
|
||||
},
|
||||
{
|
||||
Name: "no matches (attribute value not present)",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "env", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeResource}, Value: "never", Operator: "="},
|
||||
}},
|
||||
ExpectedMatches: []int{},
|
||||
},
|
||||
{
|
||||
Name: "attribute equal and trace_id exists",
|
||||
Query: &v3.FilterSet{Operator: "AND", Items: []v3.FilterItem{
|
||||
{Key: v3.AttributeKey{Key: "code", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag}, Value: "404", Operator: "="},
|
||||
{Key: v3.AttributeKey{Key: "trace_id", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeUnspecified, IsColumn: true}, Value: "", Operator: "exists"},
|
||||
}},
|
||||
ExpectedMatches: []int{5},
|
||||
},
|
||||
}

	for _, tt := range testCases {
		t.Run(tt.Name, func(t *testing.T) {
			expression, err := Parse(tt.Query)
			assert.NoError(t, err)

			compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
			assert.NoError(t, err)

			matchedIDs := []int{}
			for _, d := range dataset {
				env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
				matches, err := vm.Run(compiled, env)
				signozstanzahelper.PutExprEnv(env)
				if err != nil {
					// Eval error (e.g. fromJSON on non-JSON body) => treat as no match
					continue
				}
				if matches != nil && matches.(bool) {
					matchedIDs = append(matchedIDs, d.ID)
				}
			}
			assert.Equal(t, tt.ExpectedMatches, matchedIDs, "query %q", tt.Name)
		})
	}
}

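The loop above compiles each filter expression once and then evaluates it per entry, treating evaluation errors as non-matches. A minimal sketch of that pattern pulled into a reusable helper, under the same assumptions as the test (the Data struct with ID and Entry fields comes from the dataset fixture; matchEntries is a hypothetical name):

func matchEntries(expression string, dataset []Data) ([]int, error) {
	// Compile once; hasBodyFieldRef tells the env helper whether body parsing is needed.
	compiled, hasBodyFieldRef, err := signozstanzahelper.ExprCompileBool(expression)
	if err != nil {
		return nil, err
	}
	matched := []int{}
	for _, d := range dataset {
		env := signozstanzahelper.GetExprEnv(d.Entry, hasBodyFieldRef)
		out, err := vm.Run(compiled, env)
		signozstanzahelper.PutExprEnv(env) // return the pooled env
		if err != nil {
			continue // eval error (e.g. fromJSON on a non-JSON body) => no match
		}
		if b, ok := out.(bool); ok && b {
			matched = append(matched, d.ID)
		}
	}
	return matched, nil
}
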
@@ -48,8 +48,6 @@ func NewAggExprRewriter(
 // and the args if the parametric aggregation function is used.
 func (r *aggExprRewriter) Rewrite(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	expr string,
 	rateInterval uint64,
 	keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -76,12 +74,7 @@ func (r *aggExprRewriter) Rewrite(
 		return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
 	}

-	visitor := newExprVisitor(
-		ctx,
-		startNs,
-		endNs,
-		r.logger,
-		keys,
+	visitor := newExprVisitor(r.logger, keys,
 		r.fullTextColumn,
 		r.fieldMapper,
 		r.conditionBuilder,
@@ -101,8 +94,6 @@ func (r *aggExprRewriter) Rewrite(
 // RewriteMulti rewrites a slice of expressions.
 func (r *aggExprRewriter) RewriteMulti(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	exprs []string,
 	rateInterval uint64,
 	keys map[string][]*telemetrytypes.TelemetryFieldKey,
@@ -111,7 +102,7 @@ func (r *aggExprRewriter) RewriteMulti(
 	var errs []error
 	var chArgsList [][]any
 	for i, e := range exprs {
-		w, chArgs, err := r.Rewrite(ctx, startNs, endNs, e, rateInterval, keys)
+		w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
 		if err != nil {
 			errs = append(errs, err)
 			out[i] = e
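The hunks above capture the shape of this refactor: the aggregation rewriter no longer receives startNs/endNs, so callers drop the time-range arguments. A hedged sketch of a call site under the new signature (the rewriter r, keys, and rateInterval are assumed to exist as before; the expression is illustrative):

	// Old: r.Rewrite(ctx, startNs, endNs, expr, rateInterval, keys)
	rewritten, chArgs, err := r.Rewrite(ctx, "sum(duration_nano)", rateInterval, keys)
	if err != nil {
		// on error RewriteMulti passes the original expression through unmodified
	}
	_, _ = rewritten, chArgs
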
@@ -128,9 +119,6 @@ func (r *aggExprRewriter) RewriteMulti(

 // exprVisitor walks FunctionExpr nodes and applies the mappers.
 type exprVisitor struct {
-	ctx     context.Context
-	startNs uint64
-	endNs   uint64
 	chparser.DefaultASTVisitor
 	logger    *slog.Logger
 	fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
@@ -144,9 +132,6 @@ type exprVisitor struct {
 }

 func newExprVisitor(
-	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	logger *slog.Logger,
 	fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
 	fullTextColumn *telemetrytypes.TelemetryFieldKey,
@@ -155,9 +140,6 @@ type exprVisitor struct {
 	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
 ) *exprVisitor {
 	return &exprVisitor{
-		ctx:            ctx,
-		startNs:        startNs,
-		endNs:          endNs,
 		logger:         logger,
 		fieldKeys:      fieldKeys,
 		fullTextColumn: fullTextColumn,
@@ -204,16 +186,13 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
 		whereClause, err := PrepareWhereClause(
 			origPred,
 			FilterExprVisitorOpts{
-				Context:          v.ctx,
 				Logger:           v.logger,
 				FieldKeys:        v.fieldKeys,
 				FieldMapper:      v.fieldMapper,
 				ConditionBuilder: v.conditionBuilder,
 				FullTextColumn:   v.fullTextColumn,
 				JsonKeyToKey:     v.jsonKeyToKey,
-				StartNs:          v.startNs,
-				EndNs:            v.endNs,
-			},
+			}, 0, 0,
 		)
 		if err != nil {
 			return err
@@ -233,7 +212,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
 		for i := 0; i < len(args)-1; i++ {
 			origVal := args[i].String()
 			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
-			expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
+			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
 			if err != nil {
 				return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
 			}
@@ -251,7 +230,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
 		for i, arg := range args {
 			orig := arg.String()
 			fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
-			expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
+			expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
 			if err != nil {
 				return err
 			}

@@ -153,7 +153,6 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
 		key.Indexes = intrinsicOrCalculatedField.Indexes
 		key.Materialized = intrinsicOrCalculatedField.Materialized
 		key.JSONPlan = intrinsicOrCalculatedField.JSONPlan
-		key.Evolutions = intrinsicOrCalculatedField.Evolutions
 		return actions

 	}
@@ -206,8 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
 		key.Indexes = matchingKey.Indexes
 		key.Materialized = matchingKey.Materialized
 		key.JSONPlan = matchingKey.JSONPlan
-		key.Evolutions = matchingKey.Evolutions


 		return actions
 	} else {
 		// multiple matching keys, set materialized only if all the keys are materialized

@@ -19,8 +19,6 @@ import (

 func CollisionHandledFinalExpr(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	field *telemetrytypes.TelemetryFieldKey,
 	fm qbtypes.FieldMapper,
 	cb qbtypes.ConditionBuilder,
@@ -46,7 +44,7 @@ func CollisionHandledFinalExpr(

 	addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
 		sb := sqlbuilder.NewSelectBuilder()
-		condition, err := cb.ConditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
+		condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
 		if err != nil {
 			return err
 		}
@@ -59,7 +57,7 @@ func CollisionHandledFinalExpr(
 		return nil
 	}

-	colName, fieldForErr := fm.FieldFor(ctx, startNs, endNs, field)
+	colName, fieldForErr := fm.FieldFor(ctx, field)
 	if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
 		// the key didn't have the right context to be added to the query
 		// we try to use the context we know of
@@ -94,7 +92,7 @@ func CollisionHandledFinalExpr(
 		if err != nil {
 			return "", nil, err
 		}
-		colName, _ = fm.FieldFor(ctx, startNs, endNs, key)
+		colName, _ = fm.FieldFor(ctx, key)
 		colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
 		stmts = append(stmts, colName)
 	}

@@ -44,12 +44,12 @@ func keyIndexFilter(key *telemetrytypes.TelemetryFieldKey) any {

 func (b *defaultConditionBuilder) ConditionFor(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	op qbtypes.FilterOperator,
 	value any,
 	sb *sqlbuilder.SelectBuilder,
+	_ uint64,
+	_ uint64,
 ) (string, error) {

 	if key.FieldContext != telemetrytypes.FieldContextResource {
@@ -60,17 +60,15 @@ func (b *defaultConditionBuilder) ConditionFor(
 	// as we store resource values as string
 	formattedValue := querybuilder.FormatValueForContains(value)

-	columns, err := b.fm.ColumnFor(ctx, startNs, endNs, key)
+	column, err := b.fm.ColumnFor(ctx, key)
 	if err != nil {
 		return "", err
 	}
-	// resource evolution on main table doesn't affect this as we not changing the resource column in the resource fingerprint table.
-	column := columns[0]

 	keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key))
 	valueForIndexFilter := valueForIndexFilter(op, key, value)

-	fieldName, err := b.fm.FieldFor(ctx, startNs, endNs, key)
+	fieldName, err := b.fm.FieldFor(ctx, key)
 	if err != nil {
 		return "", err
 	}

@@ -206,7 +206,7 @@ func TestConditionBuilder(t *testing.T) {
 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
 		t.Run(tc.name, func(t *testing.T) {
-			cond, err := conditionBuilder.ConditionFor(context.Background(), 0, 0, tc.key, tc.op, tc.value, sb)
+			cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb, 0, 0)
 			sb.Where(cond)

 			if tc.expectedErr != nil {

@@ -27,48 +27,44 @@ func NewFieldMapper() *defaultFieldMapper {

 func (m *defaultFieldMapper) getColumn(
 	_ context.Context,
-	_, _ uint64,
 	key *telemetrytypes.TelemetryFieldKey,
-) ([]*schema.Column, error) {
+) (*schema.Column, error) {
 	if key.FieldContext == telemetrytypes.FieldContextResource {
-		return []*schema.Column{resourceColumns["labels"]}, nil
+		return resourceColumns["labels"], nil
 	}
 	if col, ok := resourceColumns[key.Name]; ok {
-		return []*schema.Column{col}, nil
+		return col, nil
 	}
 	return nil, qbtypes.ErrColumnNotFound
 }

 func (m *defaultFieldMapper) ColumnFor(
 	ctx context.Context,
-	tsStart, tsEnd uint64,
 	key *telemetrytypes.TelemetryFieldKey,
-) ([]*schema.Column, error) {
-	return m.getColumn(ctx, tsStart, tsEnd, key)
+) (*schema.Column, error) {
+	return m.getColumn(ctx, key)
 }

 func (m *defaultFieldMapper) FieldFor(
 	ctx context.Context,
-	tsStart, tsEnd uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 ) (string, error) {
-	columns, err := m.getColumn(ctx, tsStart, tsEnd, key)
+	column, err := m.getColumn(ctx, key)
 	if err != nil {
 		return "", err
 	}
 	if key.FieldContext == telemetrytypes.FieldContextResource {
-		return fmt.Sprintf("simpleJSONExtractString(%s, '%s')", columns[0].Name, key.Name), nil
+		return fmt.Sprintf("simpleJSONExtractString(%s, '%s')", column.Name, key.Name), nil
 	}
-	return columns[0].Name, nil
+	return column.Name, nil
 }

 func (m *defaultFieldMapper) ColumnExpressionFor(
 	ctx context.Context,
-	tsStart, tsEnd uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	_ map[string][]*telemetrytypes.TelemetryFieldKey,
 ) (string, error) {
-	colName, err := m.FieldFor(ctx, tsStart, tsEnd, key)
+	colName, err := m.FieldFor(ctx, key)
 	if err != nil {
 		return "", err
 	}

@@ -148,7 +148,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(

 // addConditions adds both filter and time conditions to the query
 func (b *resourceFilterStatementBuilder[T]) addConditions(
-	ctx context.Context,
+	_ context.Context,
 	sb *sqlbuilder.SelectBuilder,
 	start, end uint64,
 	query qbtypes.QueryBuilderQuery[T],
@@ -160,7 +160,6 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(

 	// warnings would be encountered as part of the main condition already
 	filterWhereClause, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
-		Context:          ctx,
 		Logger:           b.logger,
 		FieldMapper:      b.fieldMapper,
 		ConditionBuilder: b.conditionBuilder,
@@ -172,9 +171,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
 		// there is no need for "key" not found error for resource filtering
 		IgnoreNotFoundKeys: true,
 		Variables:          variables,
-		StartNs:            start,
-		EndNs:              end,
-	})
+	}, start, end)

 	if err != nil {
 		return err

@@ -23,7 +23,6 @@ const stringMatchingOperatorDocURL = "https://signoz.io/docs/userguide/operators
 // filterExpressionVisitor implements the FilterQueryVisitor interface
 // to convert the parsed filter expressions into ClickHouse WHERE clause
 type filterExpressionVisitor struct {
-	context          context.Context
 	logger           *slog.Logger
 	fieldMapper      qbtypes.FieldMapper
 	conditionBuilder qbtypes.ConditionBuilder
@@ -47,7 +46,6 @@ type filterExpressionVisitor struct {
 }

 type FilterExprVisitorOpts struct {
-	Context          context.Context
 	Logger           *slog.Logger
 	FieldMapper      qbtypes.FieldMapper
 	ConditionBuilder qbtypes.ConditionBuilder
@@ -67,7 +65,6 @@ type FilterExprVisitorOpts struct {
 // newFilterExpressionVisitor creates a new filterExpressionVisitor
 func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
 	return &filterExpressionVisitor{
-		context:          opts.Context,
 		logger:           opts.Logger,
 		fieldMapper:      opts.FieldMapper,
 		conditionBuilder: opts.ConditionBuilder,
@@ -93,7 +90,7 @@ type PreparedWhereClause struct {
 }

 // PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
-func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWhereClause, error) {
+func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64, endNs uint64) (*PreparedWhereClause, error) {

 	// Setup the ANTLR parsing pipeline
 	input := antlr.NewInputStream(query)
@@ -127,6 +124,8 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWher
 	}
 	tokens.Reset()

+	opts.StartNs = startNs
+	opts.EndNs = endNs
 	visitor := newFilterExpressionVisitor(opts)

 	// Handle syntax errors
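With this change the query window is passed as explicit trailing arguments and copied onto the options just before the visitor is built, rather than being carried in caller-populated Context/StartNs/EndNs fields. A hedged usage sketch (logger, fieldMapper, conditionBuilder, keys, and the window values are assumed to be in scope; the filter expression is illustrative):

	opts := querybuilder.FilterExprVisitorOpts{
		Logger:           logger,
		FieldMapper:      fieldMapper,
		ConditionBuilder: conditionBuilder,
		FieldKeys:        keys,
	}
	// startNs/endNs now ride along as the trailing parameters.
	whereClause, err := querybuilder.PrepareWhereClause("service.name = 'auth'", opts, startNs, endNs)
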
@@ -318,7 +317,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
 		// create a full text search condition on the body field

 		keyText := keyCtx.GetText()
-		cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder)
+		cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder, v.startNs, v.endNs)
 		if err != nil {
 			v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
 			return ""
@@ -338,7 +337,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
 			v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
 			return ""
 		}
-		cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
+		cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
 		if err != nil {
 			v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
 			return ""
@@ -382,7 +381,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
 		}
 		var conds []string
 		for _, key := range keys {
-			condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, nil, v.builder)
+			condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
 			if err != nil {
 				return ""
 			}
@@ -454,7 +453,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
 		}
 		var conds []string
 		for _, key := range keys {
-			condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, values, v.builder)
+			condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, values, v.builder, v.startNs, v.endNs)
 			if err != nil {
 				return ""
 			}
@@ -486,7 +485,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext

 		var conds []string
 		for _, key := range keys {
-			condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, []any{value1, value2}, v.builder)
+			condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
 			if err != nil {
 				return ""
 			}
@@ -571,7 +570,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext

 	var conds []string
 	for _, key := range keys {
-		condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, value, v.builder)
+		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder, v.startNs, v.endNs)
 		if err != nil {
 			v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
 			return ""
@@ -650,7 +649,7 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
 		v.errors = append(v.errors, "full text search is not supported")
 		return ""
 	}
-	cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
+	cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
 	if err != nil {
 		v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
 		return ""
@@ -735,13 +734,13 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
 	if key.FieldContext == telemetrytypes.FieldContextBody {
 		var err error
 		if BodyJSONQueryEnabled {
-			fieldName, err = v.fieldMapper.FieldFor(v.context, v.startNs, v.endNs, key)
+			fieldName, err = v.fieldMapper.FieldFor(context.Background(), key)
 			if err != nil {
 				v.errors = append(v.errors, fmt.Sprintf("failed to get field name for key %s: %s", key.Name, err.Error()))
 				return ""
 			}
 		} else {
-			fieldName, _ = v.jsonKeyToKey(v.context, key, qbtypes.FilterOperatorUnknown, value)
+			fieldName, _ = v.jsonKeyToKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
 		}
 	} else {
 		// TODO(add docs for json body search)
@@ -856,7 +855,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
 	// 1. either user meant key ( this is already handled above in fieldKeysForName )
 	// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

-	// Note:
+	// Note:
 	// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
 	// If user only wants to search `key`, then they have to use `key`
 	// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity

@@ -1,7 +1,6 @@
 package querybuilder

 import (
-	"context"
 	"log/slog"
 	"strings"
 	"testing"
@@ -55,12 +54,11 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			opts := FilterExprVisitorOpts{
-				Context:   context.Background(),
 				FieldKeys: keys,
 				Variables: tt.variables,
 			}

-			_, err := PrepareWhereClause(tt.expr, opts)
+			_, err := PrepareWhereClause(tt.expr, opts, 0, 0)

 			if tt.expectError {
 				if err == nil {
@@ -469,7 +467,7 @@ func TestVisitKey(t *testing.T) {
 			expectedWarnings:   nil,
 			expectedMainWrnURL: "",
 		},
-		{
+		{
 			name:    "only attribute.custom_field is selected",
 			keyText: "attribute.attribute.custom_field",
 			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{

@@ -376,7 +376,6 @@ func New(
 		telemetrylogs.LogResourceKeysTblName,
 		telemetrymetadata.DBName,
 		telemetrymetadata.AttributesMetadataLocalTableName,
-		telemetrymetadata.ColumnEvolutionMetadataTableName,
 	)

 	global, err := factory.NewProviderFromNamedMap(

@@ -25,34 +25,30 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

 func (c *conditionBuilder) conditionFor(
 	ctx context.Context,
-	startNs, endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
 	sb *sqlbuilder.SelectBuilder,
 ) (string, error) {
-	columns, err := c.fm.ColumnFor(ctx, startNs, endNs, key)
+	column, err := c.fm.ColumnFor(ctx, key)
 	if err != nil {
 		return "", err
 	}

-	// TODO(Piyush): Update this to support multiple JSON columns based on evolutions
-	for _, column := range columns {
-		if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
-			valueType, value := InferDataType(value, operator, key)
-			cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
-			if err != nil {
-				return "", err
-			}
-			return cond, nil
-		}
+	if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
+		valueType, value := InferDataType(value, operator, key)
+		cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
+		if err != nil {
+			return "", err
+		}
+		return cond, nil
 	}

 	if operator.IsStringSearchOperator() {
 		value = querybuilder.FormatValueForContains(value)
 	}

-	tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
+	tblFieldName, err := c.fm.FieldFor(ctx, key)
 	if err != nil {
 		return "", err
 	}
@@ -178,31 +174,6 @@ func (c *conditionBuilder) conditionFor(
 	}

-	var value any
-	column := columns[0]
-	if len(key.Evolutions) > 0 {
-		// we will use the corresponding column and its evolution entry for the query
-		newColumns, _, err := selectEvolutionsForColumns(columns, key.Evolutions, startNs, endNs, key.Name)
-		if err != nil {
-			return "", err
-		}
-
-		if len(newColumns) == 0 {
-			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "no valid evolution found for field %s in the given time range", key.Name)
-		}
-
-		// This mean tblFieldName is with multiIf, we just need to do a null check.
-		if len(newColumns) > 1 {
-			if operator == qbtypes.FilterOperatorExists {
-				return sb.IsNotNull(tblFieldName), nil
-			} else {
-				return sb.IsNull(tblFieldName), nil
-			}
-		}
-
-		// otherwise we have to find the correct exist operator based on the column type
-		column = newColumns[0]
-	}

 	switch column.Type.GetType() {
 	case schema.ColumnTypeEnumJSON:
 		if operator == qbtypes.FilterOperatorExists {
@@ -257,7 +228,6 @@ func (c *conditionBuilder) conditionFor(
 			}
 		default:
 			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)
-
 		}
 	}
 	return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported operator: %v", operator)
@@ -265,15 +235,14 @@ func (c *conditionBuilder) conditionFor(

 func (c *conditionBuilder) ConditionFor(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
 	sb *sqlbuilder.SelectBuilder,
+	_ uint64,
+	_ uint64,
 ) (string, error) {
-
-	condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
+	condition, err := c.conditionFor(ctx, key, operator, value, sb)
 	if err != nil {
 		return "", err
 	}
@@ -281,12 +250,12 @@ func (c *conditionBuilder) ConditionFor(
 	if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
 		// skip adding exists filter for intrinsic fields
 		// with an exception for body json search
-		field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
+		field, _ := c.fm.FieldFor(ctx, key)
 		if slices.Contains(maps.Keys(IntrinsicFields), field) && key.FieldContext != telemetrytypes.FieldContextBody {
 			return condition, nil
 		}

-		existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
+		existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
 		if err != nil {
 			return "", err
 		}

@@ -3,7 +3,6 @@ package telemetrylogs
 import (
 	"context"
 	"testing"
-	"time"

 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -12,148 +11,14 @@ import (
 	"github.com/stretchr/testify/require"
 )

-func TestExistsConditionForWithEvolutions(t *testing.T) {
-	testCases := []struct {
-		name          string
-		startTs       uint64
-		endTs         uint64
-		key           telemetrytypes.TelemetryFieldKey
-		operator      qbtypes.FilterOperator
-		value         any
-		expectedSQL   string
-		expectedArgs  []any
-		expectedError error
-	}{
-		{
-			name:    "New column",
-			startTs: uint64(time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
-			endTs:   uint64(time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
-			key: telemetrytypes.TelemetryFieldKey{
-				Name:          "service.name",
-				FieldContext:  telemetrytypes.FieldContextResource,
-				FieldDataType: telemetrytypes.FieldDataTypeString,
-				Evolutions: []*telemetrytypes.EvolutionEntry{
-					{
-						Signal:       telemetrytypes.SignalLogs,
-						ColumnName:   "resources_string",
-						FieldContext: telemetrytypes.FieldContextResource,
-						ColumnType:   "Map(LowCardinality(String), String)",
-						FieldName:    "__all__",
-						ReleaseTime:  time.Unix(0, 0),
-					},
-					{
-						Signal:       telemetrytypes.SignalLogs,
-						ColumnName:   "resource",
-						ColumnType:   "JSON()",
-						FieldContext: telemetrytypes.FieldContextResource,
-						FieldName:    "__all__",
-						ReleaseTime:  time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
-					},
-				},
-			},
-			operator:      qbtypes.FilterOperatorExists,
-			value:         nil,
-			expectedSQL:   "WHERE resource.`service.name`::String IS NOT NULL",
-			expectedError: nil,
-		},
-		{
-			name:    "Old column",
-			startTs: uint64(time.Date(2023, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
-			endTs:   uint64(time.Date(2023, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
-			key: telemetrytypes.TelemetryFieldKey{
-				Name:          "service.name",
-				FieldContext:  telemetrytypes.FieldContextResource,
-				FieldDataType: telemetrytypes.FieldDataTypeString,
-				Evolutions: []*telemetrytypes.EvolutionEntry{
-					{
-						Signal:       telemetrytypes.SignalLogs,
-						ColumnName:   "resources_string",
-						FieldContext: telemetrytypes.FieldContextResource,
-						ColumnType:   "Map(LowCardinality(String), String)",
-						FieldName:    "__all__",
-						ReleaseTime:  time.Unix(0, 0),
-					},
-					{
-						Signal:       telemetrytypes.SignalLogs,
-						ColumnName:   "resource",
-						ColumnType:   "JSON()",
-						FieldContext: telemetrytypes.FieldContextResource,
-						FieldName:    "__all__",
-						ReleaseTime:  time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
-					},
-				},
-			},
-			operator:      qbtypes.FilterOperatorExists,
-			value:         nil,
-			expectedSQL:   "WHERE mapContains(resources_string, 'service.name') = ?",
-			expectedArgs:  []any{true},
-			expectedError: nil,
-		},
-		{
-			name:    "Both Old column and new - empty filter",
-			startTs: uint64(time.Date(2023, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
-			endTs:   uint64(time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
-			key: telemetrytypes.TelemetryFieldKey{
-				Name:          "service.name",
-				FieldContext:  telemetrytypes.FieldContextResource,
-				FieldDataType: telemetrytypes.FieldDataTypeString,
-				Evolutions: []*telemetrytypes.EvolutionEntry{
-					{
-						Signal:       telemetrytypes.SignalLogs,
-						ColumnName:   "resources_string",
-						FieldContext: telemetrytypes.FieldContextResource,
-						ColumnType:   "Map(LowCardinality(String), String)",
-						FieldName:    "__all__",
-						ReleaseTime:  time.Unix(0, 0),
-					},
-					{
-						Signal:       telemetrytypes.SignalLogs,
-						ColumnName:   "resource",
-						ColumnType:   "JSON()",
-						FieldContext: telemetrytypes.FieldContextResource,
-						FieldName:    "__all__",
-						ReleaseTime:  time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
-					},
-				},
-			},
-			operator:      qbtypes.FilterOperatorExists,
-			value:         nil,
-			expectedSQL:   "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL",
-			expectedError: nil,
-		},
-	}
-	fm := NewFieldMapper()
-	conditionBuilder := NewConditionBuilder(fm)
-	ctx := context.Background()
-
-	for _, tc := range testCases {
-		sb := sqlbuilder.NewSelectBuilder()
-		t.Run(tc.name, func(t *testing.T) {
-			cond, err := conditionBuilder.ConditionFor(ctx, tc.startTs, tc.endTs, &tc.key, tc.operator, tc.value, sb)
-			sb.Where(cond)
-
-			if tc.expectedError != nil {
-				assert.Equal(t, tc.expectedError, err)
-			} else {
-				require.NoError(t, err)
-				sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
-				assert.Contains(t, sql, tc.expectedSQL)
-				assert.Equal(t, tc.expectedArgs, args)
-			}
-		})
-	}
-}

 func TestConditionFor(t *testing.T) {
 	ctx := context.Background()

 	mockEvolution := mockEvolutionData(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC))
 	testCases := []struct {
 		name          string
 		key           telemetrytypes.TelemetryFieldKey
 		operator      qbtypes.FilterOperator
 		value         any
 		evolutions    []*telemetrytypes.EvolutionEntry
 		expectedSQL   string
 		expectedArgs  []any
 		expectedError error
@@ -375,11 +240,9 @@ func TestConditionFor(t *testing.T) {
 				FieldContext:  telemetrytypes.FieldContextResource,
 				FieldDataType: telemetrytypes.FieldDataTypeString,
 			},
-			evolutions:    mockEvolution,
 			operator:      qbtypes.FilterOperatorExists,
 			value:         nil,
-			expectedSQL:   "mapContains(resources_string, 'service.name') = ?",
-			expectedArgs:  []any{true},
+			expectedSQL:   "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL",
 			expectedError: nil,
 		},
 		{
@@ -389,11 +252,9 @@ func TestConditionFor(t *testing.T) {
 				FieldContext:  telemetrytypes.FieldContextResource,
 				FieldDataType: telemetrytypes.FieldDataTypeString,
 			},
-			evolutions:    mockEvolution,
 			operator:      qbtypes.FilterOperatorNotExists,
 			value:         nil,
-			expectedSQL:   "mapContains(resources_string, 'service.name') <> ?",
-			expectedArgs:  []any{true},
+			expectedSQL:   "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NULL",
 			expectedError: nil,
 		},
 		{
@@ -454,11 +315,10 @@ func TestConditionFor(t *testing.T) {
 				FieldDataType: telemetrytypes.FieldDataTypeString,
 				Materialized:  true,
 			},
-			evolutions:    mockEvolution,
 			operator:      qbtypes.FilterOperatorRegexp,
 			value:         "frontend-.*",
-			expectedSQL:   "WHERE (match(`resource_string_service$$name`, ?) AND `resource_string_service$$name_exists` = ?)",
-			expectedArgs:  []any{"frontend-.*", true},
+			expectedSQL:   "(match(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL), ?) AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL) IS NOT NULL)",
+			expectedArgs:  []any{"frontend-.*"},
 			expectedError: nil,
 		},
 		{
@@ -469,10 +329,9 @@ func TestConditionFor(t *testing.T) {
 				FieldDataType: telemetrytypes.FieldDataTypeString,
 				Materialized:  true,
 			},
-			evolutions:    mockEvolution,
 			operator:      qbtypes.FilterOperatorNotRegexp,
 			value:         "test-.*",
-			expectedSQL:   "WHERE NOT match(`resource_string_service$$name`, ?)",
+			expectedSQL:   "WHERE NOT match(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL), ?)",
 			expectedArgs:  []any{"test-.*"},
 			expectedError: nil,
 		},
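The rewritten expectations encode the new lookup strategy for resource fields: instead of selecting one column per evolution entry, FieldFor now always emits a multiIf that prefers the JSON resource column and falls back to the legacy map (or a materialized column). A sketch of the Sprintf that produces the non-materialized variant, mirroring the new FieldFor shown further below (the key value is illustrative):

	key := "service.name"
	expr := fmt.Sprintf(
		"multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)",
		"resource", key, "resource", key,
		"resources_string", key, "resources_string['"+key+"']",
	)
	// => multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String,
	//            mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)
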
@@ -512,13 +371,14 @@ func TestConditionFor(t *testing.T) {
 			expectedError: qbtypes.ErrColumnNotFound,
 		},
 	}

 	fm := NewFieldMapper()
 	conditionBuilder := NewConditionBuilder(fm)

 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
 		t.Run(tc.name, func(t *testing.T) {
 			tc.key.Evolutions = tc.evolutions
-			cond, err := conditionBuilder.ConditionFor(ctx, 0, 0, &tc.key, tc.operator, tc.value, sb)
+			cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
 			sb.Where(cond)

 			if tc.expectedError != nil {
@@ -573,7 +433,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
 		t.Run(tc.name, func(t *testing.T) {
 			var err error
 			for _, key := range tc.keys {
-				cond, err := conditionBuilder.conditionFor(ctx, 0, 0, &key, tc.operator, tc.value, sb)
+				cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
 				sb.Where(cond)
 				if err != nil {
 					t.Fatalf("Error getting condition for key %s: %v", key.Name, err)
@@ -830,7 +690,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
 		t.Run(tc.name, func(t *testing.T) {
-			cond, err := conditionBuilder.conditionFor(ctx, 0, 0, &tc.key, tc.operator, tc.value, sb)
+			cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
 			sb.Where(cond)

 			if tc.expectedError != nil {

@@ -3,10 +3,7 @@ package telemetrylogs
 import (
 	"context"
 	"fmt"
-	"sort"
-	"strconv"
 	"strings"
-	"time"

 	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
 	"github.com/SigNoz/signoz-otel-collector/utils"
@@ -71,36 +68,35 @@ func NewFieldMapper() qbtypes.FieldMapper {
 	return &fieldMapper{}
 }

-func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
+func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
 	switch key.FieldContext {
 	case telemetrytypes.FieldContextResource:
-		columns := []*schema.Column{logsV2Columns["resources_string"], logsV2Columns["resource"]}
-		return columns, nil
+		return logsV2Columns["resource"], nil
 	case telemetrytypes.FieldContextScope:
 		switch key.Name {
 		case "name", "scope.name", "scope_name":
-			return []*schema.Column{logsV2Columns["scope_name"]}, nil
+			return logsV2Columns["scope_name"], nil
 		case "version", "scope.version", "scope_version":
-			return []*schema.Column{logsV2Columns["scope_version"]}, nil
+			return logsV2Columns["scope_version"], nil
 		}
-		return []*schema.Column{logsV2Columns["scope_string"]}, nil
+		return logsV2Columns["scope_string"], nil
 	case telemetrytypes.FieldContextAttribute:
 		switch key.FieldDataType {
 		case telemetrytypes.FieldDataTypeString:
-			return []*schema.Column{logsV2Columns["attributes_string"]}, nil
+			return logsV2Columns["attributes_string"], nil
 		case telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeNumber:
-			return []*schema.Column{logsV2Columns["attributes_number"]}, nil
+			return logsV2Columns["attributes_number"], nil
 		case telemetrytypes.FieldDataTypeBool:
-			return []*schema.Column{logsV2Columns["attributes_bool"]}, nil
+			return logsV2Columns["attributes_bool"], nil
 		}
 	case telemetrytypes.FieldContextBody:
 		// Body context is for JSON body fields
 		// Use body_json if feature flag is enabled
 		if querybuilder.BodyJSONQueryEnabled {
-			return []*schema.Column{logsV2Columns[LogsV2BodyJSONColumn]}, nil
+			return logsV2Columns[LogsV2BodyJSONColumn], nil
 		}
 		// Fall back to legacy body column
-		return []*schema.Column{logsV2Columns["body"]}, nil
+		return logsV2Columns["body"], nil
 	case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
 		col, ok := logsV2Columns[key.Name]
 		if !ok {
@@ -108,237 +104,96 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
 			if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
 				// Use body_json if feature flag is enabled and we have a body condition builder
 				if querybuilder.BodyJSONQueryEnabled {
-					// TODO(Piyush): Update this to support multiple JSON columns based on evolutions
-					// i.e return both the body json and body json promoted and let the evolutions decide which one to use
-					// based on the query range time.
-					return []*schema.Column{logsV2Columns[LogsV2BodyJSONColumn]}, nil
+					return logsV2Columns[LogsV2BodyJSONColumn], nil
 				}
 				// Fall back to legacy body column
-				return []*schema.Column{logsV2Columns["body"]}, nil
+				return logsV2Columns["body"], nil
 			}
 			return nil, qbtypes.ErrColumnNotFound
 		}
-		return []*schema.Column{col}, nil
+		return col, nil
 	}

 	return nil, qbtypes.ErrColumnNotFound
 }

-// selectEvolutionsForColumns selects the appropriate evolution entries for each column based on the time range.
-// Logic:
-// - Finds the latest base evolution (<= tsStartTime) across ALL columns
-// - Rejects all evolutions before this latest base evolution
-// - For duplicate evolutions it considers the oldest one (first in ReleaseTime)
-// - For each column, includes its evolution if it's >= latest base evolution and <= tsEndTime
-// - Results are sorted by ReleaseTime descending (newest first)
-func selectEvolutionsForColumns(columns []*schema.Column, evolutions []*telemetrytypes.EvolutionEntry, tsStart, tsEnd uint64, fieldName string) ([]*schema.Column, []*telemetrytypes.EvolutionEntry, error) {
-
-	sortedEvolutions := make([]*telemetrytypes.EvolutionEntry, len(evolutions))
-	copy(sortedEvolutions, evolutions)
-
-	// sort the evolutions by ReleaseTime ascending
-	sort.Slice(sortedEvolutions, func(i, j int) bool {
-		return sortedEvolutions[i].ReleaseTime.Before(sortedEvolutions[j].ReleaseTime)
-	})
-
-	tsStartTime := time.Unix(0, int64(tsStart))
-	tsEndTime := time.Unix(0, int64(tsEnd))
-
-	// Build evolution map: column name -> evolution
-	evolutionMap := make(map[string]*telemetrytypes.EvolutionEntry)
-	for _, evolution := range sortedEvolutions {
-		if _, exists := evolutionMap[evolution.ColumnName+":"+evolution.FieldName+":"+strconv.Itoa(int(evolution.Version))]; exists {
-			// since if there is duplicate we would just use the oldest one.
-			continue
-		}
-		evolutionMap[evolution.ColumnName+":"+evolution.FieldName+":"+strconv.Itoa(int(evolution.Version))] = evolution
-	}
-
-	// Find the latest base evolution (<= tsStartTime) across ALL columns
-	// Evolutions are sorted, so we can break early
-	var latestBaseEvolutionAcrossAll *telemetrytypes.EvolutionEntry
-	for _, evolution := range sortedEvolutions {
-		if evolution.ReleaseTime.After(tsStartTime) {
-			break
-		}
-		latestBaseEvolutionAcrossAll = evolution
-	}
-
-	// We shouldn't reach this, it basically means there is something wrong with the evolutions data
-	if latestBaseEvolutionAcrossAll == nil {
-		return nil, nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "no base evolution found for columns %v", columns)
-	}
-
-	columnLookUpMap := make(map[string]*schema.Column)
-	for _, column := range columns {
-		columnLookUpMap[column.Name] = column
-	}
-
-	// Collect column-evolution pairs
-	type colEvoPair struct {
-		column    *schema.Column
-		evolution *telemetrytypes.EvolutionEntry
-	}
-	pairs := []colEvoPair{}
-
-	for _, evolution := range evolutionMap {
-		// Reject evolutions before the latest base evolution
-		if evolution.ReleaseTime.Before(latestBaseEvolutionAcrossAll.ReleaseTime) {
-			continue
-		}
-		// skip evolutions after tsEndTime
-		if evolution.ReleaseTime.After(tsEndTime) || evolution.ReleaseTime.Equal(tsEndTime) {
-			continue
-		}
-
-		if _, exists := columnLookUpMap[evolution.ColumnName]; !exists {
-			return nil, nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "evolution column %s not found in columns %v", evolution.ColumnName, columns)
-		}
-
-		pairs = append(pairs, colEvoPair{columnLookUpMap[evolution.ColumnName], evolution})
-	}
-
-	// If no pairs found, fall back to latestBaseEvolutionAcrossAll for matching columns
-	if len(pairs) == 0 {
-		for _, column := range columns {
-			// Use latestBaseEvolutionAcrossAll if this column name matches its column name
-			if column.Name == latestBaseEvolutionAcrossAll.ColumnName {
-				pairs = append(pairs, colEvoPair{column, latestBaseEvolutionAcrossAll})
-			}
-		}
-	}
-
-	// Sort by ReleaseTime descending (newest first)
-	for i := 0; i < len(pairs)-1; i++ {
-		for j := i + 1; j < len(pairs); j++ {
-			if pairs[i].evolution.ReleaseTime.Before(pairs[j].evolution.ReleaseTime) {
-				pairs[i], pairs[j] = pairs[j], pairs[i]
-			}
-		}
-	}
-
-	// Extract results
-	newColumns := make([]*schema.Column, len(pairs))
-	evolutionsEntries := make([]*telemetrytypes.EvolutionEntry, len(pairs))
-	for i, pair := range pairs {
-		newColumns[i] = pair.column
-		evolutionsEntries[i] = pair.evolution
-	}
-
-	return newColumns, evolutionsEntries, nil
-}
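For readers tracking what the deleted helper did, a hedged, self-contained restatement of its documented rule for the common two-evolution case (this ignores the newest-first ordering and duplicate handling of the original; the function name and shape are hypothetical, and "time" is assumed imported):

	// pickColumns keeps the latest column released at or before the window start
	// (the "base"), plus any column released inside the window itself.
	func pickColumns(names []string, releases []time.Time, start, end time.Time) []string {
		picked := []string{}
		base := -1
		for i, r := range releases { // assumed sorted ascending by release time
			if !r.After(start) {
				base = i
			} else if r.Before(end) {
				picked = append(picked, names[i]) // released mid-window
			}
		}
		if base >= 0 {
			picked = append(picked, names[base])
		}
		return picked
	}

With resources_string released at the epoch and resource released 2024-01-01, a window entirely in 2023 selects only resources_string, a window entirely after 2024-01-01 selects only resource, and a straddling window selects both, which is what produced the multiIf branches asserted in the deleted test above.
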

-func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
-	columns, err := m.getColumn(ctx, key)
+func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
+	column, err := m.getColumn(ctx, key)
 	if err != nil {
 		return "", err
 	}

-	var newColumns []*schema.Column
-	var evolutionsEntries []*telemetrytypes.EvolutionEntry
-	if len(key.Evolutions) > 0 {
-		// we will use the corresponding column and its evolution entry for the query
-		newColumns, evolutionsEntries, err = selectEvolutionsForColumns(columns, key.Evolutions, tsStart, tsEnd, key.Name)
-		if err != nil {
-			return "", err
-		}
-	} else {
-		newColumns = columns
-	}
-
-	exprs := []string{}
-	existExpr := []string{}
-	for i, column := range newColumns {
-		// Use evolution column name if available, otherwise use the column name
-		columnName := column.Name
-		if evolutionsEntries != nil && evolutionsEntries[i] != nil {
-			columnName = evolutionsEntries[i].ColumnName
-		}
-
-		switch column.Type.GetType() {
-		case schema.ColumnTypeEnumJSON:
-			switch key.FieldContext {
-			case telemetrytypes.FieldContextResource:
-				exprs = append(exprs, fmt.Sprintf("%s.`%s`::String", columnName, key.Name))
-				existExpr = append(existExpr, fmt.Sprintf("%s.`%s` IS NOT NULL", columnName, key.Name))
-			case telemetrytypes.FieldContextBody:
-				if key.JSONDataType == nil {
-					return "", qbtypes.ErrColumnNotFound
-				}
-
-				if key.KeyNameContainsArray() && !key.JSONDataType.IsArray {
-					return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "FieldFor not supported for nested fields; only supported for flat paths (e.g. body.status.detail) and paths of Array type: %s(%s)", key.Name, key.FieldDataType)
-				}
-				expr, err := m.buildFieldForJSON(key)
-				if err != nil {
-					return "", err
-				}
-
-				exprs = append(exprs, expr)
-			default:
-				return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
-			}
-		case schema.ColumnTypeEnumLowCardinality:
-			switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
-			case schema.ColumnTypeEnumString:
-				exprs = append(exprs, column.Name)
-			default:
-				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
-			}
-		case schema.ColumnTypeEnumString,
-			schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
-		case schema.ColumnTypeEnumMap:
-			keyType := column.Type.(schema.MapColumnType).KeyType
-			if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
-				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
-			}
-
-			switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
-			case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
-				// a key could have been materialized, if so return the materialized column name
-				if key.Materialized {
-					return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
-				}
-				exprs = append(exprs, fmt.Sprintf("%s['%s']", columnName, key.Name))
-				existExpr = append(existExpr, fmt.Sprintf("mapContains(%s, '%s')", columnName, key.Name))
-			default:
-				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
+	switch column.Type.GetType() {
+	case schema.ColumnTypeEnumJSON:
+		// json is only supported for resource context as of now
+		switch key.FieldContext {
+		case telemetrytypes.FieldContextResource:
+			oldColumn := logsV2Columns["resources_string"]
+			oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)

+			// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
+			// once clickHouse dependency is updated, we need to check if we can remove it.
+			if key.Materialized {
+				oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
+				oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
+				return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
+			}
+			return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
+		case telemetrytypes.FieldContextBody:
+			if key.JSONDataType == nil {
+				return "", qbtypes.ErrColumnNotFound
+			}

+			if key.KeyNameContainsArray() && !key.JSONDataType.IsArray {
+				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "FieldFor not supported for nested fields; only supported for flat paths (e.g. body.status.detail) and paths of Array type: %s(%s)", key.Name, key.FieldDataType)
+			}

+			return m.buildFieldForJSON(key)
+		default:
+			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
+		}
+	case schema.ColumnTypeEnumLowCardinality:
+		switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
+		case schema.ColumnTypeEnumString:
+			return column.Name, nil
+		default:
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
+		}
+	case schema.ColumnTypeEnumString,
+		schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
+		return column.Name, nil
+	case schema.ColumnTypeEnumMap:
+		keyType := column.Type.(schema.MapColumnType).KeyType
+		if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
+		}

+		switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
+		case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
+			// a key could have been materialized, if so return the materialized column name
+			if key.Materialized {
+				return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
+			}
+			return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
+		default:
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
+		}
+	}

-	if len(exprs) == 1 {
-		return exprs[0], nil
-	} else if len(exprs) > 1 {
-		// Ensure existExpr has the same length as exprs
-		if len(existExpr) != len(exprs) {
-			return "", errors.New(errors.TypeInternal, errors.CodeInternal, "length of exist exprs doesn't match to that of exprs")
-		}
-		finalExprs := []string{}
-		for i, expr := range exprs {
-			finalExprs = append(finalExprs, fmt.Sprintf("%s, %s", existExpr[i], expr))
-		}
-		return "multiIf(" + strings.Join(finalExprs, ", ") + ", NULL)", nil
-	}

 	// should not reach here
|
||||
return columns[0].Name, nil
|
||||
return column.Name, nil
|
||||
}
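The tail of FieldFor above is the interesting part of this diff: the loop emits one value expression per candidate column plus a matching existence guard, and the pairs are then folded into a single ClickHouse multiIf. A minimal, self-contained sketch of that folding step, assuming the same exprs/existExpr shape (the name buildMultiIf is illustrative, not the package's API):

	package main

	import (
		"fmt"
		"strings"
	)

	// buildMultiIf folds (existence, value) pairs into
	// multiIf(cond1, val1, cond2, val2, ..., NULL), mirroring the fallback above.
	func buildMultiIf(exprs, existExpr []string) (string, error) {
		if len(exprs) == 1 {
			return exprs[0], nil
		}
		if len(existExpr) != len(exprs) {
			return "", fmt.Errorf("length of exist exprs doesn't match that of exprs")
		}
		pairs := make([]string, 0, len(exprs))
		for i, expr := range exprs {
			pairs = append(pairs, fmt.Sprintf("%s, %s", existExpr[i], expr))
		}
		return "multiIf(" + strings.Join(pairs, ", ") + ", NULL)", nil
	}

	func main() {
		expr, _ := buildMultiIf(
			[]string{"resource.`service.name`::String", "resources_string['service.name']"},
			[]string{"resource.`service.name` IS NOT NULL", "mapContains(resources_string, 'service.name')"},
		)
		fmt.Println(expr)
	}

Run against the two resource columns above, this prints exactly the multiIf expression the tests further down expect for service.name.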

func (m *fieldMapper) ColumnFor(ctx context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
	return m.getColumn(ctx, key)
}

func (m *fieldMapper) ColumnExpressionFor(
	ctx context.Context,
	tsStart, tsEnd uint64,
	field *telemetrytypes.TelemetryFieldKey,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

	colName, err := m.FieldFor(ctx, tsStart, tsEnd, field)
	colName, err := m.FieldFor(ctx, field)
	if errors.Is(err, qbtypes.ErrColumnNotFound) {
		// the key didn't have the right context to be added to the query
		// we try to use the context we know of
@@ -348,7 +203,7 @@ func (m *fieldMapper) ColumnExpressionFor(
		if _, ok := logsV2Columns[field.Name]; ok {
			// if it is, attach the column name directly
			field.FieldContext = telemetrytypes.FieldContextLog
			colName, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
			colName, _ = m.FieldFor(ctx, field)
		} else {
			// - the context is not provided
			// - there are not keys for the field
@@ -366,12 +221,12 @@ func (m *fieldMapper) ColumnExpressionFor(
		}
	} else if len(keysForField) == 1 {
		// we have a single key for the field, use it
		colName, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
		colName, _ = m.FieldFor(ctx, keysForField[0])
	} else {
		// select any non-empty value from the keys
		args := []string{}
		for _, key := range keysForField {
			colName, _ = m.FieldFor(ctx, tsStart, tsEnd, key)
			colName, _ = m.FieldFor(ctx, key)
			args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
		}
		colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
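When a field name is ambiguous (several keys match and no context disambiguates), the branch above coalesces all candidates by taking the first non-empty value. A sketch of the expression this generates, with two hypothetical candidate columns for a key named status:

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		// Hypothetical column expressions for two keys sharing the name "status".
		cols := []string{"attributes_string['status']", "attributes_number['status']"}
		args := []string{}
		for _, col := range cols {
			args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", col, col))
		}
		// Prints:
		// multiIf(toString(attributes_string['status']) != '', toString(attributes_string['status']),
		//         toString(attributes_number['status']) != '', toString(attributes_number['status']), NULL)
		fmt.Printf("multiIf(%s, NULL)\n", strings.Join(args, ", "))
	}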
@@ -3,7 +3,6 @@ package telemetrylogs
import (
	"context"
	"testing"
	"time"

	schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -18,7 +17,7 @@ func TestGetColumn(t *testing.T) {
	testCases := []struct {
		name string
		key telemetrytypes.TelemetryFieldKey
		expectedCol []*schema.Column
		expectedCol *schema.Column
		expectedError error
	}{
		{
@@ -27,7 +26,7 @@ func TestGetColumn(t *testing.T) {
			Name: "service.name",
			FieldContext: telemetrytypes.FieldContextResource,
			},
			expectedCol: []*schema.Column{logsV2Columns["resources_string"], logsV2Columns["resource"]},
			expectedCol: logsV2Columns["resource"],
			expectedError: nil,
		},
		{
@@ -36,7 +35,7 @@ func TestGetColumn(t *testing.T) {
			Name: "name",
			FieldContext: telemetrytypes.FieldContextScope,
			},
			expectedCol: []*schema.Column{logsV2Columns["scope_name"]},
			expectedCol: logsV2Columns["scope_name"],
			expectedError: nil,
		},
		{
@@ -45,7 +44,7 @@ func TestGetColumn(t *testing.T) {
			Name: "scope.name",
			FieldContext: telemetrytypes.FieldContextScope,
			},
			expectedCol: []*schema.Column{logsV2Columns["scope_name"]},
			expectedCol: logsV2Columns["scope_name"],
			expectedError: nil,
		},
		{
@@ -54,7 +53,7 @@ func TestGetColumn(t *testing.T) {
			Name: "scope_name",
			FieldContext: telemetrytypes.FieldContextScope,
			},
			expectedCol: []*schema.Column{logsV2Columns["scope_name"]},
			expectedCol: logsV2Columns["scope_name"],
			expectedError: nil,
		},
		{
@@ -63,7 +62,7 @@ func TestGetColumn(t *testing.T) {
			Name: "version",
			FieldContext: telemetrytypes.FieldContextScope,
			},
			expectedCol: []*schema.Column{logsV2Columns["scope_version"]},
			expectedCol: logsV2Columns["scope_version"],
			expectedError: nil,
		},
		{
@@ -72,7 +71,7 @@ func TestGetColumn(t *testing.T) {
			Name: "custom.scope.field",
			FieldContext: telemetrytypes.FieldContextScope,
			},
			expectedCol: []*schema.Column{logsV2Columns["scope_string"]},
			expectedCol: logsV2Columns["scope_string"],
			expectedError: nil,
		},
		{
@@ -82,7 +81,7 @@ func TestGetColumn(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeString,
			},
			expectedCol: []*schema.Column{logsV2Columns["attributes_string"]},
			expectedCol: logsV2Columns["attributes_string"],
			expectedError: nil,
		},
		{
@@ -92,7 +91,7 @@ func TestGetColumn(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeNumber,
			},
			expectedCol: []*schema.Column{logsV2Columns["attributes_number"]},
			expectedCol: logsV2Columns["attributes_number"],
			expectedError: nil,
		},
		{
@@ -102,7 +101,7 @@ func TestGetColumn(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeInt64,
			},
			expectedCol: []*schema.Column{logsV2Columns["attributes_number"]},
			expectedCol: logsV2Columns["attributes_number"],
			expectedError: nil,
		},
		{
@@ -112,7 +111,7 @@ func TestGetColumn(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeFloat64,
			},
			expectedCol: []*schema.Column{logsV2Columns["attributes_number"]},
			expectedCol: logsV2Columns["attributes_number"],
			expectedError: nil,
		},
		{
@@ -122,7 +121,7 @@ func TestGetColumn(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeBool,
			},
			expectedCol: []*schema.Column{logsV2Columns["attributes_bool"]},
			expectedCol: logsV2Columns["attributes_bool"],
			expectedError: nil,
		},
		{
@@ -131,7 +130,7 @@ func TestGetColumn(t *testing.T) {
			Name: "timestamp",
			FieldContext: telemetrytypes.FieldContextLog,
			},
			expectedCol: []*schema.Column{logsV2Columns["timestamp"]},
			expectedCol: logsV2Columns["timestamp"],
			expectedError: nil,
		},
		{
@@ -140,7 +139,7 @@ func TestGetColumn(t *testing.T) {
			Name: "body",
			FieldContext: telemetrytypes.FieldContextLog,
			},
			expectedCol: []*schema.Column{logsV2Columns["body"]},
			expectedCol: logsV2Columns["body"],
			expectedError: nil,
		},
		{
@@ -160,7 +159,7 @@ func TestGetColumn(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeBool,
			},
			expectedCol: []*schema.Column{logsV2Columns["attributes_bool"]},
			expectedCol: logsV2Columns["attributes_bool"],
			expectedError: nil,
		},
	}
@@ -169,7 +168,7 @@ func TestGetColumn(t *testing.T) {

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			col, err := fm.ColumnFor(ctx, 0, 0, &tc.key)
			col, err := fm.ColumnFor(ctx, &tc.key)

			if tc.expectedError != nil {
				assert.Equal(t, tc.expectedError, err)
@@ -184,14 +183,11 @@ func TestGetColumn(t *testing.T) {
func TestGetFieldKeyName(t *testing.T) {
	ctx := context.Background()

	resourceEvolution := mockEvolutionData(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC))

	testCases := []struct {
		name string
		key telemetrytypes.TelemetryFieldKey
		expectedResult string
		expectedError error
		addExistsFilter bool
		name string
		key telemetrytypes.TelemetryFieldKey
		expectedResult string
		expectedError error
	}{
		{
			name: "Simple column type - timestamp",
@@ -199,9 +195,8 @@ func TestGetFieldKeyName(t *testing.T) {
			Name: "timestamp",
			FieldContext: telemetrytypes.FieldContextLog,
			},
			expectedResult: "timestamp",
			expectedError: nil,
			addExistsFilter: false,
			expectedResult: "timestamp",
			expectedError: nil,
		},
		{
			name: "Map column type - string attribute",
@@ -210,9 +205,8 @@ func TestGetFieldKeyName(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeString,
			},
			expectedResult: "attributes_string['user.id']",
			expectedError: nil,
			addExistsFilter: false,
			expectedResult: "attributes_string['user.id']",
			expectedError: nil,
		},
		{
			name: "Map column type - number attribute",
@@ -221,9 +215,8 @@ func TestGetFieldKeyName(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeNumber,
			},
			expectedResult: "attributes_number['request.size']",
			expectedError: nil,
			addExistsFilter: false,
			expectedResult: "attributes_number['request.size']",
			expectedError: nil,
		},
		{
			name: "Map column type - bool attribute",
@@ -232,33 +225,28 @@ func TestGetFieldKeyName(t *testing.T) {
			FieldContext: telemetrytypes.FieldContextAttribute,
			FieldDataType: telemetrytypes.FieldDataTypeBool,
			},
			expectedResult: "attributes_bool['request.success']",
			expectedError: nil,
			addExistsFilter: false,
			expectedResult: "attributes_bool['request.success']",
			expectedError: nil,
		},
		{
			name: "Map column type - resource attribute",
			key: telemetrytypes.TelemetryFieldKey{
				Name: "service.name",
				FieldContext: telemetrytypes.FieldContextResource,
				Evolutions: resourceEvolution,
			},
			expectedResult: "resources_string['service.name']",
			expectedError: nil,
			addExistsFilter: false,
			expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)",
			expectedError: nil,
		},
		{
			name: "Map column type - resource attribute - Materialized - json",
			name: "Map column type - resource attribute - Materialized",
			key: telemetrytypes.TelemetryFieldKey{
				Name: "service.name",
				FieldContext: telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
				Materialized: true,
				Evolutions: resourceEvolution,
			},
			expectedResult: "`resource_string_service$$name`",
			expectedError: nil,
			addExistsFilter: false,
			expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL)",
			expectedError: nil,
		},
		{
			name: "Non-existent column",
@@ -274,7 +262,7 @@ func TestGetFieldKeyName(t *testing.T) {
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			fm := NewFieldMapper()
			result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
			result, err := fm.FieldFor(ctx, &tc.key)

			if tc.expectedError != nil {
				assert.Equal(t, tc.expectedError, err)
@@ -285,581 +273,3 @@ func TestGetFieldKeyName(t *testing.T) {
		})
	}
}
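The materialized cases above expect names like `resource_string_service$$name` with a companion `resource_string_service$$name_exists` flag. A rough sketch of the naming rule those expectations imply, assuming context and data-type prefixes with dots escaped as $$ (the real helper is telemetrytypes.FieldKeyToMaterializedColumnName; this reconstruction is illustrative only):

	package main

	import (
		"fmt"
		"strings"
	)

	// materializedColumnName sketches the inferred scheme:
	// <context>_<datatype>_<name with "." replaced by "$$">, backquoted.
	func materializedColumnName(fieldContext, dataType, name string) string {
		return fmt.Sprintf("`%s_%s_%s`", fieldContext, dataType, strings.ReplaceAll(name, ".", "$$"))
	}

	func main() {
		fmt.Println(materializedColumnName("resource", "string", "service.name"))
		// `resource_string_service$$name` (the exists flag appends "_exists")
	}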

func TestFieldForWithEvolutions(t *testing.T) {
	ctx := context.Background()

	key := &telemetrytypes.TelemetryFieldKey{
		Name: "service.name",
		FieldContext: telemetrytypes.FieldContextResource,
	}

	testCases := []struct {
		name string
		evolutions []*telemetrytypes.EvolutionEntry
		key *telemetrytypes.TelemetryFieldKey
		tsStartTime time.Time
		tsEndTime time.Time
		expectedResult string
		expectedError error
	}{
		{
			name: "Single evolution before tsStartTime",
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "resources_string['service.name']",
			expectedError: nil,
		},
		{
			name: "Single evolution exactly at tsStartTime",
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "resources_string['service.name']",
			expectedError: nil,
		},
		{
			name: "Single evolution after tsStartTime",
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)",
			expectedError: nil,
		},
		// TODO(piyush): to be added once integration with JSON is done.
		// {
		// 	name: "Single evolution after tsStartTime - JSON body",
		// 	evolutions: []*telemetrytypes.EvolutionEntry{
		// 		{
		// 			Signal: telemetrytypes.SignalLogs,
		// 			ColumnName: LogsV2BodyJSONColumn,
		// 			ColumnType: "JSON(max_dynamic_paths=0)",
		// 			FieldContext: telemetrytypes.FieldContextBody,
		// 			FieldName: "__all__",
		// 			ReleaseTime: time.Unix(0, 0),
		// 		},
		// 		{
		// 			Signal: telemetrytypes.SignalLogs,
		// 			ColumnName: LogsV2BodyPromotedColumn,
		// 			ColumnType: "JSON()",
		// 			FieldContext: telemetrytypes.FieldContextBody,
		// 			FieldName: "user.name",
		// 			ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
		// 		},
		// 	},
		// 	key: &telemetrytypes.TelemetryFieldKey{
		// 		Name: "user.name",
		// 		FieldContext: telemetrytypes.FieldContextBody,
		// 		JSONDataType: &telemetrytypes.String,
		// 		Materialized: true,
		// 	},
		// 	tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
		// 	tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
		// 	expectedResult: "coalesce(dynamicElement(body_json.`user.name`, 'String'), dynamicElement(body_promoted.`user.name`, 'String'))",
		// 	expectedError: nil,
		// },
		{
			name: "Multiple evolutions before tsStartTime - only latest should be included",
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 1, 2, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "resource.`service.name`::String",
			expectedError: nil,
		},
		{
			name: "Multiple evolutions after tsStartTime - all should be included",
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 1, 2, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Unix(0, 0),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)",
			expectedError: nil,
		},
		{
			name: "Duplicate evolutions after tsStartTime - all should be included",
			// Note: on production when this happens, we should go ahead and clean it up if required
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "resource.`service.name`::String",
			expectedError: nil,
		},
		{
			name: "Evolution exactly at tsEndTime - should not be included",
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
				},
			},
			key: key,
			tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
			tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
			expectedResult: "resources_string['service.name']",
			expectedError: nil,
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			fm := NewFieldMapper()

			tsStart := uint64(tc.tsStartTime.UnixNano())
			tsEnd := uint64(tc.tsEndTime.UnixNano())
			tc.key.Evolutions = tc.evolutions

			result, err := fm.FieldFor(ctx, tsStart, tsEnd, tc.key)

			if tc.expectedError != nil {
				assert.Equal(t, tc.expectedError, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, tc.expectedResult, result)
			}
		})
	}
}
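Taken together, the cases above encode a time-window rule for column evolutions: entries released at or before tsStart collapse to the single latest such entry, entries released inside the query window are all kept, and an entry released exactly at (or after) tsEnd is dropped. A minimal sketch of that rule, assuming evolutions arrive sorted by ReleaseTime ascending (illustrative only; the production selection lives in selectEvolutionsForColumns):

	package main

	import (
		"fmt"
		"time"
	)

	type evolution struct {
		ColumnName  string
		ReleaseTime time.Time
	}

	// selectWindow keeps only the latest entry released at or before tsStart,
	// plus every entry released strictly inside [tsStart, tsEnd).
	func selectWindow(evolutions []evolution, tsStart, tsEnd time.Time) []evolution {
		selected := []evolution{}
		for _, e := range evolutions {
			switch {
			case !e.ReleaseTime.After(tsStart):
				selected = []evolution{e} // newer baseline supersedes older ones
			case e.ReleaseTime.Before(tsEnd):
				selected = append(selected, e)
			}
			// released at or after tsEnd: excluded
		}
		return selected
	}

	func main() {
		evs := []evolution{
			{"resources_string", time.Unix(0, 0)},
			{"resource", time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC)},
		}
		start := time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC)
		end := time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC)
		for _, e := range selectWindow(evs, start, end) {
			fmt.Println(e.ColumnName) // resources_string, then resource
		}
	}

With both columns selected, FieldFor falls through to the multiIf folding shown earlier, which is exactly why the "Single evolution after tsStartTime" case expects the two-column multiIf expression.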

func TestSelectEvolutionsForColumns(t *testing.T) {
	testCases := []struct {
		name string
		columns []*schema.Column
		evolutions []*telemetrytypes.EvolutionEntry
		tsStart uint64
		tsEnd uint64
		fieldName string
		expectedColumns []string // column names
		expectedEvols []string // evolution column names
		expectedError bool
		errorStr string
	}{
		{
			name: "New evolutions after tsStartTime - should include all",
			columns: []*schema.Column{
				logsV2Columns["resources_string"],
				logsV2Columns["resource"],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 0,
					ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 1,
					ReleaseTime: time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{"resource", "resources_string"}, // sorted by ReleaseTime desc
			expectedEvols: []string{"resource", "resources_string"},
		},
		{
			name: "Columns without matching evolutions - should exclude them",
			columns: []*schema.Column{
				logsV2Columns["resources_string"],
				logsV2Columns["resource"], // no evolution for this
				logsV2Columns["attributes_string"], // no evolution for this
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 0,
					ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{"resources_string"},
			expectedEvols: []string{"resources_string"},
		},
		{
			name: "New evolutions after tsEndTime - should exclude all",
			columns: []*schema.Column{
				logsV2Columns["resources_string"],
				logsV2Columns["resource"],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 0,
					ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 1,
					ReleaseTime: time.Date(2024, 2, 25, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{"resources_string"},
			expectedEvols: []string{"resources_string"},
		},
		{
			name: "Empty columns array",
			columns: []*schema.Column{},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 0,
					ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{},
			expectedEvols: []string{},
			expectedError: true,
			errorStr: "column resources_string not found",
		},
		{
			name: "Duplicate evolutions - should use first encountered (oldest if sorted)",
			columns: []*schema.Column{
				logsV2Columns["resource"],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 0,
					ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 1,
					ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 1,
					ReleaseTime: time.Date(2024, 1, 20, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{"resource"},
			expectedEvols: []string{"resource"}, // should use first one (older)
		},
		{
			name: "Genuine Duplicate evolutions with new version- should consider both",
			columns: []*schema.Column{
				logsV2Columns["resources_string"],
				logsV2Columns["resource"],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 0,
					ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 1,
					ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					Version: 2,
					ReleaseTime: time.Date(2024, 1, 20, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 1, 16, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{"resources_string", "resource"},
			expectedEvols: []string{"resources_string", "resource"}, // should use first one (older)
		},
		{
			name: "Evolution exactly at tsEndTime",
			columns: []*schema.Column{
				logsV2Columns["resources_string"],
				logsV2Columns["resource"],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resources_string",
					ColumnType: "Map(LowCardinality(String), String)",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: "resource",
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextResource,
					FieldName: "__all__",
					ReleaseTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC), // exactly at tsEnd
				},
			},
			fieldName: "service.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{"resources_string"}, // resource excluded because After(tsEnd) is true
			expectedEvols: []string{"resources_string"},
		},
		{
			name: "Single evolution after tsStartTime - JSON body",
			columns: []*schema.Column{
				logsV2Columns[LogsV2BodyJSONColumn],
				logsV2Columns[LogsV2BodyPromotedColumn],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: LogsV2BodyJSONColumn,
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextBody,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: LogsV2BodyPromotedColumn,
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextBody,
					FieldName: "user.name",
					ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "user.name",
			tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{LogsV2BodyPromotedColumn, LogsV2BodyJSONColumn}, // sorted by ReleaseTime desc (newest first)
			expectedEvols: []string{LogsV2BodyPromotedColumn, LogsV2BodyJSONColumn},
		},
		{
			name: "No evolution after tsStartTime - JSON body",
			columns: []*schema.Column{
				logsV2Columns[LogsV2BodyJSONColumn],
				logsV2Columns[LogsV2BodyPromotedColumn],
			},
			evolutions: []*telemetrytypes.EvolutionEntry{
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: LogsV2BodyJSONColumn,
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextBody,
					FieldName: "__all__",
					ReleaseTime: time.Unix(0, 0),
				},
				{
					Signal: telemetrytypes.SignalLogs,
					ColumnName: LogsV2BodyPromotedColumn,
					ColumnType: "JSON()",
					FieldContext: telemetrytypes.FieldContextBody,
					FieldName: "user.name",
					ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
				},
			},
			fieldName: "user.name",
			tsStart: uint64(time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC).UnixNano()),
			tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
			expectedColumns: []string{LogsV2BodyPromotedColumn},
			expectedEvols: []string{LogsV2BodyPromotedColumn},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			resultColumns, resultEvols, err := selectEvolutionsForColumns(tc.columns, tc.evolutions, tc.tsStart, tc.tsEnd, tc.fieldName)

			if tc.expectedError {
				assert.Contains(t, err.Error(), tc.errorStr)
			} else {
				require.NoError(t, err)
				assert.Equal(t, len(tc.expectedColumns), len(resultColumns), "column count mismatch")
				assert.Equal(t, len(tc.expectedEvols), len(resultEvols), "evolution count mismatch")

				resultColumnNames := make([]string, len(resultColumns))
				for i, col := range resultColumns {
					resultColumnNames[i] = col.Name
				}
				resultEvolNames := make([]string, len(resultEvols))
				for i, evol := range resultEvols {
					resultEvolNames[i] = evol.ColumnName
				}

				for i := range tc.expectedColumns {
					assert.Equal(t, resultColumnNames[i], tc.expectedColumns[i], "expected column missing: "+tc.expectedColumns[i])
				}
				for i := range tc.expectedEvols {
					assert.Equal(t, resultEvolNames[i], tc.expectedEvols[i], "expected evolution missing: "+tc.expectedEvols[i])
				}
				// Verify sorting: should be descending by ReleaseTime
				for i := 0; i < len(resultEvols)-1; i++ {
					assert.True(t, !resultEvols[i].ReleaseTime.Before(resultEvols[i+1].ReleaseTime),
						"evolutions should be sorted descending by ReleaseTime")
				}
			}
		})
	}
}
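The final assertions verify ordering: selected evolutions are expected newest-first. Assuming a plain sort on ReleaseTime, the ordering step reduces to the sketch below (illustrative; the production code's actual sort site is not shown in this diff):

	package main

	import (
		"fmt"
		"sort"
		"time"
	)

	type evolutionEntry struct {
		ColumnName  string
		ReleaseTime time.Time
	}

	// sortNewestFirst orders entries descending by ReleaseTime, so that
	// resultEvols[i].ReleaseTime is never before resultEvols[i+1].ReleaseTime.
	func sortNewestFirst(evols []evolutionEntry) {
		sort.Slice(evols, func(i, j int) bool {
			return evols[i].ReleaseTime.After(evols[j].ReleaseTime)
		})
	}

	func main() {
		evols := []evolutionEntry{
			{"resources_string", time.Unix(0, 0)},
			{"resource", time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC)},
		}
		sortNewestFirst(evols)
		for _, e := range evols {
			fmt.Println(e.ColumnName) // resource, then resources_string
		}
	}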

@@ -1,7 +1,6 @@
package telemetrylogs

import (
	"context"
	"testing"

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -11,14 +10,12 @@ import (

// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
	ctx := context.Background()
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	keys := buildCompleteFieldKeyMap()

	opts := querybuilder.FilterExprVisitorOpts{
		Context: ctx,
		Logger: instrumentationtest.New().Logger(),
		FieldMapper: fm,
		ConditionBuilder: cb,
@@ -36,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

	for _, expr := range tests {
		t.Run(expr, func(t *testing.T) {
			clause, err := querybuilder.PrepareWhereClause(expr, opts)
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			require.NoError(t, err)
			require.NotNil(t, clause)

@@ -55,7 +52,6 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
	keys := buildCompleteFieldKeyMap()

	opts := querybuilder.FilterExprVisitorOpts{
		Context: context.Background(),
		Logger: instrumentationtest.New().Logger(),
		FieldMapper: fm,
		ConditionBuilder: cb,
@@ -73,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

	for _, expr := range tests {
		t.Run(expr, func(t *testing.T) {
			clause, err := querybuilder.PrepareWhereClause(expr, opts)
			clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
			require.NoError(t, err)
			require.NotNil(t, clause)

@@ -1,7 +1,6 @@
package telemetrylogs

import (
	"context"
	"fmt"
	"testing"

@@ -20,7 +19,6 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
	keys := buildCompleteFieldKeyMap()

	opts := querybuilder.FilterExprVisitorOpts{
		Context: context.Background(),
		Logger: instrumentationtest.New().Logger(),
		FieldMapper: fm,
		ConditionBuilder: cb,
@@ -163,7 +161,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
	for _, tc := range testCases {
		t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

			clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

			if tc.shouldPass {
				if err != nil {

@@ -1,11 +1,9 @@
package telemetrylogs

import (
	"context"
	"fmt"
	"strings"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -17,33 +15,19 @@ import (

// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	ctx := context.Background()
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	// Define a comprehensive set of field keys to support all test cases
	keys := buildCompleteFieldKeyMap()

	// for each key of resource attribute add evolution metadata
	for i, telemetryKeys := range keys {
		for j, telemetryKey := range telemetryKeys {
			if telemetryKey.FieldContext == telemetrytypes.FieldContextResource {
				keys[i][j].Evolutions = mockEvolutionData(releaseTime)
			}
		}
	}

	opts := querybuilder.FilterExprVisitorOpts{
		Context: ctx,
		Logger: instrumentationtest.New().Logger(),
		FieldMapper: fm,
		ConditionBuilder: cb,
		FieldKeys: keys,
		FullTextColumn: DefaultFullTextColumn,
		JsonKeyToKey: GetBodyJSONKey,
		StartNs: uint64(releaseTime.Add(-5 * time.Minute).UnixNano()),
		EndNs: uint64(releaseTime.Add(5 * time.Minute).UnixNano()),
	}

	testCases := []struct {
@@ -482,7 +466,7 @@ func TestFilterExprLogs(t *testing.T) {
			expectedErrorContains: "",
		},

		//fulltext with parenthesized expression
		// fulltext with parenthesized expression
		{
			category: "FREETEXT with parentheses",
			query: "error (status.code=500 OR status.code=503)",
@@ -2402,7 +2386,7 @@ func TestFilterExprLogs(t *testing.T) {
	for _, tc := range testCases {
		t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

			clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

			if tc.shouldPass {
				if err != nil {
@@ -2458,7 +2442,6 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
	}

	opts := querybuilder.FilterExprVisitorOpts{
		Context: context.Background(),
		Logger: instrumentationtest.New().Logger(),
		FieldMapper: fm,
		ConditionBuilder: cb,
@@ -2521,7 +2504,7 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
	for _, tc := range testCases {
		t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

			clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

			if tc.shouldPass {
				if err != nil {

@@ -268,7 +268,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
	}

	// get column expression for the field - use array index directly to avoid pointer to loop variable
	colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
	colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
	if err != nil {
		return nil, err
	}
@@ -277,6 +277,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
	}

	sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

	// Add filter conditions
	preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)

@@ -286,8 +287,7 @@ func (b *logQueryStatementBuilder) buildListQuery(

	// Add order by
	for _, orderBy := range query.Order {

		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
		colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
		if err != nil {
			return nil, err
		}
@@ -353,7 +353,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	// Keep original column expressions so we can build the tuple
	fieldNames := make([]string, 0, len(query.GroupBy))
	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}
@@ -368,7 +368,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
	allAggChArgs := make([]any, 0)
	for i, agg := range query.Aggregations {
		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
			ctx, start, end, agg.Expression,
			ctx, agg.Expression,
			uint64(query.StepInterval.Seconds()),
			keys,
		)
@@ -500,7 +500,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	var allGroupByArgs []any

	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}
@@ -518,7 +518,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
	for idx := range query.Aggregations {
		aggExpr := query.Aggregations[idx]
		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
			ctx, start, end, aggExpr.Expression,
			ctx, aggExpr.Expression,
			rateInterval,
			keys,
		)
@@ -590,7 +590,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(

// buildFilterCondition builds SQL condition from filter expression
func (b *logQueryStatementBuilder) addFilterCondition(
	ctx context.Context,
	_ context.Context,
	sb *sqlbuilder.SelectBuilder,
	start, end uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
@@ -604,7 +604,6 @@ func (b *logQueryStatementBuilder) addFilterCondition(
	if query.Filter != nil && query.Filter.Expression != "" {
		// add filter expression
		preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
			Context: ctx,
			Logger: b.logger,
			FieldMapper: b.fm,
			ConditionBuilder: b.cb,
@@ -613,9 +612,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
			FullTextColumn: b.fullTextColumn,
			JsonKeyToKey: b.jsonKeyToKey,
			Variables: variables,
			StartNs: start,
			EndNs: end,
		})
		}, start, end)

		if err != nil {
			return nil, err
@@ -37,14 +37,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
}

func TestStatementBuilderTimeSeries(t *testing.T) {

	// Create a test release time
	releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
	releaseTimeNano := uint64(releaseTime.UnixNano())

	cases := []struct {
		startTs uint64
		endTs uint64
		name string
		requestType qbtypes.RequestType
		query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
@@ -52,16 +45,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
		expectedErr error
	}{
		{
			startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
			endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
			name: "Time series with limit and count distinct on service.name",
			name: "Time series with limit",
			requestType: qbtypes.RequestTypeTimeSeries,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				StepInterval: qbtypes.Step{Duration: 30 * time.Second},
				Aggregations: []qbtypes.LogAggregation{
					{
						Expression: "count_distinct(service.name)",
						Expression: "count()",
					},
				},
				Filter: &qbtypes.Filter{
@@ -77,22 +68,20 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
				},
			},
			expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
			},
			expectedErr: nil,
		},
		{
			startTs: releaseTimeNano - uint64(24*time.Hour.Nanoseconds()),
			endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
			name: "Time series with OR b/w resource attr and attribute filter and count distinct on service.name",
			name: "Time series with OR b/w resource attr and attribute filter",
			requestType: qbtypes.RequestTypeTimeSeries,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalTraces,
				StepInterval: qbtypes.Step{Duration: 30 * time.Second},
				Aggregations: []qbtypes.LogAggregation{
					{
						Expression: "count_distinct(service.name)",
						Expression: "count()",
					},
				},
				Filter: &qbtypes.Filter{
@@ -108,14 +97,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
				},
			},
			expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, countDistinct(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1705224600), uint64(1705485600), "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600), 10, "redis-manual", "GET", true, "1705226400000000000", uint64(1705224600), "1705485600000000000", uint64(1705485600)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "redis-manual", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with limit + custom order by",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -149,14 +136,12 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with group by on materialized column",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -183,12 +168,10 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `materialized.key.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`materialized.key.name`) GLOBAL IN (SELECT `materialized.key.name` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
},
{
startTs: releaseTimeNano + uint64(24*time.Hour.Nanoseconds()),
endTs: releaseTimeNano + uint64(48*time.Hour.Nanoseconds()),
name: "Time series with materialised column using or with regex operator",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
@@ -206,29 +189,14 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1705397400), uint64(1705485600), "redis.*", true, "memcached", true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
}

ctx := context.Background()

mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
keysMap := buildCompleteFieldKeyMap()

// for each key of resource attribute add evolution metadata
for i, telemetryKeys := range keysMap {
for j, telemetryKey := range telemetryKeys {
if telemetryKey.FieldContext == telemetrytypes.FieldContextResource {
keysMap[i][j].Signal = telemetrytypes.SignalLogs
keysMap[i][j].Evolutions = mockEvolutionData(releaseTime)
}
}
}

mockMetadataStore.KeysMap = keysMap

mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

@@ -250,7 +218,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(ctx, c.startTs, c.endTs, c.requestType, c.query, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErr != nil {
require.Error(t, err)
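
A quick arithmetic cross-check on the expected Args above, since the same constants repeat through the rest of these tests: the fixed window passed to Build is in milliseconds, the timestamp bounds are that window rendered as nanosecond strings, and the ts_bucket_start lower bound is the start second minus 1800. The 30-minute cushion is inferred from the values themselves, not from anything shown in this diff. A minimal sketch:

package main

import "fmt"

// windowArgs derives the query arguments from the window Build receives in
// milliseconds (names are illustrative; the real builder is not shown here).
func windowArgs(startMs, endMs uint64) (startNs, endNs string, bucketStart, bucketEnd uint64) {
	startNs = fmt.Sprintf("%d000000", startMs) // timestamp >= "1747947419000000000"
	endNs = fmt.Sprintf("%d000000", endMs)     // timestamp < "1747983448000000000"
	bucketStart = startMs/1000 - 1800          // ts_bucket_start >= 1747945619
	bucketEnd = endMs / 1000                   // ts_bucket_start <= 1747983448
	return startNs, endNs, bucketStart, bucketEnd
}

func main() {
	fmt.Println(windowArgs(1747947419000, 1747983448000))
}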
@@ -347,10 +315,9 @@ func TestStatementBuilderListQuery(t *testing.T) {
},
}

ctx := context.Background()
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
fm := NewFieldMapper()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
@@ -371,7 +338,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(ctx, 1747947419000, 1747983448000, c.requestType, c.query, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErr != nil {
require.Error(t, err)
@@ -456,10 +423,9 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
},
}

ctx := context.Background()
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
fm := NewFieldMapper()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
@@ -477,10 +443,12 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
GetBodyJSONKey,
)

//

for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(ctx, 1747947419000, 1747983448000, c.requestType, c.query, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErr != nil {
require.Error(t, err)
@@ -531,10 +499,9 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
},
}

ctx := context.Background()
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
fm := NewFieldMapper()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
@@ -555,7 +522,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(ctx, 1747947419000, 1747983448000, c.requestType, c.query, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErrContains != "" {
require.Error(t, err)
@@ -627,10 +594,9 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
},
}

ctx := context.Background()
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
fm := NewFieldMapper()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
@@ -651,7 +617,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(ctx, 1747947419000, 1747983448000, c.requestType, c.query, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErr != nil {
require.Error(t, err)

@@ -2,7 +2,6 @@ package telemetrylogs

import (
"strings"
"time"

"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -1008,24 +1007,3 @@ func buildCompleteFieldKeyMapCollision() map[string][]*telemetrytypes.TelemetryF
}
return keysMap
}

func mockEvolutionData(releaseTime time.Time) []*telemetrytypes.EvolutionEntry {
return []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
FieldContext: telemetrytypes.FieldContextResource,
ColumnType: "Map(LowCardinality(String), String)",
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: releaseTime,
},
}
}

@@ -21,11 +21,12 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) ConditionFor(
ctx context.Context,
tsStart, tsEnd uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {

switch operator {
@@ -38,13 +39,13 @@ func (c *conditionBuilder) ConditionFor(
value = querybuilder.FormatValueForContains(value)
}

columns, err := c.fm.ColumnFor(ctx, tsStart, tsEnd, key)
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
// if we don't have a column, we can't build a condition for related values
return "", nil
}

tblFieldName, err := c.fm.FieldFor(ctx, tsStart, tsEnd, key)
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
// if we don't have a table field name, we can't build a condition for related values
return "", nil
@@ -119,12 +120,12 @@ func (c *conditionBuilder) ConditionFor(
// in the query builder, `exists` and `not exists` are used for
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
switch columns[0].Type {
switch column.Type {
case schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeString,
}:
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", columns[0].Name, key.Name)
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
if operator == qbtypes.FilterOperatorExists {
cond = sb.E(leftOperand, true)
} else {
@@ -133,5 +134,5 @@ func (c *conditionBuilder) ConditionFor(
}
}

return fmt.Sprintf(expr, columns[0].Name, sb.Var(key.Name), cond), nil
return fmt.Sprintf(expr, column.Name, sb.Var(key.Name), cond), nil
}
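
The signature change above keeps the same number of parameters but moves the time bounds to the end as ignored placeholders, so callers (see the test hunk below) now pass them last. A minimal sketch of the shape, with stand-in types since the real qbtypes interfaces are not part of this diff:

package main

import "fmt"

// Stand-ins for telemetrytypes/qbtypes (illustrative only).
type TelemetryFieldKey struct{ Name string }
type FilterOperator int

// ConditionFor mirrors the new parameter order: key/operator/value first,
// then two unused uint64 slots where tsStart/tsEnd used to travel.
func ConditionFor(key *TelemetryFieldKey, op FilterOperator, value any, _ uint64, _ uint64) (string, error) {
	return fmt.Sprintf("%s = %v", key.Name, value), nil
}

func main() {
	// Old call shape: ConditionFor(ctx, 0, 0, key, op, value, sb)
	// New call shape: ConditionFor(ctx, key, op, value, sb, 0, 0)
	cond, _ := ConditionFor(&TelemetryFieldKey{Name: "service.name"}, 0, "redis", 0, 0)
	fmt.Println(cond)
}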
@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, 0, 0, &tc.key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)

if tc.expectedError != nil {

@@ -33,48 +33,47 @@ func NewFieldMapper() qbtypes.FieldMapper {
return &fieldMapper{}
}

func (m *fieldMapper) getColumn(_ context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
return []*schema.Column{attributeMetadataColumns["resource_attributes"]}, nil
return attributeMetadataColumns["resource_attributes"], nil
case telemetrytypes.FieldContextAttribute:
return []*schema.Column{attributeMetadataColumns["attributes"]}, nil
return attributeMetadataColumns["attributes"], nil
}
return nil, qbtypes.ErrColumnNotFound
}

func (m *fieldMapper) ColumnFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
columns, err := m.getColumn(ctx, tsStart, tsEnd, key)
func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return nil, err
}
return columns, nil
return column, nil
}

func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
columns, err := m.getColumn(ctx, startNs, endNs, key)
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
column, err := m.getColumn(ctx, key)
if err != nil {
return "", err
}

switch columns[0].Type {
switch column.Type {
case schema.MapColumnType{
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
ValueType: schema.ColumnTypeString,
}:
return fmt.Sprintf("%s['%s']", columns[0].Name, key.Name), nil
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
}
return columns[0].Name, nil
return column.Name, nil
}

func (m *fieldMapper) ColumnExpressionFor(
ctx context.Context,
startNs, endNs uint64,
field *telemetrytypes.TelemetryFieldKey,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {

colName, err := m.FieldFor(ctx, startNs, endNs, field)
colName, err := m.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
@@ -84,7 +83,7 @@ func (m *fieldMapper) ColumnExpressionFor(
if _, ok := attributeMetadataColumns[field.Name]; ok {
// if it is, attach the column name directly
field.FieldContext = telemetrytypes.FieldContextSpan
colName, _ = m.FieldFor(ctx, startNs, endNs, field)
colName, _ = m.FieldFor(ctx, field)
} else {
// - the context is not provided
// - there are not keys for the field
@@ -102,12 +101,12 @@ func (m *fieldMapper) ColumnExpressionFor(
}
} else if len(keysForField) == 1 {
// we have a single key for the field, use it
colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
colName, _ = m.FieldFor(ctx, keysForField[0])
} else {
// select any non-empty value from the keys
args := []string{}
for _, key := range keysForField {
colName, _ = m.FieldFor(ctx, startNs, endNs, key)
colName, _ = m.FieldFor(ctx, key)
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
}
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
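
For a field that resolves to more than one candidate key, the loop above emits a ClickHouse multiIf that picks the first non-empty value. A small standalone reproduction of the string-building (the column names are assumed for illustration):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Candidate columns for one field across two contexts (assumed names).
	cols := []string{"resource_attributes['service.name']", "attributes['service.name']"}
	var args []string
	for _, c := range cols {
		args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", c, c))
	}
	// First non-empty candidate wins; NULL when none is set.
	fmt.Printf("multiIf(%s, NULL)\n", strings.Join(args, ", "))
}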
@@ -128,13 +128,13 @@ func TestGetColumn(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
col, err := fm.ColumnFor(context.Background(), 0, 0, &tc.key)
col, err := fm.ColumnFor(context.Background(), &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)
} else {
require.NoError(t, err)
assert.Equal(t, tc.expectedCol, col[0])
assert.Equal(t, tc.expectedCol, col)
}
})
}
@@ -145,8 +145,6 @@ func TestGetFieldKeyName(t *testing.T) {

testCases := []struct {
name string
tsStart uint64
tsEnd uint64
key telemetrytypes.TelemetryFieldKey
expectedResult string
expectedError error
@@ -205,7 +203,7 @@ func TestGetFieldKeyName(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
result, err := fm.FieldFor(ctx, tc.tsStart, tc.tsEnd, &tc.key)
result, err := fm.FieldFor(ctx, &tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)

@@ -2,11 +2,9 @@ package telemetrymetadata

import (
"context"
"encoding/json"
"fmt"
"log/slog"
"strings"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
@@ -15,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/cachetypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -34,24 +31,23 @@ var (
)

type telemetryMetaStore struct {
logger *slog.Logger
telemetrystore telemetrystore.TelemetryStore
tracesDBName string
tracesFieldsTblName string
spanAttributesKeysTblName string
indexV3TblName string
metricsDBName string
metricsFieldsTblName string
meterDBName string
meterFieldsTblName string
logsDBName string
logsFieldsTblName string
logAttributeKeysTblName string
logResourceKeysTblName string
logsV2TblName string
relatedMetadataDBName string
relatedMetadataTblName string
columnEvolutionMetadataTblName string
logger *slog.Logger
telemetrystore telemetrystore.TelemetryStore
tracesDBName string
tracesFieldsTblName string
spanAttributesKeysTblName string
indexV3TblName string
metricsDBName string
metricsFieldsTblName string
meterDBName string
meterFieldsTblName string
logsDBName string
logsFieldsTblName string
logAttributeKeysTblName string
logResourceKeysTblName string
logsV2TblName string
relatedMetadataDBName string
relatedMetadataTblName string

fm qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
@@ -80,29 +76,27 @@ func NewTelemetryMetaStore(
logResourceKeysTblName string,
relatedMetadataDBName string,
relatedMetadataTblName string,
columnEvolutionMetadataTblName string,
) telemetrytypes.MetadataStore {
metadataSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrymetadata")

t := &telemetryMetaStore{
logger: metadataSettings.Logger(),
telemetrystore: telemetrystore,
tracesDBName: tracesDBName,
tracesFieldsTblName: tracesFieldsTblName,
spanAttributesKeysTblName: spanAttributesKeysTblName,
indexV3TblName: indexV3TblName,
metricsDBName: metricsDBName,
metricsFieldsTblName: metricsFieldsTblName,
meterDBName: meterDBName,
meterFieldsTblName: meterFieldsTblName,
logsDBName: logsDBName,
logsV2TblName: logsV2TblName,
logsFieldsTblName: logsFieldsTblName,
logAttributeKeysTblName: logAttributeKeysTblName,
logResourceKeysTblName: logResourceKeysTblName,
relatedMetadataDBName: relatedMetadataDBName,
relatedMetadataTblName: relatedMetadataTblName,
columnEvolutionMetadataTblName: columnEvolutionMetadataTblName,
logger: metadataSettings.Logger(),
telemetrystore: telemetrystore,
tracesDBName: tracesDBName,
tracesFieldsTblName: tracesFieldsTblName,
spanAttributesKeysTblName: spanAttributesKeysTblName,
indexV3TblName: indexV3TblName,
metricsDBName: metricsDBName,
metricsFieldsTblName: metricsFieldsTblName,
meterDBName: meterDBName,
meterFieldsTblName: meterFieldsTblName,
logsDBName: logsDBName,
logsV2TblName: logsV2TblName,
logsFieldsTblName: logsFieldsTblName,
logAttributeKeysTblName: logAttributeKeysTblName,
logResourceKeysTblName: logResourceKeysTblName,
relatedMetadataDBName: relatedMetadataDBName,
relatedMetadataTblName: relatedMetadataTblName,
jsonColumnMetadata: map[telemetrytypes.Signal]map[telemetrytypes.FieldContext]telemetrytypes.JSONColumnMetadata{
telemetrytypes.SignalLogs: {
telemetrytypes.FieldContextBody: telemetrytypes.JSONColumnMetadata{
@@ -569,48 +563,9 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
keys = append(keys, bodyJSONPaths...)
complete = complete && finished
}

// fetch and add evolutions
evolutionMetadataKeySelectors := getEvolutionMetadataKeySelectors(keys)
evolutions, err := t.GetColumnEvolutionMetadataMulti(ctx, evolutionMetadataKeySelectors)
if err != nil {
return nil, false, err
}
for i, key := range keys {
// first check if there is evolutions that with field name as __all__
// then check for specific field name
selector := &telemetrytypes.EvolutionSelector{
Signal: key.Signal,
FieldContext: key.FieldContext,
FieldName: "__all__",
}

if keyEvolutions, ok := evolutions[telemetrytypes.GetEvolutionMetadataUniqueKey(selector)]; ok {
keys[i].Evolutions = keyEvolutions
}

selector.FieldName = key.Name
if keyEvolutions, ok := evolutions[telemetrytypes.GetEvolutionMetadataUniqueKey(selector)]; ok {
keys[i].Evolutions = keyEvolutions
}
}

return keys, complete, nil
}

func getEvolutionMetadataKeySelectors(keySelectors []*telemetrytypes.TelemetryFieldKey) []*telemetrytypes.EvolutionSelector {
var metadataKeySelectors []*telemetrytypes.EvolutionSelector
for _, keySelector := range keySelectors {
selector := &telemetrytypes.EvolutionSelector{
Signal: keySelector.Signal,
FieldContext: keySelector.FieldContext,
FieldName: keySelector.Name,
}
metadataKeySelectors = append(metadataKeySelectors, selector)
}
return metadataKeySelectors
}
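
The removed lookup above applied evolutions in two passes per key: the signal/context-wide "__all__" bucket first, then a field-specific bucket that overwrites it when present. A self-contained sketch of that precedence (types trimmed to the fields the logic touches):

package main

import "fmt"

type EvolutionEntry struct{ ColumnName string }

func main() {
	evolutions := map[string][]*EvolutionEntry{
		"logs:resource:__all__":      {{ColumnName: "resources_string"}},
		"logs:resource:service.name": {{ColumnName: "resource"}},
	}
	var applied []*EvolutionEntry
	// Pass 1: the wildcard bucket for the key's signal and context.
	if e, ok := evolutions["logs:resource:__all__"]; ok {
		applied = e
	}
	// Pass 2: a field-specific bucket takes precedence when it exists.
	if e, ok := evolutions["logs:resource:service.name"]; ok {
		applied = e
	}
	fmt.Println(applied[0].ColumnName) // resource
}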
func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
switch ctx {
case telemetrytypes.FieldContextLog:
@@ -1031,18 +986,18 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldDataType: fieldValueSelector.FieldDataType,
}

selectColumn, err := t.fm.FieldFor(ctx, 0, 0, key)
selectColumn, err := t.fm.FieldFor(ctx, key)

if err != nil {
// we don't have a explicit column to select from the related metadata table
// so we will select either from resource_attributes or attributes table
// in that order
resourceColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
resourceColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
})
attributeColumn, _ := t.fm.FieldFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
attributeColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
Name: key.Name,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
@@ -1063,12 +1018,11 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
}

whereClause, err := querybuilder.PrepareWhereClause(fieldValueSelector.ExistingQuery, querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: t.logger,
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
})
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1092,20 +1046,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel

// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, 0, 0, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}

// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, 0, 0, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, 0, 0, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
@@ -1796,103 +1750,6 @@ func (t *telemetryMetaStore) fetchMeterSourceMetricsTemporality(ctx context.Cont
return result, nil
}

// CachedColumnEvolutionMetadata is a cacheable type for storing column evolution metadata
type CachedEvolutionEntry struct {
Metadata []*telemetrytypes.EvolutionEntry `json:"metadata"`
}

var _ cachetypes.Cacheable = (*CachedEvolutionEntry)(nil)

func (c *CachedEvolutionEntry) MarshalBinary() ([]byte, error) {
return json.Marshal(c)
}

func (c *CachedEvolutionEntry) UnmarshalBinary(data []byte) error {
return json.Unmarshal(data, c)
}
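
The removed wrapper satisfied the cachetypes.Cacheable contract by JSON round-tripping through MarshalBinary/UnmarshalBinary. A runnable sketch of that round trip, with a trimmed Entry type standing in for telemetrytypes.EvolutionEntry:

package main

import (
	"encoding/json"
	"fmt"
)

type Entry struct {
	ColumnName string `json:"columnName"`
}

type CachedEvolutionEntry struct {
	Metadata []*Entry `json:"metadata"`
}

func (c *CachedEvolutionEntry) MarshalBinary() ([]byte, error) { return json.Marshal(c) }
func (c *CachedEvolutionEntry) UnmarshalBinary(b []byte) error { return json.Unmarshal(b, c) }

func main() {
	in := &CachedEvolutionEntry{Metadata: []*Entry{{ColumnName: "resources_string"}}}
	raw, _ := in.MarshalBinary()
	var out CachedEvolutionEntry
	_ = out.UnmarshalBinary(raw)
	fmt.Println(out.Metadata[0].ColumnName) // resources_string
}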

func (k *telemetryMetaStore) fetchEvolutionEntryFromClickHouse(ctx context.Context, selectors []*telemetrytypes.EvolutionSelector) ([]*telemetrytypes.EvolutionEntry, error) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("signal", "column_name", "column_type", "field_context", "field_name", "version", "release_time")
sb.From(fmt.Sprintf("%s.%s", k.relatedMetadataDBName, k.columnEvolutionMetadataTblName))
sb.OrderBy("release_time ASC")

var clauses []string
for _, selector := range selectors {
var clause string

if selector.FieldContext != telemetrytypes.FieldContextUnspecified {
clause = sb.E("field_context", selector.FieldContext)
}

if selector.FieldName != "" {
clause = sb.And(clause,
sb.Or(sb.E("field_name", selector.FieldName), sb.E("field_name", "__all__")),
)
} else {
clause = sb.And(clause, sb.E("field_name", "__all__"))
}

clauses = append(clauses, sb.And(sb.E("signal", selector.Signal), clause))
}
sb.Where(sb.Or(clauses...))

query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

var entries []*telemetrytypes.EvolutionEntry
rows, err := k.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, err
}
defer rows.Close()

for rows.Next() {
var entry telemetrytypes.EvolutionEntry
var releaseTimeNs float64
if err := rows.Scan(
&entry.Signal,
&entry.ColumnName,
&entry.ColumnType,
&entry.FieldContext,
&entry.FieldName,
&entry.Version,
&releaseTimeNs,
); err != nil {
return nil, err
}
// Convert nanoseconds to time.Time
releaseTime := time.Unix(0, int64(releaseTimeNs))
entry.ReleaseTime = releaseTime
entries = append(entries, &entry)
}

if err := rows.Err(); err != nil {
return nil, err
}

return entries, nil
}

// Get retrieves all evolutions for the given selectors from DB.
func (k *telemetryMetaStore) GetColumnEvolutionMetadataMulti(ctx context.Context, selectors []*telemetrytypes.EvolutionSelector) (map[string][]*telemetrytypes.EvolutionEntry, error) {
evolutions, err := k.fetchEvolutionEntryFromClickHouse(ctx, selectors)
if err != nil {
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to fetch evolution from clickhouse %s", err.Error())
}

evolutionsByUniqueKey := make(map[string][]*telemetrytypes.EvolutionEntry)

for _, evolution := range evolutions {
key := telemetrytypes.GetEvolutionMetadataUniqueKey(&telemetrytypes.EvolutionSelector{
Signal: evolution.Signal,
FieldContext: evolution.FieldContext,
FieldName: evolution.FieldName,
})
evolutionsByUniqueKey[key] = append(evolutionsByUniqueKey[key], evolution)
}
return evolutionsByUniqueKey, nil
}
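
GetColumnEvolutionMetadataMulti groups rows under a unique key; the tests further down expect keys like "logs:resource:__all__", so the key appears to be signal:fieldContext:fieldName. That format is an inference from the test data, since GetEvolutionMetadataUniqueKey itself is not in this diff. A sketch of the grouping:

package main

import "fmt"

func uniqueKey(signal, fieldContext, fieldName string) string {
	return fmt.Sprintf("%s:%s:%s", signal, fieldContext, fieldName)
}

func main() {
	rows := []struct{ signal, ctx, field, column string }{
		{"logs", "resource", "__all__", "resources_string"},
		{"logs", "resource", "__all__", "resource"}, // later release, same bucket
	}
	grouped := map[string][]string{}
	for _, r := range rows {
		k := uniqueKey(r.signal, r.ctx, r.field)
		grouped[k] = append(grouped[k], r.column)
	}
	fmt.Println(grouped["logs:resource:__all__"]) // [resources_string resource]
}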

// chunkSizeFirstSeenMetricMetadata limits the number of tuples per SQL query to avoid hitting the max_query_size limit.
//
// Calculation Logic:

@@ -39,7 +39,6 @@ func TestGetFirstSeenFromMetricMetadata(t *testing.T) {
telemetrylogs.LogResourceKeysTblName,
DBName,
AttributesMetadataLocalTableName,
ColumnEvolutionMetadataTableName,
)

lookupKeys := []telemetrytypes.MetricMetadataLookupKey{

@@ -4,7 +4,6 @@ import (
"context"
"regexp"
"testing"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -39,7 +38,6 @@ func newTestTelemetryMetaStoreTestHelper(store telemetrystore.TelemetryStore) te
telemetrylogs.LogResourceKeysTblName,
DBName,
AttributesMetadataLocalTableName,
ColumnEvolutionMetadataTableName,
)
}

@@ -386,386 +384,3 @@ func TestGetMetricFieldValuesIntrinsicBoolReturnsEmpty(t *testing.T) {
assert.Empty(t, values.BoolValues)
require.NoError(t, mock.ExpectationsWereMet())
}

var (
clickHouseQueryPatternWithFieldName = "SELECT.*signal.*column_name.*column_type.*field_context.*field_name.*version.*release_time.*FROM.*distributed_column_evolution_metadata.*WHERE.*signal.*=.*field_context.*=.*field_name.*=.*field_name.*=.*"
clickHouseQueryPatternWithoutFieldName = "SELECT.*signal.*column_name.*column_type.*field_context.*field_name.*version.*release_time.*FROM.*distributed_column_evolution_metadata.*WHERE.*signal.*=.*field_context.*=.*ORDER BY.*release_time.*ASC"
clickHouseColumns = []cmock.ColumnType{
{Name: "signal", Type: "String"},
{Name: "column_name", Type: "String"},
{Name: "column_type", Type: "String"},
{Name: "field_context", Type: "String"},
{Name: "field_name", Type: "String"},
{Name: "version", Type: "UInt32"},
{Name: "release_time", Type: "Float64"},
}
)

func createMockRows(values [][]any) *cmock.Rows {
return cmock.NewRows(clickHouseColumns, values)
}

func TestKeyEvolutionMetadata_Get_Multi_FetchFromClickHouse(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
values := [][]any{
{
"logs",
"resources_string",
"Map(LowCardinality(String), String)",
"resource",
"__all__",
uint32(0),
float64(releaseTime.UnixNano()),
},
}

selector := &telemetrytypes.EvolutionSelector{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
}

rows := createMockRows(values)
mock.ExpectQuery(clickHouseQueryPatternWithoutFieldName).WithArgs(telemetrytypes.SignalLogs, telemetrytypes.FieldContextResource, "__all__").WillReturnRows(rows)

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
result, err := metadata.GetColumnEvolutionMetadataMulti(ctx, []*telemetrytypes.EvolutionSelector{selector})
require.NoError(t, err)

expectedKey := "logs:resource:__all__"
require.Contains(t, result, expectedKey)
require.Len(t, result[expectedKey], 1)
assert.Equal(t, telemetrytypes.SignalLogs, result[expectedKey][0].Signal)
assert.Equal(t, "resources_string", result[expectedKey][0].ColumnName)
assert.Equal(t, "Map(LowCardinality(String), String)", result[expectedKey][0].ColumnType)
assert.Equal(t, telemetrytypes.FieldContextResource, result[expectedKey][0].FieldContext)
assert.Equal(t, "__all__", result[expectedKey][0].FieldName)
assert.Equal(t, releaseTime.UnixNano(), result[expectedKey][0].ReleaseTime.UnixNano())

require.NoError(t, mock.ExpectationsWereMet())
}

func TestKeyEvolutionMetadata_Get_Multi_MultipleMetadataEntries(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

releaseTime1 := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTime2 := time.Date(2024, 2, 15, 10, 0, 0, 0, time.UTC)

values := [][]any{
{
"logs",
"resources_string",
"Map(LowCardinality(String), String)",
"resource",
"__all__",
uint32(0),
float64(releaseTime1.UnixNano()),
},
{
"logs",
"resource",
"JSON()",
"resource",
"__all__",
uint32(1),
float64(releaseTime2.UnixNano()),
},
}

rows := createMockRows(values)
mock.ExpectQuery(clickHouseQueryPatternWithoutFieldName).WithArgs(telemetrytypes.SignalLogs, telemetrytypes.FieldContextResource, "__all__").WillReturnRows(rows)

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
selector := &telemetrytypes.EvolutionSelector{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
}
result, err := metadata.GetColumnEvolutionMetadataMulti(ctx, []*telemetrytypes.EvolutionSelector{selector})
require.NoError(t, err)

expectedKey := "logs:resource:__all__"
require.Contains(t, result, expectedKey)
require.Len(t, result[expectedKey], 2)
assert.Equal(t, "resources_string", result[expectedKey][0].ColumnName)
assert.Equal(t, "Map(LowCardinality(String), String)", result[expectedKey][0].ColumnType)
assert.Equal(t, "resource", result[expectedKey][0].FieldContext.StringValue())
assert.Equal(t, "__all__", result[expectedKey][0].FieldName)
assert.Equal(t, releaseTime1.UnixNano(), result[expectedKey][0].ReleaseTime.UnixNano())
assert.Equal(t, "resource", result[expectedKey][1].ColumnName)
assert.Equal(t, "JSON()", result[expectedKey][1].ColumnType)
assert.Equal(t, "resource", result[expectedKey][1].FieldContext.StringValue())
assert.Equal(t, "__all__", result[expectedKey][1].FieldName)
assert.Equal(t, releaseTime2.UnixNano(), result[expectedKey][1].ReleaseTime.UnixNano())

require.NoError(t, mock.ExpectationsWereMet())
}

func TestKeyEvolutionMetadata_Get_Multi_MultipleMetadataEntriesWithFieldName(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

releaseTime1 := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTime2 := time.Date(2024, 2, 15, 10, 0, 0, 0, time.UTC)
releaseTime3 := time.Date(2024, 3, 15, 10, 0, 0, 0, time.UTC)

values := [][]any{
{
"logs",
"body",
"String",
"body",
"__all__",
uint32(0),
float64(releaseTime1.UnixNano()),
},
{
"logs",
"body_json",
"JSON()",
"body",
"__all__",
uint32(1),
float64(releaseTime2.UnixNano()),
},
{
"logs",
"body_promoted",
"JSON()",
"body",
"user.name",
uint32(2),
float64(releaseTime3.UnixNano()),
},
}

selector := &telemetrytypes.EvolutionSelector{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "user.name",
}

rows := createMockRows(values)
mock.ExpectQuery(clickHouseQueryPatternWithFieldName).WithArgs(telemetrytypes.SignalLogs, telemetrytypes.FieldContextBody, selector.FieldName, "__all__").WillReturnRows(rows)

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
result, err := metadata.GetColumnEvolutionMetadataMulti(ctx, []*telemetrytypes.EvolutionSelector{selector})
require.NoError(t, err)

// Check entries for "__all__" field name
expectedKeyAll := "logs:body:__all__"
require.Contains(t, result, expectedKeyAll)
require.Len(t, result[expectedKeyAll], 2)
assert.Equal(t, "body", result[expectedKeyAll][0].ColumnName)
assert.Equal(t, "String", result[expectedKeyAll][0].ColumnType)
assert.Equal(t, "body", result[expectedKeyAll][0].FieldContext.StringValue())
assert.Equal(t, "__all__", result[expectedKeyAll][0].FieldName)
assert.Equal(t, releaseTime1.UnixNano(), result[expectedKeyAll][0].ReleaseTime.UnixNano())
assert.Equal(t, "body_json", result[expectedKeyAll][1].ColumnName)
assert.Equal(t, "JSON()", result[expectedKeyAll][1].ColumnType)
assert.Equal(t, "body", result[expectedKeyAll][1].FieldContext.StringValue())
assert.Equal(t, "__all__", result[expectedKeyAll][1].FieldName)
assert.Equal(t, releaseTime2.UnixNano(), result[expectedKeyAll][1].ReleaseTime.UnixNano())

// Check entries for "user.name" field name
expectedKeyUser := "logs:body:user.name"
require.Contains(t, result, expectedKeyUser)
require.Len(t, result[expectedKeyUser], 1)
assert.Equal(t, "body_promoted", result[expectedKeyUser][0].ColumnName)
assert.Equal(t, "JSON()", result[expectedKeyUser][0].ColumnType)
assert.Equal(t, "body", result[expectedKeyUser][0].FieldContext.StringValue())
assert.Equal(t, "user.name", result[expectedKeyUser][0].FieldName)
assert.Equal(t, releaseTime3.UnixNano(), result[expectedKeyUser][0].ReleaseTime.UnixNano())

require.NoError(t, mock.ExpectationsWereMet())
}

func TestKeyEvolutionMetadata_Get_Multi_MultipleMetadataEntriesWithMultipleSelectors(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

// releaseTime1 := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTime2 := time.Date(2024, 2, 15, 10, 0, 0, 0, time.UTC)
releaseTime3 := time.Date(2024, 3, 15, 10, 0, 0, 0, time.UTC)

values := [][]any{
{
"logs",
"body_json",
"JSON()",
"body",
"__all__",
uint32(0),
float64(releaseTime2.UnixNano()),
},
{
"logs",
"body_promoted",
"JSON()",
"body",
"user.name",
uint32(1),
float64(releaseTime3.UnixNano()),
},
{
"traces",
"resources_string",
"map()",
telemetrytypes.FieldContextResource,
"__all__",
uint32(0),
float64(releaseTime2.UnixNano()),
},
{
telemetrytypes.SignalTraces,
"resource",
"JSON()",
telemetrytypes.FieldContextResource,
"__all__",
uint32(1),
float64(releaseTime3.UnixNano()),
},
}

selectors := []*telemetrytypes.EvolutionSelector{
{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "user.name",
},
{
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "service.name",
},
}

query := `SELECT signal, column_name, column_type, field_context, field_name, version, release_time FROM signoz_metadata\.distributed_column_evolution_metadata WHERE ` +
`\(\(signal = \? AND \(field_context = \? AND \(field_name = \? OR field_name = \?\)\)\) OR ` +
`\(signal = \? AND \(field_context = \? AND \(field_name = \? OR field_name = \?\)\)\)\) ` +
`ORDER BY release_time ASC`
rows := createMockRows(values)
mock.ExpectQuery(query).WithArgs(
telemetrytypes.SignalLogs, telemetrytypes.FieldContextBody, selectors[0].FieldName, "__all__",
telemetrytypes.SignalTraces, telemetrytypes.FieldContextResource, selectors[1].FieldName, "__all__",
).WillReturnRows(rows)

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
_, err := metadata.GetColumnEvolutionMetadataMulti(ctx, selectors)
require.NoError(t, err)

require.NoError(t, mock.ExpectationsWereMet())
}

func TestKeyEvolutionMetadata_Get_Multi_EmptyResultFromClickHouse(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

rows := createMockRows([][]any{})
mock.ExpectQuery(clickHouseQueryPatternWithoutFieldName).WithArgs(telemetrytypes.SignalLogs, telemetrytypes.FieldContextResource, "__all__").WillReturnRows(rows)

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
selector := &telemetrytypes.EvolutionSelector{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
}
result, err := metadata.GetColumnEvolutionMetadataMulti(ctx, []*telemetrytypes.EvolutionSelector{selector})
require.NoError(t, err)

assert.Empty(t, result)
require.NoError(t, mock.ExpectationsWereMet())
}

func TestKeyEvolutionMetadata_Get_Multi_ClickHouseQueryError(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

mock.ExpectQuery(clickHouseQueryPatternWithoutFieldName).WithArgs(telemetrytypes.SignalLogs, telemetrytypes.FieldContextResource, "__all__").WillReturnError(assert.AnError)

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
selector := &telemetrytypes.EvolutionSelector{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
}
_, err := metadata.GetColumnEvolutionMetadataMulti(ctx, []*telemetrytypes.EvolutionSelector{selector})
require.Error(t, err)
}

func TestKeyEvolutionMetadata_Get_Multi_MultipleSelectors(t *testing.T) {
ctx := context.Background()

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &regexMatcher{})
mock := telemetryStore.Mock()

releaseTime1 := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
releaseTime2 := time.Date(2024, 2, 15, 10, 0, 0, 0, time.UTC)

values := [][]any{
{
telemetrytypes.SignalLogs,
"resources_string",
"Map(LowCardinality(String), String)",
telemetrytypes.FieldContextResource,
"__all__",
uint32(0),
float64(releaseTime1.UnixNano()),
},
{
telemetrytypes.SignalLogs,
"body",
"JSON()",
telemetrytypes.FieldContextBody,
"__all__",
uint32(1),
float64(releaseTime2.UnixNano()),
},
}

// When multiple selectors are provided, the query will have OR conditions
// The pattern should match queries with multiple OR clauses
queryPattern := "SELECT.*signal.*column_name.*column_type.*field_context.*field_name.*release_time.*FROM.*distributed_column_evolution_metadata.*WHERE.*ORDER BY.*release_time.*ASC"
rows := createMockRows(values)
mock.ExpectQuery(queryPattern).WillReturnRows(rows).WithArgs(telemetrytypes.SignalLogs, telemetrytypes.FieldContextResource, "__all__", "__all__", telemetrytypes.SignalLogs, telemetrytypes.FieldContextBody, "__all__", "__all__")

metadata := newTestTelemetryMetaStoreTestHelper(telemetryStore)
selectors := []*telemetrytypes.EvolutionSelector{
{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
},
{
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "__all__",
},
}
result, err := metadata.GetColumnEvolutionMetadataMulti(ctx, selectors)
require.NoError(t, err)

// Should have entries for both selectors
expectedKey1 := "logs:resource:__all__"
expectedKey2 := "logs:body:__all__"
require.Contains(t, result, expectedKey1)
require.Contains(t, result, expectedKey2)
require.Len(t, result[expectedKey1], 1)
require.Len(t, result[expectedKey2], 1)
assert.Equal(t, "resources_string", result[expectedKey1][0].ColumnName)
assert.Equal(t, "body", result[expectedKey2][0].ColumnName)

require.NoError(t, mock.ExpectationsWereMet())
}

@@ -6,7 +6,6 @@ const (
DBName = "signoz_metadata"
AttributesMetadataTableName = "distributed_attributes_metadata"
AttributesMetadataLocalTableName = "attributes_metadata"
ColumnEvolutionMetadataTableName = "distributed_column_evolution_metadata"
PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
PromotedPathsTableName = otelcollectorconst.DistributedPromotedPathsTable
SkipIndexTableName = "system.data_skipping_indices"

@@ -120,7 +120,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
stepSec,
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", []any{}, err
}
@@ -142,16 +142,13 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
)
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
StartNs: start,
EndNs: end,
})
}, start, end)
if err != nil {
return "", []any{}, err
}
@@ -203,7 +200,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
))

for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -228,16 +225,13 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(

if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
StartNs: start,
EndNs: end,
})
}, start, end)
if err != nil {
return "", nil, err
}
@@ -276,7 +270,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
stepSec,
))
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
}
@@ -295,16 +289,13 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
)
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
StartNs: start,
EndNs: end,
})
}, start, end)
if err != nil {
return "", nil, err
}
|
||||
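Note: the recurring change across these meter-builder hunks moves the query time range out of FilterExprVisitorOpts (the Context, StartNs, and EndNs fields are gone) and into trailing arguments on PrepareWhereClause itself. A condensed sketch of the new-side call shape, using only identifiers that appear in the diff:

	filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
		Logger:           b.logger,
		FieldMapper:      b.fm,
		ConditionBuilder: b.cb,
		FieldKeys:        keys,
		FullTextColumn:   &telemetrytypes.TelemetryFieldKey{Name: "labels"},
		Variables:        variables,
	}, start, end)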
@@ -23,8 +23,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
 
 func (c *conditionBuilder) conditionFor(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
@@ -41,7 +39,7 @@ func (c *conditionBuilder) conditionFor(
 		value = querybuilder.FormatValueForContains(value)
 	}
 
-	tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
+	tblFieldName, err := c.fm.FieldFor(ctx, key)
 	if err != nil {
 		return "", err
 	}
@@ -137,14 +135,14 @@ func (c *conditionBuilder) conditionFor(
 
 func (c *conditionBuilder) ConditionFor(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
 	sb *sqlbuilder.SelectBuilder,
+	_ uint64,
+	_ uint64,
 ) (string, error) {
-	condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
+	condition, err := c.conditionFor(ctx, key, operator, value, sb)
 	if err != nil {
 		return "", err
 	}
@@ -234,7 +234,7 @@ func TestConditionFor(t *testing.T) {
 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
 		t.Run(tc.name, func(t *testing.T) {
-			cond, err := conditionBuilder.ConditionFor(ctx, 0, 0, &tc.key, tc.operator, tc.value, sb)
+			cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
 			sb.Where(cond)
 
 			if tc.expectedError != nil {
@@ -289,7 +289,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
 		t.Run(tc.name, func(t *testing.T) {
 			var err error
 			for _, key := range tc.keys {
-				cond, err := conditionBuilder.ConditionFor(ctx, 0, 0, &key, tc.operator, tc.value, sb)
+				cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
 				sb.Where(cond)
 				if err != nil {
 					t.Fatalf("Error getting condition for key %s: %v", key.Name, err)
@@ -41,63 +41,62 @@ func NewFieldMapper() qbtypes.FieldMapper {
 	return &fieldMapper{}
 }
 
-func (m *fieldMapper) getColumn(_ context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
+func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
 
 	switch key.FieldContext {
 	case telemetrytypes.FieldContextResource, telemetrytypes.FieldContextScope, telemetrytypes.FieldContextAttribute:
-		return []*schema.Column{timeSeriesV4Columns["labels"]}, nil
+		return timeSeriesV4Columns["labels"], nil
 	case telemetrytypes.FieldContextMetric:
 		col, ok := timeSeriesV4Columns[key.Name]
 		if !ok {
-			return []*schema.Column{}, qbtypes.ErrColumnNotFound
+			return nil, qbtypes.ErrColumnNotFound
 		}
-		return []*schema.Column{col}, nil
+		return col, nil
 	case telemetrytypes.FieldContextUnspecified:
 		col, ok := timeSeriesV4Columns[key.Name]
 		if !ok {
 			// if nothing is found, return labels column
 			// as we keep all the labels in the labels column
-			return []*schema.Column{timeSeriesV4Columns["labels"]}, nil
+			return timeSeriesV4Columns["labels"], nil
 		}
-		return []*schema.Column{col}, nil
+		return col, nil
 	}
 
 	return nil, qbtypes.ErrColumnNotFound
 }
 
-func (m *fieldMapper) FieldFor(ctx context.Context, startNs, endNs uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
-	columns, err := m.getColumn(ctx, startNs, endNs, key)
+func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
+	column, err := m.getColumn(ctx, key)
 	if err != nil {
 		return "", err
 	}
 
 	switch key.FieldContext {
 	case telemetrytypes.FieldContextResource, telemetrytypes.FieldContextScope, telemetrytypes.FieldContextAttribute:
-		return fmt.Sprintf("JSONExtractString(%s, '%s')", columns[0].Name, key.Name), nil
+		return fmt.Sprintf("JSONExtractString(%s, '%s')", column.Name, key.Name), nil
 	case telemetrytypes.FieldContextMetric:
-		return columns[0].Name, nil
+		return column.Name, nil
 	case telemetrytypes.FieldContextUnspecified:
 		if slices.Contains(IntrinsicFields, key.Name) {
-			return columns[0].Name, nil
+			return column.Name, nil
 		}
-		return fmt.Sprintf("JSONExtractString(%s, '%s')", columns[0].Name, key.Name), nil
+		return fmt.Sprintf("JSONExtractString(%s, '%s')", column.Name, key.Name), nil
 	}
 
-	return columns[0].Name, nil
+	return column.Name, nil
 }
 
-func (m *fieldMapper) ColumnFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
-	return m.getColumn(ctx, tsStart, tsEnd, key)
+func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
+	return m.getColumn(ctx, key)
 }
 
 func (m *fieldMapper) ColumnExpressionFor(
 	ctx context.Context,
-	startNs, endNs uint64,
 	field *telemetrytypes.TelemetryFieldKey,
 	keys map[string][]*telemetrytypes.TelemetryFieldKey,
 ) (string, error) {
 
-	colName, err := m.FieldFor(ctx, startNs, endNs, field)
+	colName, err := m.FieldFor(ctx, field)
 	if err != nil {
 		return "", err
 	}
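Note: taken together, the mapper hunks above shrink the metrics field mapper in two ways: the time-range parameters disappear, and the []*schema.Column returns collapse to a single *schema.Column. A sketch of the qbtypes.FieldMapper surface these call sites now imply; the method set is read off the diff, but the exact interface declaration in qbtypes is an assumption and may carry more methods:

	// Sketch of the simplified mapper surface (illustrative, not verified
	// against the repository's qbtypes package).
	type FieldMapper interface {
		// FieldFor returns the SQL reference for a telemetry field key.
		FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error)
		// ColumnFor resolves the backing schema column for the key.
		ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error)
		// ColumnExpressionFor builds a column expression, consulting the known keys.
		ColumnExpressionFor(ctx context.Context, field *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error)
	}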
@@ -123,13 +123,13 @@ func TestGetColumn(t *testing.T) {
 
 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
-			col, err := fm.ColumnFor(ctx, 0, 0, &tc.key)
+			col, err := fm.ColumnFor(ctx, &tc.key)
 
 			if tc.expectedError != nil {
 				assert.Equal(t, tc.expectedError, err)
 			} else {
 				require.NoError(t, err)
-				assert.Equal(t, tc.expectedCol, col[0])
+				assert.Equal(t, tc.expectedCol, col)
 			}
 		})
 	}
@@ -207,7 +207,7 @@ func TestGetFieldKeyName(t *testing.T) {
 
 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
-			result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
+			result, err := fm.FieldFor(ctx, &tc.key)
 
 			if tc.expectedError != nil {
 				assert.Equal(t, tc.expectedError, err)
@@ -347,16 +347,13 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
 
 	if query.Filter != nil && query.Filter.Expression != "" {
 		preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
-			Context:          ctx,
 			Logger:           b.logger,
 			FieldMapper:      b.fm,
 			ConditionBuilder: b.cb,
 			FieldKeys:        keys,
 			FullTextColumn:   &telemetrytypes.TelemetryFieldKey{Name: "labels"},
 			Variables:        variables,
-			StartNs:          start,
-			EndNs:            end,
-		})
+		}, start, end)
 		if err != nil {
 			return "", nil, err
 		}
@@ -367,7 +364,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
 
 	sb.Select("fingerprint")
 	for _, g := range query.GroupBy {
-		col, err := b.fm.ColumnExpressionFor(ctx, start, end, &g.TelemetryFieldKey, keys)
+		col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
 		if err != nil {
 			return "", nil, err
 		}
@@ -29,8 +29,6 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
 
 func (c *conditionBuilder) conditionFor(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
@@ -48,13 +46,13 @@ func (c *conditionBuilder) conditionFor(
 	}
 
 	// first, locate the raw column type (so we can choose the right EXISTS logic)
-	columns, err := c.fm.ColumnFor(ctx, startNs, endNs, key)
+	column, err := c.fm.ColumnFor(ctx, key)
 	if err != nil {
 		return "", err
 	}
 
 	// then ask the mapper for the actual SQL reference
-	tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
+	tblFieldName, err := c.fm.FieldFor(ctx, key)
 	if err != nil {
 		return "", err
 	}
@@ -167,7 +165,7 @@ func (c *conditionBuilder) conditionFor(
 	case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
 
 		var value any
-		switch columns[0].Type.GetType() {
+		switch column.Type.GetType() {
 		case schema.ColumnTypeEnumJSON:
 			if operator == qbtypes.FilterOperatorExists {
 				return sb.IsNotNull(tblFieldName), nil
@@ -184,7 +182,7 @@ func (c *conditionBuilder) conditionFor(
 				return sb.E(tblFieldName, value), nil
 			}
 		case schema.ColumnTypeEnumLowCardinality:
-			switch elementType := columns[0].Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
+			switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
 			case schema.ColumnTypeEnumString:
 				value = ""
 				if operator == qbtypes.FilterOperatorExists {
@@ -208,14 +206,14 @@ func (c *conditionBuilder) conditionFor(
 				return sb.E(tblFieldName, value), nil
 			}
 		case schema.ColumnTypeEnumMap:
-			keyType := columns[0].Type.(schema.MapColumnType).KeyType
+			keyType := column.Type.(schema.MapColumnType).KeyType
 			if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
-				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, columns[0].Type)
+				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
 			}
 
-			switch valueType := columns[0].Type.(schema.MapColumnType).ValueType; valueType.GetType() {
+			switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
 			case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
-				leftOperand := fmt.Sprintf("mapContains(%s, '%s')", columns[0].Name, key.Name)
+				leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
 				if key.Materialized {
 					leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
 				}
@@ -228,7 +226,7 @@ func (c *conditionBuilder) conditionFor(
 				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
 			}
 		default:
-			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", columns[0].Type)
+			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)
 		}
 	}
 	return "", nil
@@ -236,25 +234,25 @@ func (c *conditionBuilder) conditionFor(
 
 func (c *conditionBuilder) ConditionFor(
 	ctx context.Context,
-	startNs uint64,
-	endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 	operator qbtypes.FilterOperator,
 	value any,
 	sb *sqlbuilder.SelectBuilder,
+	startNs uint64,
+	_ uint64,
 ) (string, error) {
 	if c.isSpanScopeField(key.Name) {
 		return c.buildSpanScopeCondition(key, operator, value, startNs)
 	}
 
-	condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
+	condition, err := c.conditionFor(ctx, key, operator, value, sb)
 	if err != nil {
 		return "", err
 	}
 
 	if operator.AddDefaultExistsFilter() {
 		// skip adding exists filter for intrinsic fields
-		field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
+		field, _ := c.fm.FieldFor(ctx, key)
 		if slices.Contains(maps.Keys(IntrinsicFields), field) ||
 			slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
 			slices.Contains(maps.Keys(CalculatedFields), field) ||
@@ -262,7 +260,7 @@ func (c *conditionBuilder) ConditionFor(
 			return condition, nil
 		}
 
-		existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
+		existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
 		if err != nil {
 			return "", err
 		}
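Note: unlike the metrics condition builder, which ignores both trailing range arguments, the traces builder keeps the first one named: buildSpanScopeCondition still needs startNs to bound root/entry-point span lookups. An illustrative call against the new signature; key and sb stand in for the test fixtures, and the timestamps reuse the fixture values seen in the next hunk:

	cond, err := conditionBuilder.ConditionFor(
		ctx, &key, qbtypes.FilterOperatorExists, nil, sb,
		1761437108000000000, // startNs: still consulted for span-scope fields
		1761458708000000000, // endNs: ignored by this implementation
	)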
@@ -289,7 +289,7 @@ func TestConditionFor(t *testing.T) {
 	for _, tc := range testCases {
 		sb := sqlbuilder.NewSelectBuilder()
 		t.Run(tc.name, func(t *testing.T) {
-			cond, err := conditionBuilder.ConditionFor(ctx, 1761437108000000000, 1761458708000000000, &tc.key, tc.operator, tc.value, sb)
+			cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 1761437108000000000, 1761458708000000000)
 			sb.Where(cond)
 
 			if tc.expectedError != nil {
@@ -169,24 +169,23 @@ func NewFieldMapper() *defaultFieldMapper {
 
 func (m *defaultFieldMapper) getColumn(
 	_ context.Context,
-	_, _ uint64,
 	key *telemetrytypes.TelemetryFieldKey,
-) ([]*schema.Column, error) {
+) (*schema.Column, error) {
 	switch key.FieldContext {
 	case telemetrytypes.FieldContextResource:
-		return []*schema.Column{indexV3Columns["resource"]}, nil
+		return indexV3Columns["resource"], nil
 	case telemetrytypes.FieldContextScope:
-		return []*schema.Column{}, qbtypes.ErrColumnNotFound
+		return nil, qbtypes.ErrColumnNotFound
 	case telemetrytypes.FieldContextAttribute:
 		switch key.FieldDataType {
 		case telemetrytypes.FieldDataTypeString:
-			return []*schema.Column{indexV3Columns["attributes_string"]}, nil
+			return indexV3Columns["attributes_string"], nil
 		case telemetrytypes.FieldDataTypeInt64,
 			telemetrytypes.FieldDataTypeFloat64,
 			telemetrytypes.FieldDataTypeNumber:
-			return []*schema.Column{indexV3Columns["attributes_number"]}, nil
+			return indexV3Columns["attributes_number"], nil
 		case telemetrytypes.FieldDataTypeBool:
-			return []*schema.Column{indexV3Columns["attributes_bool"]}, nil
+			return indexV3Columns["attributes_bool"], nil
 		}
 	case telemetrytypes.FieldContextSpan, telemetrytypes.FieldContextUnspecified:
 		/*
@@ -197,7 +196,7 @@ func (m *defaultFieldMapper) getColumn(
 		// Check if this is a span scope field
 		if strings.ToLower(key.Name) == SpanSearchScopeRoot || strings.ToLower(key.Name) == SpanSearchScopeEntryPoint {
 			// The actual SQL will be generated in the condition builder
-			return []*schema.Column{{Name: key.Name, Type: schema.ColumnTypeBool}}, nil
+			return &schema.Column{Name: key.Name, Type: schema.ColumnTypeBool}, nil
 		}
 
 		// TODO(srikanthccv): remove this when it's safe to remove
@@ -211,18 +210,18 @@ func (m *defaultFieldMapper) getColumn(
 		if _, ok := CalculatedFieldsDeprecated[key.Name]; ok {
 			// Check if we have a mapping for the deprecated calculated field
 			if col, ok := indexV3Columns[oldToNew[key.Name]]; ok {
-				return []*schema.Column{col}, nil
+				return col, nil
 			}
 		}
 		if _, ok := IntrinsicFieldsDeprecated[key.Name]; ok {
 			// Check if we have a mapping for the deprecated intrinsic field
 			if col, ok := indexV3Columns[oldToNew[key.Name]]; ok {
-				return []*schema.Column{col}, nil
+				return col, nil
 			}
 		}
 
 		if col, ok := indexV3Columns[key.Name]; ok {
-			return []*schema.Column{col}, nil
+			return col, nil
 		}
 	}
 	return nil, qbtypes.ErrColumnNotFound
@@ -230,17 +229,15 @@ func (m *defaultFieldMapper) getColumn(
 
 func (m *defaultFieldMapper) ColumnFor(
 	ctx context.Context,
-	startNs, endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
-) ([]*schema.Column, error) {
-	return m.getColumn(ctx, startNs, endNs, key)
+) (*schema.Column, error) {
+	return m.getColumn(ctx, key)
 }
 
 // FieldFor returns the table field name for the given key if it exists
 // otherwise it returns qbtypes.ErrColumnNotFound
 func (m *defaultFieldMapper) FieldFor(
 	ctx context.Context,
-	startNs, endNs uint64,
 	key *telemetrytypes.TelemetryFieldKey,
 ) (string, error) {
 	// Special handling for span scope fields
@@ -250,11 +247,10 @@ func (m *defaultFieldMapper) FieldFor(
 		return key.Name, nil
 	}
 
-	columns, err := m.getColumn(ctx, startNs, endNs, key)
+	column, err := m.getColumn(ctx, key)
 	if err != nil {
 		return "", err
 	}
-	column := columns[0]
 
 	switch column.Type.GetType() {
 	case schema.ColumnTypeEnumJSON:
@@ -314,12 +310,11 @@ func (m *defaultFieldMapper) FieldFor(
 // if it exists otherwise it returns qbtypes.ErrColumnNotFound
 func (m *defaultFieldMapper) ColumnExpressionFor(
 	ctx context.Context,
-	startNs, endNs uint64,
 	field *telemetrytypes.TelemetryFieldKey,
 	keys map[string][]*telemetrytypes.TelemetryFieldKey,
 ) (string, error) {
 
-	colName, err := m.FieldFor(ctx, startNs, endNs, field)
+	colName, err := m.FieldFor(ctx, field)
 	if errors.Is(err, qbtypes.ErrColumnNotFound) {
 		// the key didn't have the right context to be added to the query
 		// we try to use the context we know of
@@ -329,7 +324,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
 		if _, ok := indexV3Columns[field.Name]; ok {
 			// if it is, attach the column name directly
 			field.FieldContext = telemetrytypes.FieldContextSpan
-			colName, _ = m.FieldFor(ctx, startNs, endNs, field)
+			colName, _ = m.FieldFor(ctx, field)
 		} else {
 			// - the context is not provided
 			// - there are not keys for the field
@@ -347,12 +342,12 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
 			}
 		} else if len(keysForField) == 1 {
 			// we have a single key for the field, use it
-			colName, _ = m.FieldFor(ctx, startNs, endNs, keysForField[0])
+			colName, _ = m.FieldFor(ctx, keysForField[0])
 		} else {
 			// select any non-empty value from the keys
 			args := []string{}
 			for _, key := range keysForField {
-				colName, _ = m.FieldFor(ctx, startNs, endNs, key)
+				colName, _ = m.FieldFor(ctx, key)
 				args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
 			}
 			colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
@@ -92,7 +92,7 @@ func TestGetFieldKeyName(t *testing.T) {
 	for _, tc := range testCases {
 		t.Run(tc.name, func(t *testing.T) {
 			fm := NewFieldMapper()
-			result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
+			result, err := fm.FieldFor(ctx, &tc.key)
 
 			if tc.expectedError != nil {
 				assert.Equal(t, tc.expectedError, err)
@@ -1,7 +1,6 @@
 package telemetrytraces
 
 import (
-	"context"
 	"testing"
 
 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -76,16 +75,13 @@ func TestSpanScopeFilterExpression(t *testing.T) {
 				FieldContext: telemetrytypes.FieldContextSpan,
 			}}
 
-			whereClause, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
-				Context:          context.Background(),
+			whereClause, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
 				Logger:           instrumentationtest.New().Logger(),
 				FieldMapper:      fm,
 				ConditionBuilder: cb,
 				FieldKeys:        fieldKeys,
 				Builder:          sb,
-				StartNs:          tt.startNs,
-				EndNs:            1761458708000000000,
-			})
+			}, tt.startNs, 1761458708000000000)
 
 			if tt.expectError {
 				assert.Error(t, err)
@@ -146,16 +142,13 @@ func TestSpanScopeWithResourceFilter(t *testing.T) {
 				FieldContext: telemetrytypes.FieldContextResource,
 			}}
 
-			_, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
-				Context:            context.Background(),
+			_, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
 				Logger:             instrumentationtest.New().Logger(),
 				FieldMapper:        fm,
 				ConditionBuilder:   cb,
 				FieldKeys:          fieldKeys,
 				SkipResourceFilter: false, // This would be set by the statement builder
-				StartNs:            1761437108000000000,
-				EndNs:              1761458708000000000,
-			})
+			}, 1761437108000000000, 1761458708000000000)
 
 			assert.NoError(t, err)
 		})
@@ -313,7 +313,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
 
 	// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
 	for _, field := range query.SelectFields {
-		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &field, keys)
+		colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
 		if err != nil {
 			return nil, err
 		}
@@ -331,7 +331,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
 
 	// Add order by
 	for _, orderBy := range query.Order {
-		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
+		colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
 		if err != nil {
 			return nil, err
 		}
@@ -515,7 +515,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
 	// Keep original column expressions so we can build the tuple
 	fieldNames := make([]string, 0, len(query.GroupBy))
 	for _, gb := range query.GroupBy {
-		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
+		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
 		if err != nil {
 			return nil, err
 		}
@@ -529,7 +529,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
 	allAggChArgs := make([]any, 0)
 	for i, agg := range query.Aggregations {
 		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
-			ctx, start, end, agg.Expression,
+			ctx, agg.Expression,
 			uint64(query.StepInterval.Seconds()),
 			keys,
 		)
@@ -657,7 +657,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
 
 	var allGroupByArgs []any
 	for _, gb := range query.GroupBy {
-		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
+		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
 		if err != nil {
 			return nil, err
 		}
@@ -674,7 +674,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
 	for idx := range query.Aggregations {
 		aggExpr := query.Aggregations[idx]
 		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
-			ctx, start, end, aggExpr.Expression,
+			ctx, aggExpr.Expression,
 			rateInterval,
 			keys,
 		)
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
 
 // buildFilterCondition builds SQL condition from filter expression
 func (b *traceQueryStatementBuilder) addFilterCondition(
-	ctx context.Context,
+	_ context.Context,
 	sb *sqlbuilder.SelectBuilder,
 	start, end uint64,
 	query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
@@ -760,16 +760,13 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
 	if query.Filter != nil && query.Filter.Expression != "" {
 		// add filter expression
 		preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
-			Context:            ctx,
 			Logger:             b.logger,
 			FieldMapper:        b.fm,
 			ConditionBuilder:   b.cb,
 			FieldKeys:          keys,
 			SkipResourceFilter: true,
 			Variables:          variables,
-			StartNs:            start,
-			EndNs:              end,
-		})
+		}, start, end)
 
 		if err != nil {
 			return nil, err
@@ -232,15 +232,12 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
 		filterWhereClause, err := querybuilder.PrepareWhereClause(
 			query.Filter.Expression,
 			querybuilder.FilterExprVisitorOpts{
-				Context:            ctx,
 				Logger:             b.stmtBuilder.logger,
 				FieldMapper:        b.stmtBuilder.fm,
 				ConditionBuilder:   b.stmtBuilder.cb,
 				FieldKeys:          keys,
 				SkipResourceFilter: true,
-				StartNs:            b.start,
-				EndNs:              b.end,
-			},
+			}, b.start, b.end,
 		)
 		if err != nil {
 			b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -453,7 +450,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
 		if selectedFields[field.Name] {
 			continue
 		}
-		colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &field, keys)
+		colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &field, keys)
 		if err != nil {
 			b.stmtBuilder.logger.WarnContext(ctx, "failed to map select field",
 				"field", field.Name, "error", err)
@@ -468,7 +465,7 @@ func (b *traceOperatorCTEBuilder) buildListQuery(ctx context.Context, selectFrom
 	// Add order by support using ColumnExpressionFor
 	orderApplied := false
 	for _, orderBy := range b.operator.Order {
-		colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, b.start, b.end, &orderBy.Key.TelemetryFieldKey, keys)
+		colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)
 		if err != nil {
 			return nil, err
 		}
@@ -550,8 +547,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
 	for _, gb := range b.operator.GroupBy {
 		expr, args, err := querybuilder.CollisionHandledFinalExpr(
 			ctx,
-			b.start,
-			b.end,
 			&gb.TelemetryFieldKey,
 			b.stmtBuilder.fm,
 			b.stmtBuilder.cb,
@@ -576,8 +571,6 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
 	for i, agg := range b.operator.Aggregations {
 		rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
 			ctx,
-			b.start,
-			b.end,
 			agg.Expression,
 			uint64(b.operator.StepInterval.Seconds()),
 			keys,
@@ -663,8 +656,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
 	for _, gb := range b.operator.GroupBy {
 		expr, args, err := querybuilder.CollisionHandledFinalExpr(
 			ctx,
-			b.start,
-			b.end,
 			&gb.TelemetryFieldKey,
 			b.stmtBuilder.fm,
 			b.stmtBuilder.cb,
@@ -691,8 +682,6 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
 	for i, agg := range b.operator.Aggregations {
 		rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
 			ctx,
-			b.start,
-			b.end,
 			agg.Expression,
 			rateInterval,
 			keys,
@@ -806,8 +795,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
 	for _, gb := range b.operator.GroupBy {
 		expr, args, err := querybuilder.CollisionHandledFinalExpr(
 			ctx,
-			b.start,
-			b.end,
 			&gb.TelemetryFieldKey,
 			b.stmtBuilder.fm,
 			b.stmtBuilder.cb,
@@ -832,8 +819,6 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
 	for i, agg := range b.operator.Aggregations {
 		rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
 			ctx,
-			b.start,
-			b.end,
 			agg.Expression,
 			uint64((b.end-b.start)/querybuilder.NsToSeconds),
 			keys,
@@ -2,13 +2,12 @@ package telemetrytraces
 
 import (
 	"context"
+	"log/slog"
 
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/factory"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
-	"log/slog"
 )
 
 type traceOperatorStatementBuilder struct {
Some files were not shown because too many files have changed in this diff