Compare commits

..

8 Commits

Author SHA1 Message Date
Piyush Singariya 541f19c34a fix: array type filtering from dynamic arrays 2026-03-30 12:59:31 +05:30
Piyush Singariya 010db03d6e fix: indexed tests passing 2026-03-30 12:24:26 +05:30
Piyush Singariya 5408acbd8c fix: primitive conditions working 2026-03-30 12:01:35 +05:30
Piyush Singariya 0de6c85f81 feat: align negative operators to include other logs 2026-03-28 10:30:11 +05:30
Piyush Singariya 69ec24fa05 test: fix unit tests 2026-03-27 15:12:49 +05:30
Piyush Singariya 539d732b65 fix: contextual path index usage 2026-03-27 14:44:51 +05:30
Piyush Singariya 843d5fb199 Merge branch 'main' into feat/json-index 2026-03-27 14:17:52 +05:30
Piyush Singariya fabdfb8cc1 feat: enable JSON Path index 2026-03-27 14:07:37 +05:30
18 changed files with 1188 additions and 1276 deletions

View File

@@ -1,143 +0,0 @@
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';
import { ErrorResponseHandlerV2 } from './ErrorResponseHandlerV2';
function makeAxiosError(
overrides: Partial<AxiosError<ErrorV2Resp>>,
): AxiosError<ErrorV2Resp> {
return {
isAxiosError: true,
name: 'AxiosError',
message: 'Request failed',
config: {} as any,
toJSON: () => ({}),
...overrides,
} as AxiosError<ErrorV2Resp>;
}
describe('ErrorResponseHandlerV2', () => {
describe('when the server responds with a well-formed error body', () => {
it('throws an APIError with fields from response.data.error', () => {
const error = makeAxiosError({
response: {
status: 403,
data: {
error: {
code: 'FORBIDDEN',
message: 'only editors/admins can access this resource',
url: '/api/v1/dashboards/123',
errors: [],
},
},
headers: {} as any,
config: {} as any,
statusText: 'Forbidden',
},
});
expect(() => ErrorResponseHandlerV2(error)).toThrow(APIError);
try {
ErrorResponseHandlerV2(error);
} catch (e) {
expect(e).toBeInstanceOf(APIError);
const apiError = e as APIError;
expect(apiError.getHttpStatusCode()).toBe(403);
expect(apiError.getErrorMessage()).toBe(
'only editors/admins can access this resource',
);
expect(apiError.getErrorCode()).toBe('FORBIDDEN');
}
});
});
describe('when the server responds with a null error body', () => {
it('throws an APIError without crashing, using fallback values', () => {
const error = makeAxiosError({
name: 'AxiosError',
message: 'timeout exceeded',
response: {
status: 504,
data: (null as unknown) as ErrorV2Resp,
headers: {} as any,
config: {} as any,
statusText: 'Gateway Timeout',
},
});
expect(() => ErrorResponseHandlerV2(error)).toThrow(APIError);
try {
ErrorResponseHandlerV2(error);
} catch (e) {
expect(e).toBeInstanceOf(APIError);
const apiError = e as APIError;
expect(apiError.getHttpStatusCode()).toBe(504);
expect(apiError.getErrorMessage()).toBe('timeout exceeded');
}
});
it('throws an APIError when response.data.error is missing', () => {
const error = makeAxiosError({
name: 'AxiosError',
message: 'Bad Gateway',
response: {
status: 502,
data: {} as ErrorV2Resp,
headers: {} as any,
config: {} as any,
statusText: 'Bad Gateway',
},
});
expect(() => ErrorResponseHandlerV2(error)).toThrow(APIError);
try {
ErrorResponseHandlerV2(error);
} catch (e) {
expect(e).toBeInstanceOf(APIError);
const apiError = e as APIError;
expect(apiError.getHttpStatusCode()).toBe(502);
}
});
});
describe('when no response is received (network/timeout)', () => {
it('throws an APIError using error.message', () => {
const error = makeAxiosError({
request: {},
name: 'ECONNABORTED',
message: 'timeout exceeded',
status: undefined,
});
expect(() => ErrorResponseHandlerV2(error)).toThrow(APIError);
try {
ErrorResponseHandlerV2(error);
} catch (e) {
expect(e).toBeInstanceOf(APIError);
const apiError = e as APIError;
expect(apiError.getErrorMessage()).toBe('timeout exceeded');
}
});
});
describe('when the error is a setup error (no request or response)', () => {
it('throws an APIError using error.name and error.message', () => {
const error = makeAxiosError({
name: 'Error',
message: 'Something went wrong setting up the request',
});
expect(() => ErrorResponseHandlerV2(error)).toThrow(APIError);
try {
ErrorResponseHandlerV2(error);
} catch (e) {
expect(e).toBeInstanceOf(APIError);
const apiError = e as APIError;
expect(apiError.getErrorMessage()).toBe(
'Something went wrong setting up the request',
);
}
});
});
});

View File

@@ -8,14 +8,13 @@ export function ErrorResponseHandlerV2(error: AxiosError<ErrorV2Resp>): never {
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
if (response) {
const errorData = response.data?.error;
throw new APIError({
httpStatusCode: response.status || 500,
error: {
code: errorData?.code ?? error.name,
message: errorData?.message ?? error.message,
url: errorData?.url ?? '',
errors: errorData?.errors ?? [],
code: response.data.error.code,
message: response.data.error.message,
url: response.data.error.url,
errors: response.data.error.errors,
},
});
}

View File

@@ -202,8 +202,19 @@ function InviteMembersModal({
onComplete?.();
} catch (err) {
const apiErr = err as APIError;
const errorMessage = apiErr?.getErrorMessage?.() ?? 'An error occurred';
toast.error(errorMessage, { richColors: true });
if (apiErr?.getHttpStatusCode() === 409) {
toast.error(
touchedRows.length === 1
? `${touchedRows[0].email} is already a member`
: 'Invite for one or more users already exists',
{ richColors: true },
);
} else {
const errorMessage = apiErr?.getErrorMessage?.() ?? 'An error occurred';
toast.error(`Failed to send invites: ${errorMessage}`, {
richColors: true,
});
}
} finally {
setIsSubmitting(false);
}

View File

@@ -1,18 +1,9 @@
import { toast } from '@signozhq/sonner';
import inviteUsers from 'api/v1/invite/bulk/create';
import sendInvite from 'api/v1/invite/create';
import { StatusCodes } from 'http-status-codes';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import APIError from 'types/api/error';
import InviteMembersModal from '../InviteMembersModal';
const makeApiError = (message: string, code = StatusCodes.CONFLICT): APIError =>
new APIError({
httpStatusCode: code,
error: { code: 'already_exists', message, url: '', errors: [] },
});
jest.mock('api/v1/invite/create');
jest.mock('api/v1/invite/bulk/create');
jest.mock('@signozhq/sonner', () => ({
@@ -151,90 +142,6 @@ describe('InviteMembersModal', () => {
});
});
describe('error handling', () => {
it('shows BE message on single invite 409', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
mockSendInvite.mockRejectedValue(
makeApiError('An invite already exists for this email: single@signoz.io'),
);
render(<InviteMembersModal {...defaultProps} />);
await user.type(
screen.getAllByPlaceholderText('john@signoz.io')[0],
'single@signoz.io',
);
await user.click(screen.getAllByText('Select roles')[0]);
await user.click(await screen.findByText('Viewer'));
await user.click(
screen.getByRole('button', { name: /invite team members/i }),
);
await waitFor(() => {
expect(toast.error).toHaveBeenCalledWith(
'An invite already exists for this email: single@signoz.io',
expect.anything(),
);
});
});
it('shows BE message on bulk invite 409', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
mockInviteUsers.mockRejectedValue(
makeApiError('An invite already exists for this email: alice@signoz.io'),
);
render(<InviteMembersModal {...defaultProps} />);
const emailInputs = screen.getAllByPlaceholderText('john@signoz.io');
await user.type(emailInputs[0], 'alice@signoz.io');
await user.click(screen.getAllByText('Select roles')[0]);
await user.click(await screen.findByText('Viewer'));
await user.type(emailInputs[1], 'bob@signoz.io');
await user.click(screen.getAllByText('Select roles')[0]);
const editorOptions = await screen.findAllByText('Editor');
await user.click(editorOptions[editorOptions.length - 1]);
await user.click(
screen.getByRole('button', { name: /invite team members/i }),
);
await waitFor(() => {
expect(toast.error).toHaveBeenCalledWith(
'An invite already exists for this email: alice@signoz.io',
expect.anything(),
);
});
});
it('shows BE message on generic error', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
mockSendInvite.mockRejectedValue(
makeApiError('Internal server error', StatusCodes.INTERNAL_SERVER_ERROR),
);
render(<InviteMembersModal {...defaultProps} />);
await user.type(
screen.getAllByPlaceholderText('john@signoz.io')[0],
'single@signoz.io',
);
await user.click(screen.getAllByText('Select roles')[0]);
await user.click(await screen.findByText('Viewer'));
await user.click(
screen.getByRole('button', { name: /invite team members/i }),
);
await waitFor(() => {
expect(toast.error).toHaveBeenCalledWith(
'Internal server error',
expect.anything(),
);
});
});
});
it('uses inviteUsers (bulk) when multiple rows are filled', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const onComplete = jest.fn();

View File

@@ -464,10 +464,14 @@ function GeneralSettings({
onModalToggleHandler(type);
};
const { isCloudUser: isCloudUserVal } = useGetTenantLicense();
const {
isCloudUser: isCloudUserVal,
isEnterpriseSelfHostedUser,
} = useGetTenantLicense();
const isAdmin = user.role === USER_ROLES.ADMIN;
const showCustomDomainSettings = isCloudUserVal && isAdmin;
const showCustomDomainSettings =
(isCloudUserVal || isEnterpriseSelfHostedUser) && isAdmin;
const renderConfig = [
{

View File

@@ -38,6 +38,7 @@ jest.mock('hooks/useComponentPermission', () => ({
jest.mock('hooks/useGetTenantLicense', () => ({
useGetTenantLicense: jest.fn(() => ({
isCloudUser: false,
isEnterpriseSelfHostedUser: false,
})),
}));
@@ -388,6 +389,7 @@ describe('GeneralSettings - S3 Logs Retention', () => {
beforeEach(() => {
(useGetTenantLicense as jest.Mock).mockReturnValue({
isCloudUser: true,
isEnterpriseSelfHostedUser: false,
});
});
@@ -409,14 +411,15 @@ describe('GeneralSettings - S3 Logs Retention', () => {
});
});
describe('Non-cloud user rendering', () => {
describe('Enterprise Self-Hosted User Rendering', () => {
beforeEach(() => {
(useGetTenantLicense as jest.Mock).mockReturnValue({
isCloudUser: false,
isEnterpriseSelfHostedUser: true,
});
});
it('should not render CustomDomainSettings or GeneralSettingsCloud', () => {
it('should render CustomDomainSettings but not GeneralSettingsCloud', () => {
render(
<GeneralSettings
metricsTtlValuesPayload={mockMetricsRetention}
@@ -429,14 +432,12 @@ describe('GeneralSettings - S3 Logs Retention', () => {
/>,
);
expect(
screen.queryByTestId('custom-domain-settings'),
).not.toBeInTheDocument();
expect(screen.getByTestId('custom-domain-settings')).toBeInTheDocument();
expect(
screen.queryByTestId('general-settings-cloud'),
).not.toBeInTheDocument();
// Save buttons should be visible for non-cloud users (these are from retentions)
// Save buttons should be visible for self-hosted
const saveButtons = screen.getAllByRole('button', { name: /save/i });
expect(saveButtons.length).toBeGreaterThan(0);
});

View File

@@ -217,7 +217,7 @@ function K8sVolumesList({
{
dataSource: currentQuery.builder.queryData[0].dataSource,
aggregateAttribute: GetK8sEntityToAggregateAttribute(
K8sCategory.VOLUMES,
K8sCategory.NODES,
dotMetricsEnabled,
),
aggregateOperator: 'noop',
@@ -228,7 +228,7 @@ function K8sVolumesList({
queryKey: [currentQuery.builder.queryData[0].dataSource, 'noop'],
},
true,
K8sCategory.VOLUMES,
K8sCategory.NODES,
);
const query = useMemo(() => {
@@ -597,7 +597,7 @@ function K8sVolumesList({
isLoadingGroupByFilters={isLoadingGroupByFilters}
handleGroupByChange={handleGroupByChange}
selectedGroupBy={groupBy}
entity={K8sCategory.VOLUMES}
entity={K8sCategory.NODES}
showAutoRefresh={!selectedVolumeData}
/>
{isError && <Typography>{data?.error || 'Something went wrong'}</Typography>}

View File

@@ -1,162 +0,0 @@
import setupCommonMocks from '../commonMocks';
setupCommonMocks();
import { QueryClient, QueryClientProvider } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { render, waitFor } from '@testing-library/react';
import { FeatureKeys } from 'constants/features';
import K8sVolumesList from 'container/InfraMonitoringK8s/Volumes/K8sVolumesList';
import { rest, server } from 'mocks-server/server';
import { IAppContext, IUser } from 'providers/App/types';
import store from 'store';
import { LicenseResModel } from 'types/api/licensesV3/getActive';
const queryClient = new QueryClient({
defaultOptions: {
queries: {
retry: false,
cacheTime: 0,
},
},
});
const SERVER_URL = 'http://localhost/api';
describe('K8sVolumesList - useGetAggregateKeys Category Regression', () => {
let requestsMade: Array<{
url: string;
params: URLSearchParams;
body?: any;
}> = [];
beforeEach(() => {
requestsMade = [];
queryClient.clear();
server.use(
rest.get(`${SERVER_URL}/v3/autocomplete/attribute_keys`, (req, res, ctx) => {
const url = req.url.toString();
const params = req.url.searchParams;
requestsMade.push({
url,
params,
});
return res(
ctx.status(200),
ctx.json({
status: 'success',
data: {
attributeKeys: [],
},
}),
);
}),
rest.post(`${SERVER_URL}/v1/pvcs/list`, (req, res, ctx) =>
res(
ctx.status(200),
ctx.json({
status: 'success',
data: {
type: 'list',
records: [],
groups: null,
total: 0,
sentAnyHostMetricsData: false,
isSendingK8SAgentMetrics: false,
},
}),
),
),
);
});
afterEach(() => {
server.resetHandlers();
});
it('should call aggregate keys API with k8s_volume_capacity', async () => {
render(
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<MemoryRouter>
<K8sVolumesList
isFiltersVisible={false}
handleFilterVisibilityChange={jest.fn()}
quickFiltersLastUpdated={-1}
/>
</MemoryRouter>
</Provider>
</QueryClientProvider>,
);
await waitFor(() => {
expect(requestsMade.length).toBeGreaterThan(0);
});
// Find the attribute_keys request
const attributeKeysRequest = requestsMade.find((req) =>
req.url.includes('/autocomplete/attribute_keys'),
);
expect(attributeKeysRequest).toBeDefined();
const aggregateAttribute = attributeKeysRequest?.params.get(
'aggregateAttribute',
);
expect(aggregateAttribute).toBe('k8s_volume_capacity');
});
it('should call aggregate keys API with k8s.volume.capacity when dotMetrics enabled', async () => {
jest
.spyOn(await import('providers/App/App'), 'useAppContext')
.mockReturnValue({
featureFlags: [
{
name: FeatureKeys.DOT_METRICS_ENABLED,
active: true,
usage: 0,
usage_limit: 0,
route: '',
},
],
user: { role: 'ADMIN' } as IUser,
activeLicense: (null as unknown) as LicenseResModel,
} as IAppContext);
render(
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<MemoryRouter>
<K8sVolumesList
isFiltersVisible={false}
handleFilterVisibilityChange={jest.fn()}
quickFiltersLastUpdated={-1}
/>
</MemoryRouter>
</Provider>
</QueryClientProvider>,
);
await waitFor(() => {
expect(requestsMade.length).toBeGreaterThan(0);
});
const attributeKeysRequest = requestsMade.find((req) =>
req.url.includes('/autocomplete/attribute_keys'),
);
expect(attributeKeysRequest).toBeDefined();
const aggregateAttribute = attributeKeysRequest?.params.get(
'aggregateAttribute',
);
expect(aggregateAttribute).toBe('k8s.volume.capacity');
});
});

View File

@@ -1090,21 +1090,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
}
processingPostCache := time.Now()
// When req.Limit is 0 (not set by the client), selectAllSpans is set to false
// preserving the old paged behaviour for backward compatibility
limit := min(req.Limit, tracedetail.MaxLimitToSelectAllSpans)
selectAllSpans := totalSpans <= uint64(limit)
var (
selectedSpans []*model.Span
uncollapsedSpans []string
rootServiceName, rootServiceEntryPoint string
)
if selectAllSpans {
selectedSpans, rootServiceName, rootServiceEntryPoint = tracedetail.GetAllSpans(traceRoots)
} else {
selectedSpans, uncollapsedSpans, rootServiceName, rootServiceEntryPoint = tracedetail.GetSelectedSpans(req.UncollapsedSpans, req.SelectedSpanID, traceRoots, spanIdToSpanNodeMap, req.IsSelectedSpanIDUnCollapsed)
}
selectedSpans, uncollapsedSpans, rootServiceName, rootServiceEntryPoint := tracedetail.GetSelectedSpans(req.UncollapsedSpans, req.SelectedSpanID, traceRoots, spanIdToSpanNodeMap, req.IsSelectedSpanIDUnCollapsed)
r.logger.Info("getWaterfallSpansForTraceWithMetadata: processing post cache", "duration", time.Since(processingPostCache), "traceID", traceID)
// convert start timestamp to millis because right now frontend is expecting it in millis
@@ -1117,7 +1103,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
}
response.Spans = selectedSpans
response.UncollapsedSpans = uncollapsedSpans // ignoring if all spans are returning
response.UncollapsedSpans = uncollapsedSpans
response.StartTimestampMillis = startTime / 1000000
response.EndTimestampMillis = endTime / 1000000
response.TotalSpansCount = totalSpans
@@ -1126,7 +1112,6 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
response.RootServiceEntryPoint = rootServiceEntryPoint
response.ServiceNameToTotalDurationMap = serviceNameToTotalDurationMap
response.HasMissingSpans = hasMissingSpans
response.HasMore = !selectAllSpans
return response, nil
}
@@ -3386,8 +3371,8 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, org
// Query all relevant metric names from time_series_v4, but leave metadata retrieval to cache/db
query := fmt.Sprintf(
`SELECT DISTINCT metric_name
FROM %s.%s
`SELECT DISTINCT metric_name
FROM %s.%s
WHERE metric_name ILIKE $1 AND __normalized = $2`,
signozMetricDBName, signozTSTableNameV41Day)
@@ -3464,8 +3449,8 @@ func (r *ClickHouseReader) GetMeterAggregateAttributes(ctx context.Context, orgI
var response v3.AggregateAttributeResponse
// Query all relevant metric names from time_series_v4, but leave metadata retrieval to cache/db
query := fmt.Sprintf(
`SELECT metric_name,type,temporality,is_monotonic
FROM %s.%s
`SELECT metric_name,type,temporality,is_monotonic
FROM %s.%s
WHERE metric_name ILIKE $1
GROUP BY metric_name,type,temporality,is_monotonic`,
signozMeterDBName, signozMeterSamplesName)
@@ -5161,7 +5146,7 @@ func (r *ClickHouseReader) GetOverallStateTransitions(ctx context.Context, ruleI
state,
unix_milli AS firing_time
FROM %s.%s
WHERE overall_state = '` + model.StateFiring.String() + `'
WHERE overall_state = '` + model.StateFiring.String() + `'
AND overall_state_changed = true
AND rule_id IN ('%s')
AND unix_milli >= %d AND unix_milli <= %d
@@ -5172,7 +5157,7 @@ resolution_events AS (
state,
unix_milli AS resolution_time
FROM %s.%s
WHERE overall_state = '` + model.StateInactive.String() + `'
WHERE overall_state = '` + model.StateInactive.String() + `'
AND overall_state_changed = true
AND rule_id IN ('%s')
AND unix_milli >= %d AND unix_milli <= %d
@@ -5293,7 +5278,7 @@ WITH firing_events AS (
state,
unix_milli AS firing_time
FROM %s.%s
WHERE overall_state = '` + model.StateFiring.String() + `'
WHERE overall_state = '` + model.StateFiring.String() + `'
AND overall_state_changed = true
AND rule_id IN ('%s')
AND unix_milli >= %d AND unix_milli <= %d
@@ -5304,7 +5289,7 @@ resolution_events AS (
state,
unix_milli AS resolution_time
FROM %s.%s
WHERE overall_state = '` + model.StateInactive.String() + `'
WHERE overall_state = '` + model.StateInactive.String() + `'
AND overall_state_changed = true
AND rule_id IN ('%s')
AND unix_milli >= %d AND unix_milli <= %d
@@ -5350,7 +5335,7 @@ WITH firing_events AS (
state,
unix_milli AS firing_time
FROM %s.%s
WHERE overall_state = '` + model.StateFiring.String() + `'
WHERE overall_state = '` + model.StateFiring.String() + `'
AND overall_state_changed = true
AND rule_id IN ('%s')
AND unix_milli >= %d AND unix_milli <= %d
@@ -5361,7 +5346,7 @@ resolution_events AS (
state,
unix_milli AS resolution_time
FROM %s.%s
WHERE overall_state = '` + model.StateInactive.String() + `'
WHERE overall_state = '` + model.StateInactive.String() + `'
AND overall_state_changed = true
AND rule_id IN ('%s')
AND unix_milli >= %d AND unix_milli <= %d
@@ -5623,7 +5608,7 @@ func (r *ClickHouseReader) GetMetricsDataPoints(ctx context.Context, metricName
instrumentationtypes.CodeNamespace: "clickhouse-reader",
instrumentationtypes.CodeFunctionName: "GetMetricsDataPoints",
})
query := fmt.Sprintf(`SELECT
query := fmt.Sprintf(`SELECT
sum(count) as data_points
FROM %s.%s
WHERE metric_name = ?
@@ -5643,7 +5628,7 @@ func (r *ClickHouseReader) GetMetricsLastReceived(ctx context.Context, metricNam
instrumentationtypes.CodeNamespace: "clickhouse-reader",
instrumentationtypes.CodeFunctionName: "GetMetricsLastReceived",
})
query := fmt.Sprintf(`SELECT
query := fmt.Sprintf(`SELECT
MAX(unix_milli) AS last_received_time
FROM %s.%s
WHERE metric_name = ?
@@ -5654,7 +5639,7 @@ WHERE metric_name = ?
if err != nil {
return 0, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
query = fmt.Sprintf(`SELECT
query = fmt.Sprintf(`SELECT
MAX(unix_milli) AS last_received_time
FROM %s.%s
WHERE metric_name = ? and unix_milli > ?
@@ -5673,7 +5658,7 @@ func (r *ClickHouseReader) GetTotalTimeSeriesForMetricName(ctx context.Context,
instrumentationtypes.CodeNamespace: "clickhouse-reader",
instrumentationtypes.CodeFunctionName: "GetTotalTimeSeriesForMetricName",
})
query := fmt.Sprintf(`SELECT
query := fmt.Sprintf(`SELECT
uniq(fingerprint) AS timeSeriesCount
FROM %s.%s
WHERE metric_name = ?;`, signozMetricDBName, signozTSTableNameV41Week)
@@ -5705,7 +5690,7 @@ func (r *ClickHouseReader) GetAttributesForMetricName(ctx context.Context, metri
}
const baseQueryTemplate = `
SELECT
SELECT
kv.1 AS key,
arrayMap(x -> trim(BOTH '"' FROM x), groupUniqArray(1000)(kv.2)) AS values,
length(groupUniqArray(10000)(kv.2)) AS valueCount
@@ -5814,7 +5799,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.End)
metricsQuery := fmt.Sprintf(
`SELECT
`SELECT
t.metric_name AS metric_name,
ANY_VALUE(t.description) AS description,
ANY_VALUE(t.type) AS metric_type,
@@ -5879,11 +5864,11 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
if whereClause != "" {
sb.WriteString(fmt.Sprintf(
`SELECT
`SELECT
s.samples,
s.metric_name
FROM (
SELECT
SELECT
dm.metric_name,
%s AS samples
FROM %s.%s AS dm
@@ -5913,11 +5898,11 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
} else {
// If no filters, it is a simpler query.
sb.WriteString(fmt.Sprintf(
`SELECT
`SELECT
s.samples,
s.metric_name
FROM (
SELECT
SELECT
metric_name,
%s AS samples
FROM %s.%s
@@ -6022,16 +6007,16 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
// Construct the query without backticks
query := fmt.Sprintf(`
SELECT
SELECT
metric_name,
total_value,
(total_value * 100.0 / total_time_series) AS percentage
FROM (
SELECT
SELECT
metric_name,
uniq(fingerprint) AS total_value,
(SELECT uniq(fingerprint)
FROM %s.%s
(SELECT uniq(fingerprint)
FROM %s.%s
WHERE unix_milli BETWEEN ? AND ? AND __normalized = ?) AS total_time_series
FROM %s.%s
WHERE unix_milli BETWEEN ? AND ? AND NOT startsWith(metric_name, 'signoz') AND __normalized = ? %s
@@ -6107,7 +6092,7 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
queryLimit := 50 + req.Limit
metricsQuery := fmt.Sprintf(
`SELECT
`SELECT
ts.metric_name AS metric_name,
uniq(ts.fingerprint) AS timeSeries
FROM %s.%s AS ts
@@ -6164,13 +6149,13 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
FROM %s.%s
WHERE unix_milli BETWEEN ? AND ?
)
SELECT
SELECT
s.samples,
s.metric_name,
COALESCE((s.samples * 100.0 / t.total_samples), 0) AS percentage
FROM
FROM
(
SELECT
SELECT
dm.metric_name,
%s AS samples
FROM %s.%s AS dm`,
@@ -6191,7 +6176,7 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
if whereClause != "" {
sb.WriteString(fmt.Sprintf(
` AND dm.fingerprint IN (
SELECT ts.fingerprint
SELECT ts.fingerprint
FROM %s.%s AS ts
WHERE ts.metric_name IN (%s)
AND unix_milli BETWEEN ? AND ?
@@ -6262,7 +6247,7 @@ func (r *ClickHouseReader) GetNameSimilarity(ctx context.Context, req *metrics_e
}
query := fmt.Sprintf(`
SELECT
SELECT
metric_name,
any(type) as type,
any(temporality) as temporality,
@@ -6321,7 +6306,7 @@ func (r *ClickHouseReader) GetAttributeSimilarity(ctx context.Context, req *metr
// Get target labels
extractedLabelsQuery := fmt.Sprintf(`
SELECT
SELECT
kv.1 AS label_key,
topK(10)(JSONExtractString(kv.2)) AS label_values
FROM %s.%s
@@ -6365,12 +6350,12 @@ func (r *ClickHouseReader) GetAttributeSimilarity(ctx context.Context, req *metr
priorityListString := strings.Join(priorityList, ", ")
candidateLabelsQuery := fmt.Sprintf(`
WITH
arrayDistinct([%s]) AS filter_keys,
WITH
arrayDistinct([%s]) AS filter_keys,
arrayDistinct([%s]) AS filter_values,
[%s] AS priority_pairs_input,
%d AS priority_multiplier
SELECT
SELECT
metric_name,
any(type) as type,
any(temporality) as temporality,
@@ -6476,17 +6461,17 @@ func (r *ClickHouseReader) GetMetricsAllResourceAttributes(ctx context.Context,
instrumentationtypes.CodeFunctionName: "GetMetricsAllResourceAttributes",
})
start, end, attTable, _ := utils.WhichAttributesTableToUse(start, end)
query := fmt.Sprintf(`SELECT
key,
query := fmt.Sprintf(`SELECT
key,
count(distinct value) AS distinct_value_count
FROM (
SELECT key, value
FROM %s.%s
ARRAY JOIN
ARRAY JOIN
arrayConcat(mapKeys(resource_attributes)) AS key,
arrayConcat(mapValues(resource_attributes)) AS value
WHERE unix_milli between ? and ?
)
)
GROUP BY key
ORDER BY distinct_value_count DESC;`, signozMetadataDbName, attTable)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
@@ -6633,11 +6618,11 @@ func (r *ClickHouseReader) GetInspectMetricsFingerprints(ctx context.Context, at
start, end, tsTable, _ := utils.WhichTSTableToUse(req.Start, req.End)
query := fmt.Sprintf(`
SELECT
SELECT
arrayDistinct(groupArray(toString(fingerprint))) AS fingerprints
FROM
(
SELECT
SELECT
metric_name, labels, fingerprint,
%s
FROM %s.%s
@@ -6808,14 +6793,14 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID
var stillMissing []string
if len(missingMetrics) > 0 {
metricList := "'" + strings.Join(missingMetrics, "', '") + "'"
query := fmt.Sprintf(`SELECT
query := fmt.Sprintf(`SELECT
metric_name,
argMax(type, created_at) AS type,
argMax(description, created_at) AS description,
argMax(temporality, created_at) AS temporality,
argMax(is_monotonic, created_at) AS is_monotonic,
argMax(unit, created_at) AS unit
FROM %s.%s
FROM %s.%s
WHERE metric_name IN (%s)
GROUP BY metric_name;`,
signozMetricDBName,
@@ -6863,7 +6848,7 @@ func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID
if len(stillMissing) > 0 {
metricList := "'" + strings.Join(stillMissing, "', '") + "'"
query := fmt.Sprintf(`SELECT DISTINCT metric_name, type, description, temporality, is_monotonic, unit
FROM %s.%s
FROM %s.%s
WHERE metric_name IN (%s)`, signozMetricDBName, signozTSTableNameV4, metricList)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
rows, err := r.db.Query(valueCtx, query)

View File

@@ -10,9 +10,6 @@ import (
var (
SPAN_LIMIT_PER_REQUEST_FOR_WATERFALL float64 = 500
maxDepthForSelectedSpanChildren int = 5
MaxLimitToSelectAllSpans uint = 10_000
)
type Interval struct {
@@ -92,23 +89,12 @@ func getPathFromRootToSelectedSpanId(node *model.Span, selectedSpanId string) (b
// throughout the recursion. Per-call state (level, isPartOfPreOrder, etc.)
// is passed as direct arguments.
type traverseOpts struct {
uncollapsedSpans map[string]struct{}
selectedSpanID string
isSelectedSpanUncollapsed bool
selectAll bool
uncollapsedSpans map[string]struct{}
selectedSpanID string
}
func traverseTrace(
span *model.Span,
opts traverseOpts,
level uint64,
isPartOfPreOrder bool,
hasSibling bool,
autoExpandDepth int,
) ([]*model.Span, []string) {
func traverseTrace(span *model.Span, opts traverseOpts, level uint64, isPartOfPreOrder bool, hasSibling bool) []*model.Span {
preOrderTraversal := []*model.Span{}
autoExpandedSpans := []string{}
// sort the children to maintain the order across requests
sort.Slice(span.Children, func(i, j int) bool {
@@ -145,36 +131,16 @@ func traverseTrace(
preOrderTraversal = append(preOrderTraversal, &nodeWithoutChildren)
}
remainingAutoExpandDepth := 0
if span.SpanID == opts.selectedSpanID && opts.isSelectedSpanUncollapsed {
remainingAutoExpandDepth = maxDepthForSelectedSpanChildren
} else if autoExpandDepth > 0 {
remainingAutoExpandDepth = autoExpandDepth - 1
}
_, isAlreadyUncollapsed := opts.uncollapsedSpans[span.SpanID]
for index, child := range span.Children {
// A child is included in the pre-order output if its parent is uncollapsed
// OR if the child falls within MAX_DEPTH_FOR_SELECTED_SPAN_CHILDREN levels
// below the selected span.
isChildWithinMaxDepth := remainingAutoExpandDepth > 0
childIsPartOfPreOrder := opts.selectAll || (isPartOfPreOrder && (isAlreadyUncollapsed || isChildWithinMaxDepth))
if isPartOfPreOrder && isChildWithinMaxDepth && !isAlreadyUncollapsed {
if !slices.Contains(autoExpandedSpans, span.SpanID) {
autoExpandedSpans = append(autoExpandedSpans, span.SpanID)
}
}
_childTraversal, _autoExpanded := traverseTrace(child, opts, level+1, childIsPartOfPreOrder, index != (len(span.Children)-1), remainingAutoExpandDepth)
_childTraversal := traverseTrace(child, opts, level+1, isPartOfPreOrder && isAlreadyUncollapsed, index != (len(span.Children)-1))
preOrderTraversal = append(preOrderTraversal, _childTraversal...)
autoExpandedSpans = append(autoExpandedSpans, _autoExpanded...)
nodeWithoutChildren.SubTreeNodeCount += child.SubTreeNodeCount + 1
span.SubTreeNodeCount += child.SubTreeNodeCount + 1
}
nodeWithoutChildren.SubTreeNodeCount += 1
return preOrderTraversal, autoExpandedSpans
return preOrderTraversal
}
@@ -221,15 +187,10 @@ func GetSelectedSpans(uncollapsedSpans []string, selectedSpanID string, traceRoo
}
opts := traverseOpts{
uncollapsedSpans: uncollapsedSpanMap,
selectedSpanID: selectedSpanID,
isSelectedSpanUncollapsed: isSelectedSpanIDUnCollapsed,
}
_preOrderTraversal, _autoExpanded := traverseTrace(rootNode, opts, 0, true, false, 0)
// Merge auto-expanded spans into updatedUncollapsedSpans for returning in response
for _, spanID := range _autoExpanded {
uncollapsedSpanMap[spanID] = struct{}{}
uncollapsedSpans: uncollapsedSpanMap,
selectedSpanID: selectedSpanID,
}
_preOrderTraversal := traverseTrace(rootNode, opts, 0, true, false)
_selectedSpanIndex := findIndexForSelectedSpanFromPreOrder(_preOrderTraversal, selectedSpanID)
if _selectedSpanIndex != -1 {
@@ -273,15 +234,3 @@ func GetSelectedSpans(uncollapsedSpans []string, selectedSpanID string, traceRoo
return preOrderTraversal[startIndex:endIndex], slices.Collect(maps.Keys(uncollapsedSpanMap)), rootServiceName, rootServiceEntryPoint
}
func GetAllSpans(traceRoots []*model.Span) (spans []*model.Span, rootServiceName, rootEntryPoint string) {
if len(traceRoots) > 0 {
rootServiceName = traceRoots[0].ServiceName
rootEntryPoint = traceRoots[0].Name
}
for _, root := range traceRoots {
childSpans, _ := traverseTrace(root, traverseOpts{selectAll: true}, 0, true, false, 0)
spans = append(spans, childSpans...)
}
return
}
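
A minimal, self-contained sketch (the childVisible helper is hypothetical, not part of the repository) of the child-inclusion rule that remains after this change: a child span is emitted in the waterfall pre-order only when its parent is itself part of the pre-order and the parent's span ID is in the uncollapsed set; the selected-span auto-expand depth no longer applies.

package main

import "fmt"

// childVisible restates the rule used by traverseTrace after this change:
// a child joins the pre-order output only if its parent is part of the
// pre-order AND the parent's span ID is in the uncollapsed set.
func childVisible(parentInPreOrder bool, uncollapsed map[string]struct{}, parentID string) bool {
	_, parentUncollapsed := uncollapsed[parentID]
	return parentInPreOrder && parentUncollapsed
}

func main() {
	uncollapsed := map[string]struct{}{"root": {}}
	fmt.Println(childVisible(true, uncollapsed, "root"))  // true: parent is uncollapsed
	fmt.Println(childVisible(true, uncollapsed, "child")) // false: parent is collapsed
}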

View File

@@ -81,6 +81,28 @@ func TestGetSelectedSpans_MultipleRoots(t *testing.T) {
assert.Equal(t, "root1-op", entryPoint, "metadata comes from first root")
}
// isSelectedSpanIDUnCollapsed=true opens only the selected span's direct children,
// not deeper descendants.
//
// root → selected (expanded)
// ├─ child1 ✓
// │ └─ grandchild ✗ (only one level opened)
// └─ child2 ✓
func TestGetSelectedSpans_ExpandedSelectedSpan(t *testing.T) {
root := mkSpan("root", "svc",
mkSpan("selected", "svc",
mkSpan("child1", "svc", mkSpan("grandchild", "svc")),
mkSpan("child2", "svc"),
),
)
spanMap := buildSpanMap(root)
spans, _, _, _ := GetSelectedSpans([]string{}, "selected", []*model.Span{root}, spanMap, true)
// root and selected are on the auto-uncollapsed path; child1/child2 are direct
// children of the expanded selected span; grandchild stays hidden.
assert.Equal(t, []string{"root", "selected", "child1", "child2"}, spanIDs(spans))
}
// Multiple spans uncollapsed simultaneously: children of all uncollapsed spans
// are visible at once.
//
@@ -298,119 +320,6 @@ func TestGetSelectedSpans_WindowShiftsAtStart(t *testing.T) {
assert.Equal(t, "span10", spans[10].SpanID, "selected span still in window")
}
// Auto-expanded span IDs from ALL branches are returned in
// updatedUncollapsedSpans. Only internal nodes (spans with children) are
// tracked — leaf spans are never added.
//
// root (selected)
// ├─ childA (internal ✓)
// │ └─ grandchildA (internal ✓)
// │ └─ leafA (leaf ✗)
// └─ childB (internal ✓)
// └─ grandchildB (internal ✓)
// └─ leafB (leaf ✗)
func TestGetSelectedSpans_AutoExpandedSpansReturnedInUncollapsed(t *testing.T) {
root := mkSpan("root", "svc",
mkSpan("childA", "svc",
mkSpan("grandchildA", "svc",
mkSpan("leafA", "svc"),
),
),
mkSpan("childB", "svc",
mkSpan("grandchildB", "svc",
mkSpan("leafB", "svc"),
),
),
)
spanMap := buildSpanMap(root)
_, uncollapsed, _, _ := GetSelectedSpans([]string{}, "root", []*model.Span{root}, spanMap, true)
// all internal nodes across both branches must be tracked
assert.Contains(t, uncollapsed, "root")
assert.Contains(t, uncollapsed, "childA", "internal node depth 1, branch A")
assert.Contains(t, uncollapsed, "childB", "internal node depth 1, branch B")
assert.Contains(t, uncollapsed, "grandchildA", "internal node depth 2, branch A")
assert.Contains(t, uncollapsed, "grandchildB", "internal node depth 2, branch B")
// leaves have no children to show — never added to uncollapsedSpans
assert.NotContains(t, uncollapsed, "leafA", "leaf spans are never added to uncollapsedSpans")
assert.NotContains(t, uncollapsed, "leafB", "leaf spans are never added to uncollapsedSpans")
}
// ─────────────────────────────────────────────────────────────────────────────
// maxDepthForSelectedSpanChildren boundary tests
// ─────────────────────────────────────────────────────────────────────────────
// Depth is measured from the selected span, not the trace root.
// Ancestors appear via the path-to-root logic, not the depth limit.
// Each depth level has two children to confirm the limit is enforced on all
// branches, not just the first.
//
// root
// └─ A ancestor ✓ (path-to-root)
// └─ selected
// ├─ d1a depth 1 ✓
// │ ├─ d2a depth 2 ✓
// │ │ ├─ d3a depth 3 ✓
// │ │ │ ├─ d4a depth 4 ✓
// │ │ │ │ ├─ d5a depth 5 ✓
// │ │ │ │ │ └─ d6a depth 6 ✗
// │ │ │ │ └─ d5b depth 5 ✓
// │ │ │ └─ d4b depth 4 ✓
// │ │ └─ d3b depth 3 ✓
// │ └─ d2b depth 2 ✓
// └─ d1b depth 1 ✓
func TestGetSelectedSpans_DepthCountedFromSelectedSpan(t *testing.T) {
selected := mkSpan("selected", "svc",
mkSpan("d1a", "svc",
mkSpan("d2a", "svc",
mkSpan("d3a", "svc",
mkSpan("d4a", "svc",
mkSpan("d5a", "svc",
mkSpan("d6a", "svc"), // depth 6 — excluded
),
mkSpan("d5b", "svc"), // depth 5 — included
),
mkSpan("d4b", "svc"), // depth 4 — included
),
mkSpan("d3b", "svc"), // depth 3 — included
),
mkSpan("d2b", "svc"), // depth 2 — included
),
mkSpan("d1b", "svc"), // depth 1 — included
)
root := mkSpan("root", "svc", mkSpan("A", "svc", selected))
spanMap := buildSpanMap(root)
spans, _, _, _ := GetSelectedSpans([]string{}, "selected", []*model.Span{root}, spanMap, true)
ids := spanIDs(spans)
assert.Contains(t, ids, "root", "ancestor shown via path-to-root")
assert.Contains(t, ids, "A", "ancestor shown via path-to-root")
for _, id := range []string{"d1a", "d1b", "d2a", "d2b", "d3a", "d3b", "d4a", "d4b", "d5a", "d5b"} {
assert.Contains(t, ids, id, "depth ≤ 5 — must be included")
}
assert.NotContains(t, ids, "d6a", "depth 6 > limit — excluded")
}
func TestGetAllSpans(t *testing.T) {
root := mkSpan("root", "svc",
mkSpan("childA", "svc",
mkSpan("grandchildA", "svc",
mkSpan("leafA", "svc2"),
),
),
mkSpan("childB", "svc3",
mkSpan("grandchildB", "svc",
mkSpan("leafB", "svc2"),
),
),
)
spans, rootServiceName, rootEntryPoint := GetAllSpans([]*model.Span{root})
assert.ElementsMatch(t, spanIDs(spans), []string{"root", "childA", "grandchildA", "leafA", "childB", "grandchildB", "leafB"})
assert.Equal(t, rootServiceName, "svc")
assert.Equal(t, rootEntryPoint, "root-op")
}
func mkSpan(id, service string, children ...*model.Span) *model.Span {
return &model.Span{
SpanID: id,

View File

@@ -333,7 +333,6 @@ type GetWaterfallSpansForTraceWithMetadataParams struct {
SelectedSpanID string `json:"selectedSpanId"`
IsSelectedSpanIDUnCollapsed bool `json:"isSelectedSpanIDUnCollapsed"`
UncollapsedSpans []string `json:"uncollapsedSpans"`
Limit uint `json:"limit"`
}
type GetFlamegraphSpansForTraceParams struct {

View File

@@ -329,7 +329,6 @@ type GetWaterfallSpansForTraceWithMetadataResponse struct {
HasMissingSpans bool `json:"hasMissingSpans"`
// this is needed for frontend and query service sync
UncollapsedSpans []string `json:"uncollapsedSpans"`
HasMore bool `json:"hasMore"`
}
type GetFlamegraphSpansForTraceResponse struct {

View File

@@ -3,7 +3,9 @@ package telemetrylogs
import (
"fmt"
"slices"
"strings"
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -32,9 +34,25 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
// BuildCondition builds the full WHERE condition for body_v2 JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
baseCond, err := c.emitPlannedCondition(operator, value, sb)
if err != nil {
return "", err
}
// path index
if operator.AddDefaultExistsFilter() {
pathIndex := fmt.Sprintf(`has(%s, '%s')`, schemamigrator.JSONPathsIndexExpr(LogsV2BodyV2Column), c.key.ArrayParentPaths()[0])
return sb.And(baseCond, pathIndex), nil
}
return baseCond, nil
}
func (c *jsonConditionBuilder) emitPlannedCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Build traversal + terminal recursively per-hop
conditions := []string{}
for _, node := range c.key.JSONPlan {
condition, err := c.emitPlannedCondition(node, operator, value, sb)
condition, err := c.recurseArrayHops(node, operator, value, sb)
if err != nil {
return "", err
}
@@ -44,164 +62,7 @@ func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperato
return sb.Or(conditions...), nil
}
// emitPlannedCondition handles paths with array traversal
func (c *jsonConditionBuilder) emitPlannedCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
// Build traversal + terminal recursively per-hop
compiled, err := c.recurseArrayHops(node, operator, value, sb)
if err != nil {
return "", err
}
return compiled, nil
}
// buildTerminalCondition creates the innermost condition
func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if node.TerminalConfig.ElemType.IsArray {
conditions := []string{}
// if the value type is not an array
// TODO(piyush): Confirm the Query built for Array case and add testcases for it later
if !c.valueType.IsArray {
// if operator is a String search Operator, then we need to build one more String comparison condition along with the Strict match condition
if operator.IsStringSearchOperator() {
formattedValue := querybuilder.FormatValueForContains(value)
arrayCond, err := c.buildArrayMembershipCondition(node, operator, formattedValue, sb)
if err != nil {
return "", err
}
conditions = append(conditions, arrayCond)
}
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains:
operator = qbtypes.FilterOperatorNotEqual
}
}
arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
if err != nil {
return "", err
}
conditions = append(conditions, arrayCond)
// or the conditions together
return sb.Or(conditions...), nil
}
return c.buildPrimitiveTerminalCondition(node, operator, value, sb)
}
// buildPrimitiveTerminalCondition builds the condition if the terminal node is a primitive type
// it handles the data type collisions and utilizes indexes for the condition if available
func (c *jsonConditionBuilder) buildPrimitiveTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
fieldPath := node.FieldPath()
conditions := []string{}
var formattedValue any = value
if operator.IsStringSearchOperator() {
formattedValue = querybuilder.FormatValueForContains(value)
}
elemType := node.TerminalConfig.ElemType
fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
fieldExpr, formattedValue = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, formattedValue, fieldExpr, operator)
// utilize indexes for the condition if available
indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
return index.Type == elemType && index.ColumnExpression == fieldPath
})
if elemType.IndexSupported && indexed {
indexedExpr := assumeNotNull(fieldPath, elemType)
emptyValue := func() any {
switch elemType {
case telemetrytypes.String:
return ""
case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
return 0
default:
return nil
}
}()
// switch the operator and value for exists and not exists
switch operator {
case qbtypes.FilterOperatorExists:
operator = qbtypes.FilterOperatorNotEqual
value = emptyValue
case qbtypes.FilterOperatorNotExists:
operator = qbtypes.FilterOperatorEqual
value = emptyValue
default:
// do nothing
}
indexedExpr, indexedComparisonValue := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, formattedValue, indexedExpr, operator)
cond, err := c.applyOperator(sb, indexedExpr, operator, indexedComparisonValue)
if err != nil {
return "", err
}
// if qb has a definitive value, we can skip adding a condition to
// check the existence of the path in the json column
if value != emptyValue {
return cond, nil
}
conditions = append(conditions, cond)
// Switch operator to EXISTS since indexed paths on assumedNotNull, indexes will always have a default value
// So we flip the operator to Exists and filter the rows that actually have the value
operator = qbtypes.FilterOperatorExists
}
cond, err := c.applyOperator(sb, fieldExpr, operator, formattedValue)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
if len(conditions) > 1 {
return sb.And(conditions...), nil
}
return conditions[0], nil
}
// buildArrayMembershipCondition handles array membership checks
func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
arrayPath := node.FieldPath()
localKeyCopy := *node.TerminalConfig.Key
// create typed array out of a dynamic array
filteredDynamicExpr := func() string {
// Change the field data type from []dynamic to the value type
// since we've filtered the value type out of the dynamic array, we need to change the field data corresponding to the value type
localKeyCopy.FieldDataType = telemetrytypes.MappingJSONDataTypeToFieldDataType[telemetrytypes.ScalerTypeToArrayType[c.valueType]]
baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
c.valueType.StringValue(),
c.valueType.StringValue(),
baseArrayDynamicExpr)
}
typedArrayExpr := func() string {
return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
}
var arrayExpr string
if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
arrayExpr = filteredDynamicExpr()
} else {
arrayExpr = typedArrayExpr()
}
key := "x"
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
}
// recurseArrayHops recursively builds array traversal conditions
// buildPlanCondition recursively traverses a single JSONPlan and builds condition
func (c *jsonConditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if current == nil {
return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
@@ -215,6 +76,33 @@ func (c *jsonConditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAcce
return terminalCond, nil
}
// apply NOT at the top-level arrayExists so that when the nested arrayExists conditions fail to match, the row still counts as true (a matching log)
yes, operator := applyNotCondition(operator)
condition, err := c.buildAccessNodeBranches(current, operator, value, sb)
if err != nil {
return "", err
}
if yes {
return sb.Not(condition), nil
}
return condition, nil
}
func applyNotCondition(operator qbtypes.FilterOperator) (bool, qbtypes.FilterOperator) {
if operator.IsNegativeOperator() {
return true, operator.Inverse()
}
return false, operator
}
// buildAccessNodeBranches builds conditions for each branch of the access node
func (c *jsonConditionBuilder) buildAccessNodeBranches(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if current == nil {
return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
}
currAlias := current.Alias()
fieldPath := current.FieldPath()
// Determine availability of Array(JSON) and Array(Dynamic) at this hop
@@ -249,6 +137,213 @@ func (c *jsonConditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAcce
return sb.Or(branches...), nil
}
// buildTerminalCondition creates the innermost condition
func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
if node.TerminalConfig.ElemType.IsArray {
// Note: applyNotCondition returns true here only if the top-level path is an array and the operator is negative.
// Otherwise this code is reached via buildAccessNodeBranches, where the operator has already been inverted if needed.
yes, operator := applyNotCondition(operator)
cond, err := c.buildTerminalArrayCondition(node, operator, value, sb)
if err != nil {
return "", err
}
if yes {
return sb.Not(cond), nil
}
return cond, nil
}
return c.buildPrimitiveTerminalCondition(node, operator, value, sb)
}
func getEmptyValue(elemType telemetrytypes.JSONDataType) any {
switch elemType {
case telemetrytypes.String:
return ""
case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
return 0
default:
return nil
}
}
func (c *jsonConditionBuilder) terminalIndexedCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
fieldPath := node.FieldPath()
if strings.Contains(fieldPath, telemetrytypes.ArraySepSuffix) {
return "", errors.NewInternalf(CodeArrayNavigationFailed, "can not build index condition for array field %s", fieldPath)
}
elemType := node.TerminalConfig.ElemType
dynamicExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
indexedExpr := assumeNotNull(dynamicExpr)
// switch the operator and value for exists and not exists
switch operator {
case qbtypes.FilterOperatorExists:
operator = qbtypes.FilterOperatorNotEqual
value = getEmptyValue(elemType)
case qbtypes.FilterOperatorNotExists:
operator = qbtypes.FilterOperatorEqual
value = getEmptyValue(elemType)
default:
// do nothing
}
indexedExpr, formattedValue := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, indexedExpr, operator)
cond, err := c.applyOperator(sb, indexedExpr, operator, formattedValue)
if err != nil {
return "", err
}
return cond, nil
}
// buildPrimitiveTerminalCondition builds the condition if the terminal node is a primitive type
// it handles the data type collisions and utilizes indexes for the condition if available
func (c *jsonConditionBuilder) buildPrimitiveTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
fieldPath := node.FieldPath()
conditions := []string{}
// utilize indexes for the condition if available
//
// Note: Indexing code doesn't get executed for Array Nested fields because they can not be indexed
indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
return index.Type == node.TerminalConfig.ElemType && index.ColumnExpression == fieldPath
})
if node.TerminalConfig.ElemType.IndexSupported && indexed {
indexCond, err := c.terminalIndexedCondition(node, operator, value, sb)
if err != nil {
return "", err
}
// if qb has a definitive value, we can skip adding a condition to
// check the existence of the path in the json column
if value != nil && value != getEmptyValue(node.TerminalConfig.ElemType) {
return indexCond, nil
}
conditions = append(conditions, indexCond)
// Switch the operator to EXISTS (except when it is NOT EXISTS): indexed paths
// use assumeNotNull, so the indexed column always holds a default value; we
// therefore flip to EXISTS and filter down to rows that actually contain the
// value.
if operator != qbtypes.FilterOperatorNotExists {
operator = qbtypes.FilterOperatorExists
}
}
var formattedValue any = value
if operator.IsStringSearchOperator() {
formattedValue = querybuilder.FormatValueForContains(value)
}
fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, node.TerminalConfig.ElemType.StringValue())
// If the operator is negative and compares a value (i.e. anything except EXISTS and NOT EXISTS), we need to assume the field exists everywhere.
//
// Note: applyNotCondition returns true here only if a top-level path is being queried and the operator is negative.
// Otherwise this code is reached via buildAccessNodeBranches, where the operator has already been inverted if needed.
if node.IsNonNestedPath() {
yes, _ := applyNotCondition(operator)
if yes {
switch operator {
case qbtypes.FilterOperatorNotExists:
// skip
default:
fieldExpr = assumeNotNull(fieldExpr)
}
}
}
fieldExpr, formattedValue = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, formattedValue, fieldExpr, operator)
cond, err := c.applyOperator(sb, fieldExpr, operator, formattedValue)
if err != nil {
return "", err
}
conditions = append(conditions, cond)
if len(conditions) > 1 {
return sb.And(conditions...), nil
}
return conditions[0], nil
}
func (c *jsonConditionBuilder) buildTerminalArrayCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
conditions := []string{}
// if the value type is not an array
// TODO(piyush): Confirm the Query built for Array case and add testcases for it later
if !c.valueType.IsArray {
// if operator is a String search Operator, then we need to build one more String comparison condition along with the Strict match condition
if operator.IsStringSearchOperator() {
formattedValue := querybuilder.FormatValueForContains(value)
arrayCond, err := c.buildArrayMembershipCondition(node, operator, formattedValue, sb)
if err != nil {
return "", err
}
conditions = append(conditions, arrayCond)
}
// switch operator for array membership checks
switch operator {
case qbtypes.FilterOperatorContains:
operator = qbtypes.FilterOperatorEqual
case qbtypes.FilterOperatorNotContains:
operator = qbtypes.FilterOperatorNotEqual
}
}
arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
if err != nil {
return "", err
}
conditions = append(conditions, arrayCond)
return sb.Or(conditions...), nil
}
// buildArrayMembershipCondition builds the condition for the point where an array resolves to primitive
// elements, e.g. [300, 404, 500]; value operations are applied to those array elements.
func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
arrayPath := node.FieldPath()
localKeyCopy := *node.TerminalConfig.Key
// create typed array out of a dynamic array
filteredDynamicExpr := func() string {
// Change the field data type from []dynamic to the value type
// since we've filtered the value type out of the dynamic array, we need to change the field data corresponding to the value type
localKeyCopy.FieldDataType = telemetrytypes.MappingJSONDataTypeToFieldDataType[telemetrytypes.ScalerTypeToArrayType[c.valueType]]
primitiveType := c.valueType.StringValue()
// check if value is an array
if c.valueType.IsArray {
primitiveType = c.valueType.ScalerType
}
baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
primitiveType,
primitiveType,
baseArrayDynamicExpr)
}
typedArrayExpr := func() string {
return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
}
var arrayExpr string
if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
arrayExpr = filteredDynamicExpr()
} else {
arrayExpr = typedArrayExpr()
}
key := "x"
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
op, err := c.applyOperator(sb, fieldExpr, operator, value)
if err != nil {
return "", err
}
return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
}
func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
switch operator {
case qbtypes.FilterOperatorEqual:
@@ -310,6 +405,6 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
}
}
func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
func assumeNotNull(fieldExpr string) string {
return fmt.Sprintf("assumeNotNull(%s)", fieldExpr)
}
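
The negative-operator handling above can be summarized with a small, runnable sketch (the invert helper and the operator strings are illustrative stand-ins, not the package's real API): the operator is swapped for its positive counterpart, the positive arrayExists condition is built, and the whole expression is wrapped in NOT, so a log matches when none of its array elements satisfy the positive predicate.

package main

import "fmt"

// invert maps a negative operator to its positive counterpart
// (illustrative stand-in for FilterOperator.Inverse).
func invert(op string) (negated bool, positive string) {
	inverse := map[string]string{"!=": "=", "NOT IN": "IN", "NOT LIKE": "LIKE"}
	if pos, ok := inverse[op]; ok {
		return true, pos
	}
	return false, op
}

func main() {
	negated, op := invert("!=")
	cond := fmt.Sprintf("arrayExists(x -> x %s 500, arr)", op)
	if negated {
		cond = "NOT (" + cond + ")"
	}
	fmt.Println(cond) // NOT (arrayExists(x -> x = 500, arr))
}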

File diff suppressed because one or more lines are too long

View File

@@ -113,6 +113,29 @@ const (
FilterOperatorNotContains
)
var operatorInverseMapping = map[FilterOperator]FilterOperator{
FilterOperatorEqual: FilterOperatorNotEqual,
FilterOperatorNotEqual: FilterOperatorEqual,
FilterOperatorGreaterThan: FilterOperatorLessThanOrEq,
FilterOperatorGreaterThanOrEq: FilterOperatorLessThan,
FilterOperatorLessThan: FilterOperatorGreaterThanOrEq,
FilterOperatorLessThanOrEq: FilterOperatorGreaterThan,
FilterOperatorLike: FilterOperatorNotLike,
FilterOperatorNotLike: FilterOperatorLike,
FilterOperatorILike: FilterOperatorNotILike,
FilterOperatorNotILike: FilterOperatorILike,
FilterOperatorBetween: FilterOperatorNotBetween,
FilterOperatorNotBetween: FilterOperatorBetween,
FilterOperatorIn: FilterOperatorNotIn,
FilterOperatorNotIn: FilterOperatorIn,
FilterOperatorExists: FilterOperatorNotExists,
FilterOperatorNotExists: FilterOperatorExists,
FilterOperatorRegexp: FilterOperatorNotRegexp,
FilterOperatorNotRegexp: FilterOperatorRegexp,
FilterOperatorContains: FilterOperatorNotContains,
FilterOperatorNotContains: FilterOperatorContains,
}
// AddDefaultExistsFilter returns true if addl exists filter should be added to the query
// For the negative predicates, we don't want to add the exists filter. Why?
// Say for example, user adds a filter `service.name != "redis"`, we can't interpret it
@@ -162,6 +185,10 @@ func (f FilterOperator) IsNegativeOperator() bool {
return true
}
func (f FilterOperator) Inverse() FilterOperator {
return operatorInverseMapping[f]
}
func (f FilterOperator) IsComparisonOperator() bool {
switch f {
case FilterOperatorGreaterThan, FilterOperatorGreaterThanOrEq, FilterOperatorLessThan, FilterOperatorLessThanOrEq:
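
A short usage sketch of the new Inverse method, assuming the qbtypes import path shown earlier in this compare and the constants listed in the mapping above:

package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	// Each negative operator maps to its positive counterpart and vice versa,
	// so a negative filter can be rewritten as NOT(<positive condition>).
	fmt.Println(qbtypes.FilterOperatorNotEqual.Inverse() == qbtypes.FilterOperatorEqual) // true
	fmt.Println(qbtypes.FilterOperatorNotIn.Inverse() == qbtypes.FilterOperatorIn)       // true
}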

View File

@@ -90,6 +90,11 @@ func (n *JSONAccessNode) FieldPath() string {
return n.Parent.Alias() + "." + key
}
// Returns true if the current node is a non-nested path
func (n *JSONAccessNode) IsNonNestedPath() bool {
return !strings.Contains(n.FieldPath(), ArraySep)
}
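// Example (assuming ArraySep is the "[]" marker used in field paths):
//
//	user.address.city -> true  (no array hops)
//	education[].name  -> false (contains an array hop)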
func (n *JSONAccessNode) BranchesInOrder() []JSONAccessBranchType {
return slices.SortedFunc(maps.Keys(n.Branches), func(a, b JSONAccessBranchType) int {
return strings.Compare(b.StringValue(), a.StringValue())

View File

@@ -4,69 +4,235 @@ package telemetrytypes
// Test JSON Type Set Data Setup
// ============================================================================
// TestJSONTypeSet returns a map of path->types for testing
// This represents the type information available in the test JSON structure
// TestJSONTypeSet returns a map of path->types for testing.
// This represents the type information available in the test JSON structure.
//
// Structural patterns covered:
//
// Primitives message, count, duration, user.name, user.age, ...
// x.y service.name, response.code, user.address.city
// x[].y education[].name, http-events[].request-id
// x[].y.z education[].metadata.location, http-events[].request-info.host
// x[].y.z[] http-events[].request-info.headers (array behind non-array hop)
// x[].y.z.a[] http-events[].request-info.meta-data.entries (two non-array hops → array)
// x[].y[] education[].awards
// x[].y[].z education[].awards[].name
// x[].y[].z[] education[].awards[].participated
// x[].y[].z[].w education[].awards[].participated[].type
// x[].y[].z[].w[] education[].awards[].participated[].team
// x[].y[].z[].w[].v education[].awards[].participated[].team[].branch
func TestJSONTypeSet() (map[string][]JSONDataType, MetadataStore) {
types := map[string][]JSONDataType{
"user.name": {String},
"user.permissions": {ArrayString},
"user.age": {Int64, String},
"user.height": {Float64},
"education": {ArrayJSON},
"education[].name": {String},
"education[].type": {String, Int64},
"education[].internal_type": {String},
"education[].metadata.location": {String},
"education[].parameters": {ArrayFloat64, ArrayDynamic},
"education[].duration": {String},
"education[].mode": {String},
"education[].year": {Int64},
"education[].field": {String},
"education[].awards": {ArrayDynamic, ArrayJSON},
"education[].awards[].name": {String},
"education[].awards[].rank": {Int64},
"education[].awards[].medal": {String},
"education[].awards[].type": {String},
"education[].awards[].semester": {Int64},
"education[].awards[].participated": {ArrayDynamic, ArrayJSON},
"education[].awards[].participated[].type": {String},
"education[].awards[].participated[].field": {String},
"education[].awards[].participated[].project_type": {String},
"education[].awards[].participated[].project_name": {String},
"education[].awards[].participated[].race_type": {String},
"education[].awards[].participated[].team_based": {Bool},
"education[].awards[].participated[].team_name": {String},
"education[].awards[].participated[].team": {ArrayJSON},
"education[].awards[].participated[].members": {ArrayString},
// ── user (primitives) ─────────────────────────────────────────────
"user.name": {String},
"user.permissions": {ArrayString},
"user.age": {Int64, String}, // Int64/String ambiguity
"user.height": {Float64},
"user.active": {Bool}, // Bool — not IndexSupported
"user.score": {Float64, Int64}, // numeric Float64/Int64 ambiguity
// Deeper non-array nesting (a.b.c — no array hops)
"user.address.city": {String},
"user.address.zip": {Int64},
"user.address.country": {String, Int64}, // deep + ambiguous
// ── education[] ───────────────────────────────────────────────────
// Pattern: x[].y
"education": {ArrayJSON},
"education[].name": {String},
"education[].type": {String, Int64},
"education[].internal_type": {String},
"education[].duration": {String},
"education[].mode": {String},
"education[].year": {Int64},
"education[].field": {String},
"education[].grades": {ArrayBool}, // bool array terminal
"education[].scores": {ArrayInt64}, // int array terminal
"education[].parameters": {ArrayFloat64, ArrayDynamic},
// Pattern: x[].y.z
"education[].metadata.location": {String},
// Pattern: x[].y[]
"education[].awards": {ArrayDynamic, ArrayJSON},
// Pattern: x[].y[].z
"education[].awards[].name": {String},
"education[].awards[].rank": {Int64},
"education[].awards[].medal": {String},
"education[].awards[].type": {String},
"education[].awards[].semester": {Int64},
// Pattern: x[].y[].z[]
"education[].awards[].participated": {ArrayDynamic, ArrayJSON},
// Pattern: x[].y[].z[].w
"education[].awards[].participated[].type": {String},
"education[].awards[].participated[].field": {String},
"education[].awards[].participated[].project_type": {String},
"education[].awards[].participated[].project_name": {String},
"education[].awards[].participated[].race_type": {String},
"education[].awards[].participated[].team_based": {Bool},
"education[].awards[].participated[].team_name": {String},
"education[].awards[].participated[].members": {ArrayString},
// Pattern: x[].y[].z[].w[]
"education[].awards[].participated[].team": {ArrayJSON},
// Pattern: x[].y[].z[].w[].v
"education[].awards[].participated[].team[].name": {String},
"education[].awards[].participated[].team[].branch": {String},
"education[].awards[].participated[].team[].semester": {Int64},
"interests": {ArrayJSON},
"interests[].type": {String},
"interests[].entities": {ArrayJSON},
"interests[].entities.application_date": {String},
"interests[].entities[].reviews": {ArrayJSON},
"interests[].entities[].reviews[].given_by": {String},
"interests[].entities[].reviews[].remarks": {String},
"interests[].entities[].reviews[].weight": {Float64},
"interests[].entities[].reviews[].passed": {Bool},
"interests[].entities[].reviews[].type": {String},
"interests[].entities[].reviews[].analysis_type": {Int64},
"interests[].entities[].reviews[].entries": {ArrayJSON},
"interests[].entities[].reviews[].entries[].subject": {String},
"interests[].entities[].reviews[].entries[].status": {String},
"interests[].entities[].reviews[].entries[].metadata": {ArrayJSON},
"interests[].entities[].reviews[].entries[].metadata[].company": {String},
"interests[].entities[].reviews[].entries[].metadata[].experience": {Int64},
"interests[].entities[].reviews[].entries[].metadata[].unit": {String},
// ── interests[] ───────────────────────────────────────────────────
"interests": {ArrayJSON},
"interests[].type": {String},
"interests[].entities": {ArrayJSON},
"interests[].entities.application_date": {String},
"interests[].entities[].reviews": {ArrayJSON},
"interests[].entities[].reviews[].given_by": {String},
"interests[].entities[].reviews[].remarks": {String},
"interests[].entities[].reviews[].weight": {Float64},
"interests[].entities[].reviews[].passed": {Bool},
"interests[].entities[].reviews[].type": {String},
"interests[].entities[].reviews[].analysis_type": {Int64},
"interests[].entities[].reviews[].score": {Float64}, // additional Float64 primitive
"interests[].entities[].reviews[].flags": {ArrayBool}, // bool array in deep nesting
"interests[].entities[].reviews[].entries": {ArrayJSON},
"interests[].entities[].reviews[].entries[].subject": {String},
"interests[].entities[].reviews[].entries[].status": {String},
"interests[].entities[].reviews[].entries[].metadata": {ArrayJSON},
"interests[].entities[].reviews[].entries[].metadata[].company": {String},
"interests[].entities[].reviews[].entries[].metadata[].experience": {Int64},
"interests[].entities[].reviews[].entries[].metadata[].unit": {String},
"interests[].entities[].reviews[].entries[].metadata[].positions": {ArrayJSON},
"interests[].entities[].reviews[].entries[].metadata[].positions[].name": {String},
"interests[].entities[].reviews[].entries[].metadata[].positions[].duration": {Int64, Float64},
"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {String},
"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {ArrayInt64, ArrayString},
"message": {String},
"tags": {ArrayString},
// ── http-events[] ─────────────────────────────────────────────────
// Segments use -, @, _ symbols. Root is a single ArrayJSON.
//
// Pattern: x[].y
"http-events": {ArrayJSON},
"http-events[].request-id": {String},
"http-events[].status-code": {Int64, String}, // ambiguous
"http-events[].@type": {String, Int64}, // @ symbol + ambiguous
// Pattern: x[].y.z (non-array intermediate, primitive terminal)
"http-events[].request-info.host": {String},
"http-events[].request-info.port": {Int64, Float64}, // numeric ambiguity
"http-events[].request-info.method": {String},
"http-events[].@context.version": {Int64}, // @ in intermediate segment
"http-events[].@context.format": {String},
// Pattern: x[].y.z[] (non-array intermediate, array terminal)
"http-events[].request-info.headers": {ArrayJSON, ArrayDynamic},
"http-events[].request-info.headers[].name": {String},
"http-events[].request-info.headers[].value": {String, Int64}, // ambiguous
"http-events[].@context.tags": {ArrayString}, // @ + simple array terminal
// Pattern: x[].y.z.a[] (two non-array hops then array)
"http-events[].request-info.meta-data.entries": {ArrayJSON, ArrayDynamic},
"http-events[].request-info.meta-data.entries[].key": {String},
"http-events[].request-info.meta-data.entries[].value": {String, Int64}, // ambiguous
"http-events[].request-info.meta-data.entries[]._score": {Float64}, // _ prefix
// ── top-level primitives ──────────────────────────────────────────
// Every IndexSupported scalar type, plus Bool and ambiguous pairs.
"message": {String},
"error": {Bool, String}, // Bool — not IndexSupported
"order-Id": {Float64, Int64}, // numeric Float64/Int64 ambiguity at root
// Special characters in root-level keys
"http-status": {Int64, String}, // hyphen in root key, ambiguous
"_internal": {String}, // underscore prefix
"@version": {String, Int64}, // @ at root level, ambiguous
// ── top-level nested objects (no array hops) ───────────────────────
"service.name": {String},
"service.version": {String, Int64}, // ambiguous
"service.weight": {Float64},
"service.active": {Bool},
"response.code": {Int64, String, ArrayString}, // ambiguous
"response.message": {String},
"response.time-taken": {Float64}, // hyphen inside nested key
// ── top-level arrays ──────────────────────────────────────────────
// One array of every type so every array terminal branch is reachable
// from a non-array root path.
"tags": {ArrayString},
}
return types, nil
}
// TestIndexedPathEntry is a path + JSON type pair representing a field
// backed by a ClickHouse skip index in the test data.
//
// Only non-array paths with IndexSupported types (String, Int64, Float64)
// are valid entries — arrays and Bool cannot carry a skip index.
//
// The ColumnExpression for each entry is computed at test-setup time from
// the access plan, since it depends on the column name (e.g. body_v2)
// which is unknown to this package.
type TestIndexedPathEntry struct {
Path string
Type JSONDataType
}
// TestIndexedPaths lists path+type pairs from TestJSONTypeSet that are
// backed by a JSON data type index. Test setup uses this to populate
// key.Indexes after calling SetJSONAccessPlan.
//
// Intentionally excluded:
// - user.active, service.active → Bool, IndexSupported=false
// - error (Bool variant) → Bool, IndexSupported=false
// - response.code (ArrayString) → array type, cannot be indexed
// - http-events[].* → inside an array hop; array-nested fields cannot be indexed
// - education[].* → same reason
// - tags → array type, cannot be indexed
var TestIndexedPaths = []TestIndexedPathEntry{
// root-level primitives
{Path: "message", Type: String},
{Path: "error", Type: String}, // String variant of the Bool/String ambiguous field
{Path: "order-Id", Type: Float64}, // hyphen in key, Float64 variant
{Path: "order-Id", Type: Int64}, // Int64 variant
// root-level with special characters
{Path: "http-status", Type: Int64},
{Path: "http-status", Type: String},
{Path: "_internal", Type: String},
{Path: "@version", Type: String},
{Path: "@version", Type: Int64},
// root-level nested objects (no array hops)
{Path: "service.name", Type: String},
{Path: "service.version", Type: String},
{Path: "service.version", Type: Int64},
{Path: "service.weight", Type: Float64},
{Path: "response.code", Type: Int64},
{Path: "response.code", Type: String},
{Path: "response.message", Type: String},
{Path: "response.time-taken", Type: Float64},
// user primitives — all IndexSupported types
{Path: "user.name", Type: String},
{Path: "user.age", Type: Int64},
{Path: "user.age", Type: String},
{Path: "user.height", Type: Float64},
{Path: "user.score", Type: Float64},
{Path: "user.score", Type: Int64},
// user.address — deeper non-array nesting
{Path: "user.address.city", Type: String},
{Path: "user.address.zip", Type: Int64},
{Path: "user.address.country", Type: String},
{Path: "user.address.country", Type: Int64},
}
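// Illustrative test-setup sketch (helper names and signatures below are
// assumptions, not part of this diff): after SetJSONAccessPlan has been
// called, each entry in TestIndexedPaths is resolved against the access plan
// to compute its ColumnExpression, and the resulting index metadata is
// appended to the resolved key's Indexes so index-aware code paths are
// exercised in tests.
//
//	for _, entry := range TestIndexedPaths {
//	    key := resolveKey(entry.Path)                            // hypothetical helper
//	    key.Indexes = append(key.Indexes, buildIndex(entry, key)) // hypothetical helper
//	}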