Compare commits

9 Commits

Author SHA1 Message Date
Srikanth Chekuri
585caa84fe Merge branch 'main' into issue-10008 2026-01-23 20:11:48 +05:30
srikanthccv
814fd40a1d chore: update tests 2026-01-23 05:43:26 +05:30
srikanthccv
0978bdfa7f chore: fix warn 2026-01-23 04:33:07 +05:30
srikanthccv
4f99261743 chore: resolve conflicts 2026-01-23 04:25:39 +05:30
Srikanth Chekuri
836988273f Merge branch 'main' into issue-10008 2026-01-20 16:48:54 +05:30
srikanthccv
a622d65226 chore: lint 2026-01-20 06:30:00 +05:30
srikanthccv
2410e3d411 chore: add integration tests 2026-01-20 06:14:13 +05:30
Srikanth Chekuri
654e2e4b7e Merge branch 'main' into issue-10008 2026-01-20 05:02:15 +05:30
srikanthccv
03ad7a85fa feat(querybuilder): support using variables inside value
prior to this, variables worked only as standalone RHS values (e.g. field = $var / field IN $var). this change adds support for using variables inside a value for pattern matching or substitution, e.g. "$env-suffix" or LIKE "%$pattern-var-name%" (see the worked example below)
2026-01-19 01:41:42 +05:30
59 changed files with 2641 additions and 564 deletions
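
In practice (per the embedded-variable test cases later in this diff), with a dashboard variable env set to prod and pattern set to error, references embedded in a quoted value are interpolated before the WHERE clause is built:

  version = '$env-xyz'          ->  version = 'prod-xyz'
  service.name LIKE '$env%'     ->  service.name LIKE 'prod%'
  message ILIKE '%$pattern%'    ->  message ILIKE '%error%'

A pure reference such as service.name = $service is still handled by the existing standalone-variable path.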

View File

@@ -12,7 +12,6 @@ linters:
- misspell
- nilnil
- sloglint
- wastedassign
- unparam
- unused
settings:

View File

@@ -38,7 +38,7 @@ module.exports = {
'import', // Import/export linting
'sonarjs', // Code quality/complexity
// TODO: Uncomment after running: yarn add -D eslint-plugin-spellcheck
// 'spellcheck', // Correct spellings
// 'spellcheck',
],
settings: {
react: {
@@ -60,18 +60,12 @@ module.exports = {
'no-debugger': 'error', // Disallows debugger statements in production code
curly: 'error', // Requires curly braces for all control statements
eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error
// TODO: Enable after fixing ~15 console.log statements
// 'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error
// TypeScript rules
'@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions
'@typescript-eslint/no-unused-vars': [
// Disallows unused variables/args
'error',
{
argsIgnorePattern: '^_', // Allows unused args prefixed with _ (e.g., _unusedParam)
varsIgnorePattern: '^_', // Allows unused vars prefixed with _ (e.g., _unusedVar)
},
],
'@typescript-eslint/no-unused-vars': 'off',
'@typescript-eslint/no-explicit-any': 'warn', // Warns when using 'any' type (consider upgrading to error)
// TODO: Change to 'error' after fixing ~80 empty function placeholders in providers/contexts
'@typescript-eslint/no-empty-function': 'off', // Disallows empty function bodies

View File

@@ -41,7 +41,7 @@ export const getConsumerLagDetails = async (
> => {
const { detailType, ...restProps } = props;
const response = await axios.post(
`/messaging-queues/kafka/consumer-lag/${detailType}`,
`/messaging-queues/kafka/consumer-lag/${props.detailType}`,
{
...restProps,
},

View File

@@ -43,17 +43,16 @@ export const omitIdFromQuery = (query: Query | null): any => ({
builder: {
...query?.builder,
queryData: query?.builder.queryData.map((queryData) => {
const { id: _aggregateAttributeId, ...rest } =
queryData.aggregateAttribute || {};
const { id, ...rest } = queryData.aggregateAttribute || {};
const newAggregateAttribute = rest;
const newGroupByAttributes = queryData.groupBy.map((groupByAttribute) => {
const { id: _groupByAttributeId, ...rest } = groupByAttribute;
const { id, ...rest } = groupByAttribute;
return rest;
});
const newItems = queryData.filters?.items?.map((item) => {
const { id: _itemId, ...newItem } = item;
const { id, ...newItem } = item;
if (item.key) {
const { id: _keyId, ...rest } = item.key;
const { id, ...rest } = item.key;
return {
...newItem,
key: rest,

View File

@@ -45,6 +45,7 @@ function Pre({
}
function Code({
node,
inline,
className = 'blog-code',
children,

View File

@@ -29,6 +29,7 @@ import {
QUERY_BUILDER_OPERATORS_BY_KEY_TYPE,
queryOperatorSuggestions,
} from 'constants/antlrQueryConstants';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useDebounce from 'hooks/useDebounce';
import { debounce, isNull } from 'lodash-es';
@@ -207,6 +208,8 @@ function QuerySearch({
const lastValueRef = useRef<string>('');
const isMountedRef = useRef<boolean>(true);
const { handleRunQuery } = useQueryBuilder();
const { selectedDashboard } = useDashboard();
const dynamicVariables = useMemo(

View File

@@ -87,7 +87,7 @@ function TraceOperatorEditor({
// Track if the query was changed externally (from props) vs internally (user input)
const [isExternalQueryChange, setIsExternalQueryChange] = useState(false);
const [lastExternalValue, setLastExternalValue] = useState<string>('');
const { currentQuery } = useQueryBuilder();
const { currentQuery, handleRunQuery } = useQueryBuilder();
const queryOptions = useMemo(
() =>

View File

@@ -5,6 +5,7 @@ import { EditorView } from '@uiw/react-codemirror';
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as UseQBModule from 'hooks/queryBuilder/useQueryBuilder';
import { fireEvent, render, userEvent, waitFor } from 'tests/test-utils';
import type { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';
@@ -120,8 +121,13 @@ jest.mock('api/querySuggestions/getValueSuggestion', () => ({
// Note: We're NOT mocking CodeMirror here - using the real component
// This provides integration testing with the actual CodeMirror editor
const handleRunQueryMock = ((UseQBModule as unknown) as {
handleRunQuery: jest.MockedFunction<() => void>;
}).handleRunQuery;
const SAMPLE_KEY_TYPING = 'http.';
const SAMPLE_VALUE_TYPING_INCOMPLETE = "service.name = '";
const SAMPLE_VALUE_TYPING_COMPLETE = "service.name = 'frontend'";
const SAMPLE_STATUS_QUERY = "http.status_code = '200'";
describe('QuerySearch (Integration with Real CodeMirror)', () => {

View File

@@ -796,12 +796,12 @@ export const adjustQueryForV5 = (currentQuery: Query): Query => {
});
const {
aggregateAttribute: _aggregateAttribute,
aggregateOperator: _aggregateOperator,
timeAggregation: _timeAggregation,
spaceAggregation: _spaceAggregation,
reduceTo: _reduceTo,
filters: _filters,
aggregateAttribute,
aggregateOperator,
timeAggregation,
spaceAggregation,
reduceTo,
filters,
...retainedQuery
} = query;

View File

@@ -1,5 +1,14 @@
import './Slider.styles.scss';
export default function Slider(): JSX.Element {
import { IQuickFiltersConfig } from 'components/QuickFilters/types';
interface ISliderProps {
filter: IQuickFiltersConfig;
}
// not needed for now; build when required
export default function Slider(props: ISliderProps): JSX.Element {
const { filter } = props;
console.log(filter);
return <div>Slider</div>;
}

View File

@@ -303,9 +303,15 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
/>
);
case FiltersType.DURATION:
return <Duration filter={filter} onFilterChange={onFilterChange} />;
return (
<Duration
filter={filter}
onFilterChange={onFilterChange}
source={source}
/>
);
case FiltersType.SLIDER:
return <Slider />;
return <Slider filter={filter} />;
// eslint-disable-next-line sonarjs/no-duplicated-branches
default:
return (

View File

@@ -1,6 +1,7 @@
/* eslint-disable react/jsx-props-no-spreading */
import { fireEvent, render, screen } from '@testing-library/react';
import { QueryParams } from 'constants/query';
import { initialQueriesMap } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { defaultPostableAlertRuleV2 } from 'container/CreateAlertV2/constants';
import { getCreateAlertLocalStateFromAlertDef } from 'container/CreateAlertV2/utils';

View File

@@ -73,7 +73,7 @@ export function sanitizeDashboardData(
const updatedVariables = Object.entries(selectedData.variables).reduce(
(acc, [key, value]) => {
const { selectedValue: _selectedValue, ...rest } = value;
const { selectedValue, ...rest } = value;
acc[key] = rest;
return acc;
},

View File

@@ -9,11 +9,10 @@ import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { isEmpty } from 'lodash-es';
import { Atom, Terminal } from 'lucide-react';
import { Atom, Play, Terminal } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { AlertTypes } from 'types/api/alerts/alertTypes';
@@ -166,8 +165,9 @@ function QuerySection({
onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<RunQueryBtn
onStageRunQuery={(): void => {
<Button
type="primary"
onClick={(): void => {
runQuery();
logEvent('Alert: Stage and run query', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertType],
@@ -176,7 +176,11 @@ function QuerySection({
queryType: queryCategory,
});
}}
/>
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
</span>
}
items={tabs}
@@ -195,7 +199,14 @@ function QuerySection({
onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<RunQueryBtn onStageRunQuery={runQuery} />
<Button
type="primary"
onClick={runQuery}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
</span>
}
items={items}

View File

@@ -1,6 +1,14 @@
/* eslint-disable react/display-name */
import { PlusOutlined } from '@ant-design/icons';
import { Button, Flex, Input, Typography } from 'antd';
import {
Button,
Dropdown,
Flex,
Input,
MenuProps,
Tag,
Typography,
} from 'antd';
import type { ColumnsType } from 'antd/es/table/interface';
import saveAlertApi from 'api/alerts/save';
import logEvent from 'api/common/logEvent';

View File

@@ -91,7 +91,7 @@ function Summary(): JSX.Element {
const queryFiltersWithoutId = useMemo(() => {
const filtersWithoutId = {
...queryFilters,
items: queryFilters.items.map(({ id: _id, ...rest }) => rest),
items: queryFilters.items.map(({ id, ...rest }) => rest),
};
return JSON.stringify(filtersWithoutId);
}, [queryFilters]);

View File

@@ -12,7 +12,6 @@ import {
getDefaultWidgetData,
PANEL_TYPE_TO_QUERY_TYPES,
} from 'container/NewWidget/utils';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
// import { QueryBuilder } from 'container/QueryBuilder';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
@@ -21,7 +20,7 @@ import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useUrlQuery from 'hooks/useUrlQuery';
import { defaultTo, isUndefined } from 'lodash-es';
import { Atom, Terminal } from 'lucide-react';
import { Atom, Play, Terminal } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
getNextWidgets,
@@ -29,14 +28,20 @@ import {
getSelectedWidgetIndex,
} from 'providers/Dashboard/util';
import { useCallback, useEffect, useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import ClickHouseQueryContainer from './QueryBuilder/clickHouse';
import PromQLQueryContainer from './QueryBuilder/promQL';
function QuerySection({ selectedGraph }: QueryProps): JSX.Element {
function QuerySection({
selectedGraph,
queryResponse,
}: QueryProps): JSX.Element {
const {
currentQuery,
handleRunQuery: handleRunQueryFromQueryBuilder,
@@ -237,7 +242,15 @@ function QuerySection({ selectedGraph }: QueryProps): JSX.Element {
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<TextToolTip text="This will temporarily save the current query and graph state. This will persist across tab change" />
<RunQueryBtn label="Stage & Run Query" onStageRunQuery={handleRunQuery} />
<Button
loading={queryResponse.isFetching}
type="primary"
onClick={handleRunQuery}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
</span>
}
items={items}
@@ -248,6 +261,10 @@ function QuerySection({ selectedGraph }: QueryProps): JSX.Element {
interface QueryProps {
selectedGraph: PANEL_TYPES;
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
}
export default QuerySection;

View File

@@ -64,7 +64,7 @@ function LeftContainer({
enableDrillDown={enableDrillDown}
/>
<QueryContainer className="query-section-left-container">
<QuerySection selectedGraph={selectedGraph} />
<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />
{selectedGraph === PANEL_TYPES.LIST && (
<ExplorerColumnsRenderer
selectedLogFields={selectedLogFields}

View File

@@ -166,7 +166,7 @@ function UpdateContextLinks({
onSave(newContextLink);
} catch (error) {
// Form validation failed, don't call onSave
console.error('Form validation failed:', error);
console.log('Form validation failed:', error);
}
};

View File

@@ -1,37 +0,0 @@
.run-query-btn {
display: flex;
min-width: 132px;
align-items: center;
gap: 6px;
.ant-btn-icon {
margin: 0 !important;
}
}
.cancel-query-btn {
display: flex;
min-width: 132px;
align-items: center;
gap: 6px;
}
.cmd-hint {
display: inline-flex;
align-items: center;
gap: 2px;
padding: 2px 4px;
border-radius: 4px;
//not using var here to support opacity 60%. To be handled at design system level.
background: rgba(35, 38, 46, 0.6);
line-height: 1;
font-size: 10px;
}
.lightMode {
.cmd-hint {
color: var(--bg-ink-200);
//not using var here to support opacity 60%. To be handled at design system level.
background: rgba(231, 232, 236, 0.8);
}
}

View File

@@ -1,53 +0,0 @@
import './RunQueryBtn.scss';
import { Button } from 'antd';
import {
ChevronUp,
Command,
CornerDownLeft,
Loader2,
Play,
} from 'lucide-react';
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';
interface RunQueryBtnProps {
label?: string;
isLoadingQueries?: boolean;
handleCancelQuery?: () => void;
onStageRunQuery?: () => void;
}
function RunQueryBtn({
label,
isLoadingQueries,
handleCancelQuery,
onStageRunQuery,
}: RunQueryBtnProps): JSX.Element {
const isMac = getUserOperatingSystem() === UserOperatingSystem.MACOS;
return isLoadingQueries ? (
<Button
type="default"
icon={<Loader2 size={14} className="loading-icon animate-spin" />}
className="cancel-query-btn periscope-btn danger"
onClick={handleCancelQuery}
>
Cancel
</Button>
) : (
<Button
type="primary"
className="run-query-btn periscope-btn primary"
disabled={isLoadingQueries || !onStageRunQuery}
onClick={onStageRunQuery}
icon={<Play size={14} />}
>
{label || 'Run Query'}
<div className="cmd-hint">
{isMac ? <Command size={12} /> : <ChevronUp size={12} />}
<CornerDownLeft size={12} />
</div>
</Button>
);
}
export default RunQueryBtn;

View File

@@ -1,82 +0,0 @@
// frontend/src/container/QueryBuilder/components/RunQueryBtn/__tests__/RunQueryBtn.test.tsx
import { fireEvent, render, screen } from '@testing-library/react';
import RunQueryBtn from '../RunQueryBtn';
// Mock OS util
jest.mock('utils/getUserOS', () => ({
getUserOperatingSystem: jest.fn(),
UserOperatingSystem: { MACOS: 'mac', WINDOWS: 'win', LINUX: 'linux' },
}));
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';
describe('RunQueryBtn', () => {
test('renders run state and triggers on click', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} />);
const btn = screen.getByRole('button', { name: /run query/i });
expect(btn).toBeEnabled();
fireEvent.click(btn);
expect(onRun).toHaveBeenCalledTimes(1);
});
test('disabled when onStageRunQuery is undefined', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
render(<RunQueryBtn />);
expect(screen.getByRole('button', { name: /run query/i })).toBeDisabled();
});
test('shows cancel state and calls handleCancelQuery', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const onCancel = jest.fn();
render(<RunQueryBtn isLoadingQueries handleCancelQuery={onCancel} />);
const cancel = screen.getByRole('button', { name: /cancel/i });
fireEvent.click(cancel);
expect(onCancel).toHaveBeenCalledTimes(1);
});
test('shows Command + CornerDownLeft on mac', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const { container } = render(
<RunQueryBtn onStageRunQuery={(): void => {}} />,
);
expect(container.querySelector('.lucide-command')).toBeInTheDocument();
expect(
container.querySelector('.lucide-corner-down-left'),
).toBeInTheDocument();
});
test('shows ChevronUp + CornerDownLeft on non-mac', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.WINDOWS,
);
const { container } = render(
<RunQueryBtn onStageRunQuery={(): void => {}} />,
);
expect(container.querySelector('.lucide-chevron-up')).toBeInTheDocument();
expect(container.querySelector('.lucide-command')).not.toBeInTheDocument();
expect(
container.querySelector('.lucide-corner-down-left'),
).toBeInTheDocument();
});
test('renders custom label when provided', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} label="Stage & Run Query" />);
expect(
screen.getByRole('button', { name: /stage & run query/i }),
).toBeInTheDocument();
});
});

View File

@@ -1,12 +1,12 @@
import './ToolbarActions.styles.scss';
import { Button } from 'antd';
import { LogsExplorerShortcuts } from 'constants/shortcuts/logsExplorerShortcuts';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { Loader2, Play } from 'lucide-react';
import { MutableRefObject, useEffect } from 'react';
import { useQueryClient } from 'react-query';
import RunQueryBtn from '../RunQueryBtn/RunQueryBtn';
interface RightToolbarActionsProps {
onStageRunQuery: () => void;
isLoadingQueries?: boolean;
@@ -42,7 +42,14 @@ export default function RightToolbarActions({
if (showLiveLogs) {
return (
<div className="right-toolbar-actions-container">
<RunQueryBtn />
<Button
type="primary"
className="run-query-btn periscope-btn primary"
disabled
icon={<Play size={14} />}
>
Run Query
</Button>
</div>
);
}
@@ -58,11 +65,26 @@ export default function RightToolbarActions({
return (
<div className="right-toolbar-actions-container">
<RunQueryBtn
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
onStageRunQuery={onStageRunQuery}
/>
{isLoadingQueries ? (
<Button
type="default"
icon={<Loader2 size={14} className="loading-icon animate-spin" />}
className="cancel-query-btn periscope-btn danger"
onClick={handleCancelQuery}
>
Cancel
</Button>
) : (
<Button
type="primary"
className="run-query-btn periscope-btn primary"
disabled={isLoadingQueries}
onClick={onStageRunQuery}
icon={<Play size={14} />}
>
Run Query
</Button>
)}
</div>
);
}

View File

@@ -136,6 +136,7 @@ const useAggregateDrilldown = ({
query,
// panelType,
aggregateData: aggregateDataWithTimeRange,
widgetId,
onClose,
});

View File

@@ -129,6 +129,7 @@ const useBaseAggregateOptions = ({
const handleBaseDrilldown = useCallback(
(key: string): void => {
console.log('Base drilldown:', { key, aggregateData });
const route = getRoute(key);
const timeRange = aggregateData?.timeRange;
const filtersToAdd = aggregateData?.filters || [];

View File

@@ -17,6 +17,7 @@ interface UseBaseAggregateOptionsProps {
query?: Query;
// panelType?: PANEL_TYPES;
aggregateData?: AggregateData | null;
widgetId?: string;
onClose: () => void;
}
@@ -26,6 +27,7 @@ const useDashboardVarConfig = ({
query,
// panelType,
aggregateData,
widgetId,
onClose,
}: UseBaseAggregateOptionsProps): {
dashbaordVariablesConfig: {
@@ -81,6 +83,11 @@ const useDashboardVarConfig = ({
dashboardVar: [string, IDashboardVariable],
fieldValue: any,
) => {
console.log('Setting variable:', {
fieldName,
dashboardVarId: dashboardVar[0],
fieldValue,
});
onValueUpdate(fieldName, dashboardVar[1]?.id, fieldValue, false);
onClose();
},
@@ -89,6 +96,10 @@ const useDashboardVarConfig = ({
const handleUnsetVariable = useCallback(
(fieldName: string, dashboardVar: [string, IDashboardVariable]) => {
console.log('Unsetting variable:', {
fieldName,
dashboardVarId: dashboardVar[0],
});
onValueUpdate(fieldName, dashboardVar[0], null, false);
onClose();
},
@@ -98,6 +109,12 @@ const useDashboardVarConfig = ({
const handleCreateVariable = useCallback(
(fieldName: string, fieldValue: string | number | boolean) => {
const source = getSourceFromQuery();
console.log('Creating variable from drilldown:', {
fieldName,
fieldValue,
source,
widgetId,
});
createVariable(
fieldName,
fieldValue,
@@ -108,7 +125,7 @@ const useDashboardVarConfig = ({
);
onClose();
},
[createVariable, getSourceFromQuery, onClose],
[createVariable, getSourceFromQuery, widgetId, onClose],
);
const contextItems = useMemo(

View File

@@ -19,6 +19,20 @@
display: flex;
align-items: center;
gap: 8px;
.cancel-query-btn {
min-width: 96px;
display: flex;
align-items: center;
gap: 2px;
}
.run-query-btn {
min-width: 96px;
display: flex;
align-items: center;
gap: 2px;
}
}
}

View File

@@ -31,8 +31,8 @@ export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
...step.filters,
items: step.filters.items.map((item) => {
const {
id: _unusedId,
isIndexed: _isIndexed,
id: unusedId,
isIndexed,
...keyObj
} = item.key as BaseAutocompleteData;
return {

View File

@@ -143,7 +143,7 @@ export const useValidateFunnelSteps = ({
selectedTime,
steps.map((step) => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { latency_type: _latency_type, ...rest } = step;
const { latency_type, ...rest } = step;
return rest;
}),
],

View File

@@ -55,7 +55,7 @@ const useDragColumns = <T>(storageKey: LOCALSTORAGE): UseDragColumns<T> => {
const parsedDraggedColumns = await JSON.parse(localStorageColumns);
nextDraggedColumns = parsedDraggedColumns;
} catch (e) {
console.error('error while parsing json: ', e);
console.log('error while parsing json');
} finally {
redirectWithDraggedColumns(nextDraggedColumns);
}

View File

@@ -4,6 +4,11 @@ import parser from 'lib/logql/parser';
describe('lib/logql/parser', () => {
test('parse valid queries', () => {
logqlQueries.forEach((queryObject) => {
try {
parser(queryObject.query);
} catch (e) {
console.log(e);
}
expect(parser(queryObject.query)).toEqual(queryObject.parsedQuery);
});
});

View File

@@ -4,7 +4,11 @@ import { reverseParser } from 'lib/logql/reverseParser';
describe('lib/logql/reverseParser', () => {
test('reverse parse valid queries', () => {
logqlQueries.forEach((queryObject) => {
expect(reverseParser(queryObject.parsedQuery)).toEqual(queryObject.query);
try {
expect(reverseParser(queryObject.parsedQuery)).toEqual(queryObject.query);
} catch (e) {
console.log(e);
}
});
});
});

View File

@@ -11,8 +11,8 @@ describe('getYAxisScale', () => {
keyIndex: 1,
thresholdValue: 10,
thresholdUnit: 'percentunit',
moveThreshold(): void {
// no-op
moveThreshold(dragIndex, hoverIndex): void {
console.log(dragIndex, hoverIndex);
},
selectedGraph: PANEL_TYPES.TIME_SERIES,
},
@@ -21,8 +21,8 @@ describe('getYAxisScale', () => {
keyIndex: 2,
thresholdValue: 20,
thresholdUnit: 'percentunit',
moveThreshold(): void {
// no-op
moveThreshold(dragIndex, hoverIndex): void {
console.log(dragIndex, hoverIndex);
},
selectedGraph: PANEL_TYPES.TIME_SERIES,
},

View File

@@ -9,6 +9,7 @@ import {
MessagingQueuesPayloadProps,
} from 'api/messagingQueues/getConsumerLagDetails';
import axios from 'axios';
import { isNumber } from 'chart.js/helpers';
import cx from 'classnames';
import { ColumnTypeRender } from 'components/Logs/TableView/types';
import { SOMETHING_WENT_WRONG } from 'constants/api';

View File

@@ -275,7 +275,7 @@ export function setConfigDetail(
},
): void {
// remove "key" and its value from the paramsToSet object
const { key: _key, ...restParamsToSet } = paramsToSet || {};
const { key, ...restParamsToSet } = paramsToSet || {};
if (!isEmpty(restParamsToSet)) {
const configDetail = {

View File

@@ -145,7 +145,7 @@ export const removeFilter = (
const updatedValues = prevValue.filter((item: any) => item !== value);
if (updatedValues.length === 0) {
const { [filterType]: _item, ...remainingFilters } = prevFilters;
const { [filterType]: item, ...remainingFilters } = prevFilters;
return Object.keys(remainingFilters).length > 0
? (remainingFilters as FilterType)
: undefined;
@@ -175,7 +175,7 @@ export const removeAllFilters = (
return prevFilters;
}
const { [filterType]: _item, ...remainingFilters } = prevFilters;
const { [filterType]: item, ...remainingFilters } = prevFilters;
return Object.keys(remainingFilters).length > 0
? (remainingFilters as Record<

View File

@@ -20,7 +20,8 @@ export const parseQueryIntoSpanKind = (
current = parsedValue;
}
} catch (error) {
console.error('error while parsing json: ', error);
console.log(error);
console.log('error while parsing json');
}
}

View File

@@ -19,7 +19,7 @@ export const parseQueryIntoCurrent = (
current = parseInt(parsedValue, 10);
}
} catch (error) {
console.error('error while parsing json: ', error);
console.log('error while parsing json');
}
}

View File

@@ -20,7 +20,8 @@ export const parseQueryIntoOrder = (
current = parsedValue;
}
} catch (error) {
console.error('error while parsing json: ', error);
console.log(error);
console.log('error while parsing json');
}
}

View File

@@ -20,7 +20,8 @@ export const parseAggregateOrderParams = (
current = parsedValue;
}
} catch (error) {
console.error('error while parsing json: ', error);
console.log(error);
console.log('error while parsing json');
}
}

View File

@@ -81,10 +81,8 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
// Filter out columns for intermediate queries used only in formulas
filteredColumns := make([]*qbtypes.ColumnDescriptor, 0)
intermediateQueryNames := map[string]bool{
"error": true,
"total_span": true,
"endpoints_current": true,
"endpoints_legacy": true,
"error": true,
"total_span": true,
}
columnIndices := make([]int, 0)
@@ -298,15 +296,15 @@ func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.Que
return nil, err
}
queries := buildEndpointsQueries(req)
queries = append(queries,
queries := []qbtypes.QueryEnvelope{
buildEndpointsQuery(req),
buildLastSeenQuery(req),
buildRpsQuery(req),
buildErrorQuery(req),
buildTotalSpanQuery(req),
buildP99Query(req),
buildErrorRateFormula(),
)
}
return &qbtypes.QueryRangeRequest{
SchemaVersion: "v5",
@@ -348,58 +346,20 @@ func BuildDomainInfo(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.Que
}, nil
}
// buildEndpointsQueries returns queries for counting distinct URLs with semconv fallback.
// It uses two queries with mutually exclusive filters:
// - endpoints_current: count_distinct(url.full) WHERE url.full EXISTS
// - endpoints_legacy: count_distinct(http.url) WHERE url.full NOT EXISTS
// And a formula to combine them: endpoints_current + endpoints_legacy
func buildEndpointsQueries(req *thirdpartyapitypes.ThirdPartyApiRequest) []qbtypes.QueryEnvelope {
// Query for current semconv (url.full)
currentFilter := buildBaseFilter(req.Filter)
currentFilter.Expression = fmt.Sprintf("(%s) AND %s EXISTS", currentFilter.Expression, urlPathKey)
endpointsCurrent := qbtypes.QueryEnvelope{
func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
return qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "endpoints_current",
Name: "endpoints",
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: fmt.Sprintf("count_distinct(%s)", urlPathKey)},
{Expression: "count_distinct(http.url)"},
},
Filter: currentFilter,
Filter: buildBaseFilter(req.Filter),
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
// Query for legacy semconv (http.url) - only when url.full doesn't exist
legacyFilter := buildBaseFilter(req.Filter)
legacyFilter.Expression = fmt.Sprintf("(%s) AND %s NOT EXISTS", legacyFilter.Expression, urlPathKey)
endpointsLegacy := qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "endpoints_legacy",
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: fmt.Sprintf("count_distinct(%s)", urlPathKeyLegacy)},
},
Filter: legacyFilter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}
// Formula to combine both counts
endpointsFormula := qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeFormula,
Spec: qbtypes.QueryBuilderFormula{
Name: "endpoints",
Expression: "endpoints_current + endpoints_legacy",
},
}
return []qbtypes.QueryEnvelope{endpointsCurrent, endpointsLegacy, endpointsFormula}
}
func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {

View File

@@ -235,28 +235,31 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
switch spec := item.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec

View File

@@ -36,6 +36,29 @@ func NewBucketCache(settings factory.ProviderSettings, cache cache.Cache, cacheT
}
}
// cachedBucket represents a cached time bucket
type cachedBucket struct {
StartMs uint64 `json:"startMs"`
EndMs uint64 `json:"endMs"`
Type qbtypes.RequestType `json:"type"`
Value json.RawMessage `json:"value"`
Stats qbtypes.ExecStats `json:"stats"`
}
// cachedData represents the full cached data for a query
type cachedData struct {
Buckets []*cachedBucket `json:"buckets"`
Warnings []string `json:"warnings"`
}
func (c *cachedData) UnmarshalBinary(data []byte) error {
return json.Unmarshal(data, c)
}
func (c *cachedData) MarshalBinary() ([]byte, error) {
return json.Marshal(c)
}
// GetMissRanges returns cached data and missing time ranges
func (bc *bucketCache) GetMissRanges(
ctx context.Context,
@@ -55,7 +78,7 @@ func (bc *bucketCache) GetMissRanges(
bc.logger.DebugContext(ctx, "cache key", "cache_key", cacheKey)
// Try to get cached data
var data qbtypes.CachedData
var data cachedData
err := bc.cache.Get(ctx, orgID, cacheKey, &data)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
@@ -124,9 +147,9 @@ func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Que
cacheKey := bc.generateCacheKey(q)
// Get existing cached data
var existingData qbtypes.CachedData
var existingData cachedData
if err := bc.cache.Get(ctx, orgID, cacheKey, &existingData); err != nil {
existingData = qbtypes.CachedData{}
existingData = cachedData{}
}
// Trim the result to exclude data within flux interval
@@ -180,7 +203,7 @@ func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Que
uniqueWarnings := bc.deduplicateWarnings(allWarnings)
// Create updated cached data
updatedData := qbtypes.CachedData{
updatedData := cachedData{
Buckets: mergedBuckets,
Warnings: uniqueWarnings,
}
@@ -199,7 +222,7 @@ func (bc *bucketCache) generateCacheKey(q qbtypes.Query) string {
}
// findMissingRangesWithStep identifies time ranges not covered by cached buckets with step alignment
func (bc *bucketCache) findMissingRangesWithStep(buckets []*qbtypes.CachedBucket, startMs, endMs uint64, stepMs uint64) []*qbtypes.TimeRange {
func (bc *bucketCache) findMissingRangesWithStep(buckets []*cachedBucket, startMs, endMs uint64, stepMs uint64) []*qbtypes.TimeRange {
// When step is 0 or window is too small to be cached, use simple algorithm
if stepMs == 0 || (startMs+stepMs) > endMs {
return bc.findMissingRangesBasic(buckets, startMs, endMs)
@@ -242,7 +265,7 @@ func (bc *bucketCache) findMissingRangesWithStep(buckets []*qbtypes.CachedBucket
}
if needsSort {
slices.SortFunc(buckets, func(a, b *qbtypes.CachedBucket) int {
slices.SortFunc(buckets, func(a, b *cachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}
@@ -316,7 +339,7 @@ func (bc *bucketCache) findMissingRangesWithStep(buckets []*qbtypes.CachedBucket
}
// findMissingRangesBasic is the simple algorithm without step alignment
func (bc *bucketCache) findMissingRangesBasic(buckets []*qbtypes.CachedBucket, startMs, endMs uint64) []*qbtypes.TimeRange {
func (bc *bucketCache) findMissingRangesBasic(buckets []*cachedBucket, startMs, endMs uint64) []*qbtypes.TimeRange {
// Check if already sorted before sorting
needsSort := false
for i := 1; i < len(buckets); i++ {
@@ -327,7 +350,7 @@ func (bc *bucketCache) findMissingRangesBasic(buckets []*qbtypes.CachedBucket, s
}
if needsSort {
slices.SortFunc(buckets, func(a, b *qbtypes.CachedBucket) int {
slices.SortFunc(buckets, func(a, b *cachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}
@@ -398,9 +421,9 @@ func (bc *bucketCache) findMissingRangesBasic(buckets []*qbtypes.CachedBucket, s
}
// filterRelevantBuckets returns buckets that overlap with the requested time range
func (bc *bucketCache) filterRelevantBuckets(buckets []*qbtypes.CachedBucket, startMs, endMs uint64) []*qbtypes.CachedBucket {
func (bc *bucketCache) filterRelevantBuckets(buckets []*cachedBucket, startMs, endMs uint64) []*cachedBucket {
// Pre-allocate with estimated capacity
relevant := make([]*qbtypes.CachedBucket, 0, len(buckets))
relevant := make([]*cachedBucket, 0, len(buckets))
for _, bucket := range buckets {
// Check if bucket overlaps with requested range
@@ -410,7 +433,7 @@ func (bc *bucketCache) filterRelevantBuckets(buckets []*qbtypes.CachedBucket, st
}
// Sort by start time
slices.SortFunc(relevant, func(a, b *qbtypes.CachedBucket) int {
slices.SortFunc(relevant, func(a, b *cachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}
@@ -424,7 +447,7 @@ func (bc *bucketCache) filterRelevantBuckets(buckets []*qbtypes.CachedBucket, st
}
// mergeBuckets combines multiple cached buckets into a single result
func (bc *bucketCache) mergeBuckets(ctx context.Context, buckets []*qbtypes.CachedBucket, warnings []string) *qbtypes.Result {
func (bc *bucketCache) mergeBuckets(ctx context.Context, buckets []*cachedBucket, warnings []string) *qbtypes.Result {
if len(buckets) == 0 {
return &qbtypes.Result{}
}
@@ -457,7 +480,7 @@ func (bc *bucketCache) mergeBuckets(ctx context.Context, buckets []*qbtypes.Cach
}
// mergeTimeSeriesValues merges time series data from multiple buckets
func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*qbtypes.CachedBucket) *qbtypes.TimeSeriesData {
func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cachedBucket) *qbtypes.TimeSeriesData {
// Estimate capacity based on bucket count
estimatedSeries := len(buckets) * 10
@@ -608,7 +631,7 @@ func (bc *bucketCache) isEmptyResult(result *qbtypes.Result) (isEmpty bool, isFi
}
// resultToBuckets converts a query result into time-based buckets
func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Result, startMs, endMs uint64) []*qbtypes.CachedBucket {
func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Result, startMs, endMs uint64) []*cachedBucket {
// Check if result is empty
isEmpty, isFiltered := bc.isEmptyResult(result)
@@ -629,7 +652,7 @@ func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Resu
// Always create a bucket, even for empty filtered results
// This ensures we don't re-query for data that doesn't exist
return []*qbtypes.CachedBucket{
return []*cachedBucket{
{
StartMs: startMs,
EndMs: endMs,
@@ -641,9 +664,9 @@ func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Resu
}
// mergeAndDeduplicateBuckets combines and deduplicates bucket lists
func (bc *bucketCache) mergeAndDeduplicateBuckets(existing, fresh []*qbtypes.CachedBucket) []*qbtypes.CachedBucket {
func (bc *bucketCache) mergeAndDeduplicateBuckets(existing, fresh []*cachedBucket) []*cachedBucket {
// Create a map to deduplicate by time range
bucketMap := make(map[string]*qbtypes.CachedBucket)
bucketMap := make(map[string]*cachedBucket)
// Add existing buckets
for _, bucket := range existing {
@@ -658,13 +681,13 @@ func (bc *bucketCache) mergeAndDeduplicateBuckets(existing, fresh []*qbtypes.Cac
}
// Convert back to slice with pre-allocated capacity
result := make([]*qbtypes.CachedBucket, 0, len(bucketMap))
result := make([]*cachedBucket, 0, len(bucketMap))
for _, bucket := range bucketMap {
result = append(result, bucket)
}
// Sort by start time
slices.SortFunc(result, func(a, b *qbtypes.CachedBucket) int {
slices.SortFunc(result, func(a, b *cachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}
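
The MarshalBinary/UnmarshalBinary pair on cachedData (added above) is what lets the cache layer round-trip the now package-private type; a minimal encode/decode sketch with hypothetical values:

  data := cachedData{Buckets: []*cachedBucket{{StartMs: 1000, EndMs: 2000, Type: qbtypes.RequestTypeTimeSeries}}}
  raw, err := data.MarshalBinary() // JSON bytes, matching the struct tags above
  if err == nil {
      var decoded cachedData
      _ = decoded.UnmarshalBinary(raw) // decoded.Buckets[0].StartMs == 1000
  }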

View File

@@ -147,14 +147,14 @@ func BenchmarkBucketCache_MergeTimeSeriesValues(b *testing.B) {
for _, tc := range testCases {
b.Run(tc.name, func(b *testing.B) {
// Create test buckets
buckets := make([]*qbtypes.CachedBucket, tc.numBuckets)
buckets := make([]*cachedBucket, tc.numBuckets)
for i := 0; i < tc.numBuckets; i++ {
startMs := uint64(i * 10000)
endMs := uint64((i + 1) * 10000)
result := createBenchmarkResultWithSeries(startMs, endMs, 1000, tc.numSeries, tc.numValues)
valueBytes, _ := json.Marshal(result.Value)
buckets[i] = &qbtypes.CachedBucket{
buckets[i] = &cachedBucket{
StartMs: startMs,
EndMs: endMs,
Type: qbtypes.RequestTypeTimeSeries,
@@ -417,8 +417,8 @@ func createBenchmarkResultWithSeries(startMs, endMs uint64, _ uint64, numSeries,
}
// Helper function to create buckets with specific gap patterns
func createBucketsWithPattern(numBuckets int, pattern string) []*qbtypes.CachedBucket {
buckets := make([]*qbtypes.CachedBucket, 0, numBuckets)
func createBucketsWithPattern(numBuckets int, pattern string) []*cachedBucket {
buckets := make([]*cachedBucket, 0, numBuckets)
for i := 0; i < numBuckets; i++ {
// Skip some buckets based on pattern
@@ -432,7 +432,7 @@ func createBucketsWithPattern(numBuckets int, pattern string) []*qbtypes.CachedB
startMs := uint64(i * 10000)
endMs := uint64((i + 1) * 10000)
buckets = append(buckets, &qbtypes.CachedBucket{
buckets = append(buckets, &cachedBucket{
StartMs: startMs,
EndMs: endMs,
Type: qbtypes.RequestTypeTimeSeries,

View File

@@ -521,7 +521,7 @@ func TestBucketCache_FindMissingRanges_EdgeCases(t *testing.T) {
bc := NewBucketCache(instrumentationtest.New().ToProviderSettings(), memCache, cacheTTL, defaultFluxInterval).(*bucketCache)
// Test with buckets that have gaps and overlaps
buckets := []*qbtypes.CachedBucket{
buckets := []*cachedBucket{
{StartMs: 1000, EndMs: 2000},
{StartMs: 2500, EndMs: 3500},
{StartMs: 3000, EndMs: 4000}, // Overlaps with previous
@@ -1097,7 +1097,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
tests := []struct {
name string
buckets []*qbtypes.CachedBucket
buckets []*cachedBucket
startMs uint64
endMs uint64
stepMs uint64
@@ -1106,7 +1106,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
}{
{
name: "start_not_aligned_to_step",
buckets: []*qbtypes.CachedBucket{},
buckets: []*cachedBucket{},
startMs: 1500, // Not aligned to 1000ms step
endMs: 5000,
stepMs: 1000,
@@ -1118,7 +1118,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "end_not_aligned_to_step",
buckets: []*qbtypes.CachedBucket{},
buckets: []*cachedBucket{},
startMs: 1000,
endMs: 4500, // Not aligned to 1000ms step
stepMs: 1000,
@@ -1129,7 +1129,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "bucket_boundaries_not_aligned",
buckets: []*qbtypes.CachedBucket{
buckets: []*cachedBucket{
{StartMs: 1500, EndMs: 2500}, // Not aligned
},
startMs: 1000,
@@ -1143,7 +1143,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "small_window_less_than_step",
buckets: []*qbtypes.CachedBucket{},
buckets: []*cachedBucket{},
startMs: 1000,
endMs: 1500, // Less than one step
stepMs: 1000,
@@ -1154,7 +1154,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "zero_step_uses_basic_algorithm",
buckets: []*qbtypes.CachedBucket{},
buckets: []*cachedBucket{},
startMs: 1000,
endMs: 5000,
stepMs: 0,

View File

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"log/slog"
"sort"
"strconv"
"strings"
@@ -164,6 +165,7 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64
return nil, combinedErrors.WithAdditional(visitor.errors...).WithUrl(url)
}
// if there is nothing to filter, return true
if cond == "" {
cond = "true"
}
@@ -221,7 +223,6 @@ func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
}
func (v *filterExpressionVisitor) VisitQuery(ctx *grammar.QueryContext) any {
return v.Visit(ctx.Expression())
}
@@ -503,8 +504,16 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
// Get all values for operations that need them
values := ctx.AllValue()
if len(values) > 0 {
// there should be only one value for the following operators; even if more
// than one is present, we just take the first value
value := v.Visit(values[0])
// Check if the value is a skip marker (embedded variable with __all__ value)
if strVal, ok := value.(string); ok && strVal == specialSkipConditionMarker {
v.logger.Info("skipping condition due to __all__ variable", "keys", keys, "value", value) //nolint:sloglint
return ""
}
if var_, ok := value.(string); ok {
// check if this is a variable
var ok bool
@@ -516,6 +525,13 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
if ok {
if varItem.Type == qbtypes.DynamicVariableType {
if all_, ok := varItem.Value.(string); ok && all_ == "__all__" {
// this was likely overlooked by the user; we treat it as if it were IN instead of =
v.logger.Warn("received unexpected __all__ value for single select dynamic variable", "variable", var_, "keys", keys, "value", value) //nolint:sloglint
return ""
}
}
switch varValues := varItem.Value.(type) {
case []any:
if len(varValues) == 0 {
@@ -781,7 +797,12 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value
return trimQuotes(txt)
value := trimQuotes(txt)
// Check if the string contains embedded variables
if strings.Contains(value, "$") {
return v.interpolateVariablesInString(value)
}
return value
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {
@@ -798,7 +819,12 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
// When the user writes an expression like `service.name=redis`
// The `redis` part is a VALUE context but parsed as a KEY token
// so we return the text as is
return ctx.KEY().GetText()
keyText := ctx.KEY().GetText()
// Check if this is a composed variable like $environment-xyz
if strings.HasPrefix(keyText, "$") {
return v.interpolateVariablesInString(keyText)
}
return keyText
}
return "" // Should not happen with valid input
@@ -830,7 +856,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// 1. either user meant key ( this is already handled above in fieldKeysForName )
// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'
// Note:
// Note:
// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
// If user only wants to search `key`, then they have to use `key`
// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -928,3 +954,103 @@ func trimQuotes(txt string) string {
txt = strings.ReplaceAll(txt, `\'`, `'`)
return txt
}
// specialSkipConditionMarker is used to indicate that the entire condition should be removed
const specialSkipConditionMarker = "__signoz_skip_condition__"
// interpolateVariablesInString finds and replaces variable references within a string
// by checking against actual variable names in the variables map.
// Pure variable references (e.g., "$service") are returned as-is to let the
// existing variable handling code process them.
// Returns specialSkipConditionMarker if any variable has __all__ value.
func (v *filterExpressionVisitor) interpolateVariablesInString(s string) string {
// check whether this is a complete variable reference (just $varname with
// nothing else); if so, return it as-is
varName := s
if strings.HasPrefix(varName, "$") {
_, exactMatch := v.variables[varName]
if !exactMatch {
_, exactMatch = v.variables[varName[1:]]
}
if exactMatch {
return s
}
}
result := s
// find and replace variables by checking each variable name in the map
// process longer variable names first so a name that is a prefix of another (e.g. env vs environment) is not matched prematurely
varNames := make([]string, 0, len(v.variables)*2)
for name := range v.variables {
varNames = append(varNames, name)
// add with $ prefix if not already present
if !strings.HasPrefix(name, "$") {
varNames = append(varNames, "$"+name)
}
}
// sort by length (longest first) to match longer variable names before shorter ones
sort.Slice(varNames, func(i, j int) bool {
return len(varNames[i]) > len(varNames[j])
})
for _, vName := range varNames {
searchPattern := vName
if !strings.HasPrefix(searchPattern, "$") {
searchPattern = "$" + vName
}
if strings.Contains(result, searchPattern) {
// direct lookup
varItem, ok := v.variables[vName]
if !ok {
// Try without $ prefix
varItem, ok = v.variables[strings.TrimPrefix(vName, "$")]
}
if ok {
// special check for __all__ value - skip the entire condition
if varItem.Type == qbtypes.DynamicVariableType {
if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
return specialSkipConditionMarker
}
}
replacement := v.formatVariableValueForInterpolation(varItem.Value, strings.TrimPrefix(vName, "$"))
result = strings.ReplaceAll(result, searchPattern, replacement)
}
}
}
return result
}
func (v *filterExpressionVisitor) formatVariableValueForInterpolation(value any, varName string) string {
switch val := value.(type) {
case string:
return val
case []string:
if len(val) > 1 {
v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, val[0]))
}
if len(val) > 0 {
return val[0]
}
return ""
case []any:
if len(val) > 1 {
v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
}
if len(val) > 0 {
return v.formatVariableValueForInterpolation(val[0], varName)
}
return ""
case int, int32, int64, float32, float64:
return fmt.Sprintf("%v", val)
case bool:
return strconv.FormatBool(val)
default:
return fmt.Sprintf("%v", val)
}
}
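
A compact summary of how interpolateVariablesInString resolves composed values, drawn from the visitor code above (the variable values here are illustrative):

  // env = "prod", environment = "production": longest name matched first
  "$env-$environment"  ->  "prod-production"
  // env = ["prod", "staging", "dev"]: first value used, a warning is recorded
  "$env-suffix"        ->  "prod-suffix"
  // env = "__all__" on a dynamic variable: the whole condition is skipped
  "$env-suffix"        ->  specialSkipConditionMarker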

View File

@@ -10,8 +10,113 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
sqlbuilder "github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
)
// TestInterpolateVariablesInString tests the embedded variable interpolation feature (GitHub issue #10008)
func TestInterpolateVariablesInString(t *testing.T) {
tests := []struct {
name string
input string
variables map[string]qbtypes.VariableItem
expected string
}{
{
name: "pure variable reference - not interpolated",
input: "$service",
variables: map[string]qbtypes.VariableItem{
"service": {Value: "auth-service"},
},
expected: "$service", // Pure variables are handled by existing code
},
{
name: "variable composed with suffix",
input: "$environment-xyz",
variables: map[string]qbtypes.VariableItem{
"environment": {Value: "prod"},
},
expected: "prod-xyz",
},
{
name: "variable in quoted string with suffix",
input: "$env-cluster",
variables: map[string]qbtypes.VariableItem{
"env": {Value: "staging"},
},
expected: "staging-cluster",
},
{
name: "variable with prefix and suffix",
input: "prefix-$var-suffix",
variables: map[string]qbtypes.VariableItem{
"var": {Value: "middle"},
},
expected: "prefix-middle-suffix",
},
{
name: "multiple variables in one string",
input: "$region-$env-cluster",
variables: map[string]qbtypes.VariableItem{
"region": {Value: "us-west"},
"env": {Value: "prod"},
},
expected: "us-west-prod-cluster",
},
{
name: "similar variable names - longer matches first",
input: "$env-$environment",
variables: map[string]qbtypes.VariableItem{
"env": {Value: "dev"},
"environment": {Value: "production"},
},
expected: "dev-production",
},
{
name: "unknown variable - preserved as-is",
input: "$unknown-suffix",
variables: map[string]qbtypes.VariableItem{},
expected: "$unknown-suffix",
},
{
name: "variable with underscore",
input: "$my_var-test",
variables: map[string]qbtypes.VariableItem{
"my_var": {Value: "hello"},
},
expected: "hello-test",
},
{
name: "__all__ value returns skip marker",
input: "$env-suffix",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
expected: specialSkipConditionMarker,
},
{
name: "multi-select takes first value",
input: "$env-suffix",
variables: map[string]qbtypes.VariableItem{
"env": {Value: []any{"prod", "staging", "dev"}},
},
expected: "prod-suffix",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
visitor := &filterExpressionVisitor{
variables: tt.variables,
}
result := visitor.interpolateVariablesInString(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}
// TestPrepareWhereClause_EmptyVariableList ensures PrepareWhereClause errors when a variable has an empty list value
func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
tests := []struct {
@@ -42,7 +147,7 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
}
keys := map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -154,7 +259,7 @@ func TestVisitKey(t *testing.T) {
name: "Key not found",
keyText: "unknown_key",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -335,7 +440,7 @@ func TestVisitKey(t *testing.T) {
name: "Unknown key with ignoreNotFoundKeys=false",
keyText: "unknown_key",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -355,7 +460,7 @@ func TestVisitKey(t *testing.T) {
name: "Unknown key with ignoreNotFoundKeys=true",
keyText: "unknown_key",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -467,7 +572,7 @@ func TestVisitKey(t *testing.T) {
expectedWarnings: nil,
expectedMainWrnURL: "",
},
{
{
name: "only attribute.custom_field is selected",
keyText: "attribute.attribute.custom_field",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{

View File

@@ -0,0 +1,191 @@
package telemetrylogs
import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
func TestFilterExprEmbeddedVariables(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
keys := buildCompleteFieldKeyMap()
testCases := []struct {
name string
query string
variables map[string]qbtypes.VariableItem
shouldPass bool
expectedQuery string
expectedArgs []any
}{
{
name: "variable composed with suffix in quoted string",
query: "version = '$env-xyz'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
expectedArgs: []any{"prod-xyz", true},
},
{
name: "variable in LIKE pattern with suffix",
query: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"prod%"},
},
{
name: "variable with prefix and suffix",
query: "path = 'prefix-$var-suffix'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "middle",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
expectedArgs: []any{"prefix-middle-suffix", true},
},
{
name: "multiple variables in one string",
query: "path = '$region-$env-cluster'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
expectedArgs: []any{"us-west-prod-cluster", true},
},
{
name: "similar variable names - longer matches first",
query: "path = '$env-$environment'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "dev",
},
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "production",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
expectedArgs: []any{"dev-production", true},
},
{
name: "pure variable reference - still works",
query: "service.name = $service",
variables: map[string]qbtypes.VariableItem{
"service": {
Type: qbtypes.DynamicVariableType,
Value: "auth-service",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"auth-service"},
},
{
name: "variable with underscore composed with suffix",
query: "version = '$my_var-test'",
variables: map[string]qbtypes.VariableItem{
"my_var": {
Type: qbtypes.DynamicVariableType,
Value: "hello",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
expectedArgs: []any{"hello-test", true},
},
{
name: "variable in ILIKE pattern",
query: "message ILIKE '%$pattern%'",
variables: map[string]qbtypes.VariableItem{
"pattern": {
Type: qbtypes.DynamicVariableType,
Value: "error",
},
},
shouldPass: true,
expectedQuery: "WHERE (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)",
expectedArgs: []any{"%error%", true},
},
{
name: "__all__ value skips condition",
query: "version = '$env-xyz'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
shouldPass: true,
expectedQuery: "WHERE true",
expectedArgs: nil,
},
{
name: "multi-select takes first value",
query: "version = '$env-xyz'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: []any{"prod", "staging", "dev"},
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
expectedArgs: []any{"prod-xyz", true},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: DefaultFullTextColumn,
JsonKeyToKey: GetBodyJSONKey,
Variables: tc.variables,
}
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
require.NoError(t, err)
require.NotNil(t, clause)
sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err)
}
})
}
}

View File

@@ -0,0 +1,235 @@
package telemetrymetrics
import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
func buildMetricsFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
return map[string][]*telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
Signal: telemetrytypes.SignalMetrics,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"host.name": {
{
Name: "host.name",
Signal: telemetrytypes.SignalMetrics,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"environment": {
{
Name: "environment",
Signal: telemetrytypes.SignalMetrics,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"region": {
{
Name: "region",
Signal: telemetrytypes.SignalMetrics,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"cluster": {
{
Name: "cluster",
Signal: telemetrytypes.SignalMetrics,
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
}
}
func TestFilterExprEmbeddedVariables(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
keys := buildMetricsFieldKeyMap()
testCases := []struct {
name string
query string
variables map[string]qbtypes.VariableItem
shouldPass bool
expectedQuery string
expectedArgs []any
}{
{
name: "variable composed with suffix in quoted string",
query: "host.name = '$env-server'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
expectedArgs: []any{"prod-server"},
},
{
name: "variable in LIKE pattern with suffix",
query: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'service.name') LIKE ?",
expectedArgs: []any{"prod%"},
},
{
name: "variable with prefix and suffix",
query: "cluster = 'prefix-$var-suffix'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "middle",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
expectedArgs: []any{"prefix-middle-suffix"},
},
{
name: "multiple variables in one string",
query: "cluster = '$region-$env-cluster'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
expectedArgs: []any{"us-west-prod-cluster"},
},
{
name: "similar variable names - longer matches first",
query: "cluster = '$env-$environment'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "dev",
},
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "production",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
expectedArgs: []any{"dev-production"},
},
{
name: "pure variable reference - still works",
query: "service.name = $service",
variables: map[string]qbtypes.VariableItem{
"service": {
Type: qbtypes.DynamicVariableType,
Value: "auth-service",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'service.name') = ?",
expectedArgs: []any{"auth-service"},
},
{
name: "variable with underscore composed with suffix",
query: "host.name = '$my_var-test'",
variables: map[string]qbtypes.VariableItem{
"my_var": {
Type: qbtypes.DynamicVariableType,
Value: "hello",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
expectedArgs: []any{"hello-test"},
},
{
name: "variable in ILIKE pattern",
query: "environment ILIKE '%$pattern%'",
variables: map[string]qbtypes.VariableItem{
"pattern": {
Type: qbtypes.DynamicVariableType,
Value: "staging",
},
},
shouldPass: true,
expectedQuery: "WHERE LOWER(JSONExtractString(labels, 'environment')) LIKE LOWER(?)",
expectedArgs: []any{"%staging%"},
},
{
name: "__all__ value skips condition",
query: "host.name = '$env-server'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
shouldPass: true,
expectedQuery: "WHERE true",
expectedArgs: nil,
},
{
name: "multi-select takes first value",
query: "host.name = '$env-server'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: []any{"prod", "staging", "dev"},
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
expectedArgs: []any{"prod-server"},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
Variables: tc.variables,
}
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
require.NoError(t, err)
require.NotNil(t, clause)
sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err)
}
})
}
}

View File

@@ -279,7 +279,7 @@ func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.Telemetry
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "span scope field %s only supports '=' operator", key.Name)
}
var isTrue bool
isTrue := false
switch v := value.(type) {
case bool:
isTrue = v

View File

@@ -0,0 +1,146 @@
package telemetrytraces
import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
func TestFilterExprEmbeddedVariables(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
testCases := []struct {
name string
query string
variables map[string]qbtypes.VariableItem
shouldPass bool
expectedQuery string
expectedArgs []any
}{
{
name: "variable composed with suffix in quoted string",
query: "http.method = '$method-request'",
variables: map[string]qbtypes.VariableItem{
"method": {
Type: qbtypes.DynamicVariableType,
Value: "GET",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
expectedArgs: []any{"GET-request", true},
},
{
name: "variable in LIKE pattern with suffix",
query: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"prod%"},
},
{
name: "variable with prefix and suffix",
query: "user.id = 'user-$var-id'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "123",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"user-123-id", true},
},
{
name: "multiple variables in one string",
query: "user.id = '$region-$env-user'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"us-west-prod-user", true},
},
{
name: "pure variable reference - still works",
query: "service.name = $service",
variables: map[string]qbtypes.VariableItem{
"service": {
Type: qbtypes.DynamicVariableType,
Value: "auth-service",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"auth-service"},
},
{
name: "__all__ value skips condition",
query: "http.method = '$method-request'",
variables: map[string]qbtypes.VariableItem{
"method": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
shouldPass: true,
expectedQuery: "WHERE true",
expectedArgs: nil,
},
{
name: "multi-select takes first value",
query: "http.method = '$method-request'",
variables: map[string]qbtypes.VariableItem{
"method": {
Type: qbtypes.DynamicVariableType,
Value: []any{"GET", "POST", "PUT"},
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
expectedArgs: []any{"GET-request", true},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
Variables: tc.variables,
}
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
require.NoError(t, err)
require.NotNil(t, clause)
sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err)
}
})
}
}

View File

@@ -1,61 +0,0 @@
package querybuildertypesv5
import (
"bytes"
"encoding/json"
"maps"
"github.com/SigNoz/signoz/pkg/types/cachetypes"
)
var _ cachetypes.Cacheable = (*CachedData)(nil)
type CachedBucket struct {
StartMs uint64 `json:"startMs"`
EndMs uint64 `json:"endMs"`
Type RequestType `json:"type"`
Value json.RawMessage `json:"value"`
Stats ExecStats `json:"stats"`
}
func (c *CachedBucket) Clone() *CachedBucket {
return &CachedBucket{
StartMs: c.StartMs,
EndMs: c.EndMs,
Type: c.Type,
Value: bytes.Clone(c.Value),
Stats: ExecStats{
RowsScanned: c.Stats.RowsScanned,
BytesScanned: c.Stats.BytesScanned,
DurationMS: c.Stats.DurationMS,
StepIntervals: maps.Clone(c.Stats.StepIntervals),
},
}
}
// CachedData represents the full cached data for a query
type CachedData struct {
Buckets []*CachedBucket `json:"buckets"`
Warnings []string `json:"warnings"`
}
func (c *CachedData) UnmarshalBinary(data []byte) error {
return json.Unmarshal(data, c)
}
func (c *CachedData) MarshalBinary() ([]byte, error) {
return json.Marshal(c)
}
func (c *CachedData) Clone() cachetypes.Cacheable {
clonedCachedData := new(CachedData)
clonedCachedData.Buckets = make([]*CachedBucket, len(c.Buckets))
for i := range c.Buckets {
clonedCachedData.Buckets[i] = c.Buckets[i].Clone()
}
clonedCachedData.Warnings = make([]string, len(c.Warnings))
copy(clonedCachedData.Warnings, c.Warnings)
return clonedCachedData
}

View File

@@ -1,87 +0,0 @@
package querybuildertypesv5
import (
"encoding/json"
"testing"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
func createBuckets_TimeSeries(numBuckets int) []*CachedBucket {
buckets := make([]*CachedBucket, numBuckets)
for i := 0; i < numBuckets; i++ {
startMs := uint64(i * 10000)
endMs := uint64((i + 1) * 10000)
timeSeriesData := &TimeSeriesData{
QueryName: "A",
Aggregations: []*AggregationBucket{
{
Index: 0,
Series: []*TimeSeries{
{
Labels: []*Label{
{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"},
},
Values: []*TimeSeriesValue{
{Timestamp: 1672563720000, Value: 1, Partial: true}, // 12:02
{Timestamp: 1672563900000, Value: 2}, // 12:05
{Timestamp: 1672564200000, Value: 2.5}, // 12:10
{Timestamp: 1672564500000, Value: 2.6}, // 12:15
{Timestamp: 1672566600000, Value: 2.9}, // 12:50
{Timestamp: 1672566900000, Value: 3}, // 12:55
{Timestamp: 1672567080000, Value: 4, Partial: true}, // 12:58
},
},
},
},
},
}
value, err := json.Marshal(timeSeriesData)
if err != nil {
panic(err)
}
buckets[i] = &CachedBucket{
StartMs: startMs,
EndMs: endMs,
Type: RequestTypeTimeSeries,
Value: json.RawMessage(value),
Stats: ExecStats{
RowsScanned: uint64(i * 500),
BytesScanned: uint64(i * 10000),
DurationMS: uint64(i * 1000),
},
}
}
return buckets
}
func BenchmarkCachedData_JSONMarshal_10kbuckets(b *testing.B) {
buckets := createBuckets_TimeSeries(10000)
data := &CachedData{Buckets: buckets}
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
_, err := json.Marshal(data)
assert.NoError(b, err)
}
}
func BenchmarkCachedData_Clone_10kbuckets(b *testing.B) {
buckets := createBuckets_TimeSeries(10000)
data := &CachedData{Buckets: buckets}
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
_ = data.Clone()
}
}

View File

@@ -2,6 +2,7 @@ package variables
import (
"fmt"
"sort"
"strconv"
"strings"
@@ -35,19 +36,22 @@ func (e *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol
type variableReplacementVisitor struct {
variables map[string]qbtypes.VariableItem
errors []string
warnings []string
}
// specialSkipMarker is used to indicate that a condition should be removed
const specialSkipMarker = "__SKIP_CONDITION__"
// ReplaceVariablesInExpression takes a filter expression and returns it with variables replaced
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, error) {
// Also returns any warnings generated during the replacement process.
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, []string, error) {
input := antlr.NewInputStream(expression)
lexer := grammar.NewFilterQueryLexer(input)
visitor := &variableReplacementVisitor{
variables: variables,
errors: []string{},
warnings: []string{},
}
lexerErrorListener := NewErrorListener()
@@ -63,21 +67,21 @@ func ReplaceVariablesInExpression(expression string, variables map[string]qbtype
tree := parser.Query()
if len(parserErrorListener.SyntaxErrors) > 0 {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
}
result := visitor.Visit(tree).(string)
if len(visitor.errors) > 0 {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
}
// If the entire expression should be skipped, return empty string
if result == specialSkipMarker {
return "", nil
return "", visitor.warnings, nil
}
return result, nil
return result, visitor.warnings, nil
}
// Visit dispatches to the specific visit method based on node type
@@ -478,11 +482,32 @@ func (v *variableReplacementVisitor) VisitValue(ctx *grammar.ValueContext) any {
// Replace variable with its value
return v.formatVariableValue(varItem.Value)
}
// did not find an exact match; check if it's a composed string like "$env-xyz"
interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
if hasAllValue {
return specialSkipMarker
}
if interpolated != originalValue {
return v.formatVariableValue(interpolated)
}
// No variables found, return original
return originalValue
}
// Check if the quoted text contains embedded variables (the value itself does not start with $ but contains one)
if ctx.QUOTED_TEXT() != nil && strings.Contains(originalValue, "$") {
interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
if hasAllValue {
return specialSkipMarker
}
return v.formatVariableValue(interpolated)
}
// Return original value if not a variable or variable not found
// If it was quoted text and not a variable, return with quotes
if ctx.QUOTED_TEXT() != nil && !strings.HasPrefix(originalValue, "$") {
if ctx.QUOTED_TEXT() != nil {
return ctx.QUOTED_TEXT().GetText()
}
return originalValue
@@ -517,6 +542,82 @@ func (v *variableReplacementVisitor) VisitKey(ctx *grammar.KeyContext) any {
return keyText
}
// interpolateVariablesInString finds and replaces variable references within a string
// by checking against actual variable names in the variables map.
// Returns the interpolated string and a boolean indicating whether any variable had the __all__ value.
func (v *variableReplacementVisitor) interpolateVariablesInString(s string) (string, bool) {
result := s
varNames := make([]string, 0, len(v.variables)*2)
for name := range v.variables {
varNames = append(varNames, name)
if !strings.HasPrefix(name, "$") {
varNames = append(varNames, "$"+name)
}
}
sort.Slice(varNames, func(i, j int) bool {
return len(varNames[i]) > len(varNames[j])
})
for _, varName := range varNames {
// ensure we're looking for $varname pattern
searchPattern := varName
if !strings.HasPrefix(searchPattern, "$") {
searchPattern = "$" + varName
}
if strings.Contains(result, searchPattern) {
// direct lookup
varItem, ok := v.variables[varName]
if !ok {
// Try without $ prefix
varItem, ok = v.variables[strings.TrimPrefix(varName, "$")]
}
if ok {
// special check for __all__ value
if varItem.Type == qbtypes.DynamicVariableType {
if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
return "", true
}
}
// format the replacement value (unquoted for string interpolation)
replacement := v.formatVariableValueUnquoted(varItem.Value, strings.TrimPrefix(varName, "$"))
result = strings.ReplaceAll(result, searchPattern, replacement)
}
}
}
return result, false
}
// formatVariableValueUnquoted returns a string representation of the value for string interpolation.
// For multi-select (array) values, it takes the first value and adds a warning.
func (v *variableReplacementVisitor) formatVariableValueUnquoted(value any, varName string) string {
switch val := value.(type) {
case []string:
if len(val) > 1 {
v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, val[0]))
}
if len(val) > 0 {
return val[0]
}
return ""
case []any:
if len(val) > 1 {
v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
}
if len(val) > 0 {
return fmt.Sprintf("%v", val[0])
}
return ""
default:
return fmt.Sprintf("%v", val)
}
}
// formatVariableValue formats a variable value for inclusion in the expression
func (v *variableReplacementVisitor) formatVariableValue(value any) string {
switch val := value.(type) {
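
A minimal, self-contained sketch of the longest-name-first interpolation implemented by interpolateVariablesInString above, using plain string values instead of qbtypes.VariableItem. Sorting candidate names by descending length ensures "$environment" is consumed before "$env", and a "__all__" value short-circuits so the caller can drop the whole condition:

package main

import (
	"fmt"
	"sort"
	"strings"
)

// interpolate sketches the replacement loop: longest names first,
// "__all__" short-circuits so the caller can skip the condition.
func interpolate(s string, vars map[string]string) (string, bool) {
	names := make([]string, 0, len(vars))
	for name := range vars {
		names = append(names, name)
	}
	sort.Slice(names, func(i, j int) bool { return len(names[i]) > len(names[j]) })
	for _, name := range names {
		if !strings.Contains(s, "$"+name) {
			continue
		}
		if vars[name] == "__all__" {
			return "", true // caller removes the condition entirely
		}
		s = strings.ReplaceAll(s, "$"+name, vars[name])
	}
	return s, false
}

func main() {
	vars := map[string]string{"env": "dev", "environment": "production"}
	out, _ := interpolate("name = '$env-$environment'", vars)
	fmt.Println(out) // name = 'dev-production'
}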

View File

@@ -421,11 +421,107 @@ func TestReplaceVariablesInExpression(t *testing.T) {
},
expected: "message NOT CONTAINS 'debug'",
},
{
name: "variable composed with suffix in value",
expression: "cluster_name = '$environment-xyz'",
variables: map[string]qbtypes.VariableItem{
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
expected: "cluster_name = 'prod-xyz'",
},
{
name: "variable composed with suffix without quotes",
expression: "cluster_name = $environment-xyz",
variables: map[string]qbtypes.VariableItem{
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "staging",
},
},
expected: "cluster_name = 'staging-xyz'",
},
{
name: "variable in LIKE pattern with suffix",
expression: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
expected: "service.name LIKE 'prod%'",
},
{
name: "variable composed with prefix and suffix",
expression: "label = 'prefix-$var-suffix'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "middle",
},
},
expected: "label = 'prefix-middle-suffix'",
},
{
name: "multiple variables in one string",
expression: "path = '$region-$env-cluster'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
expected: "path = 'us-west-prod-cluster'",
},
{
name: "embedded variable with __all__ value skips condition",
expression: "cluster_name = '$environment-xyz'",
variables: map[string]qbtypes.VariableItem{
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
expected: "",
},
{
name: "variable with underscore composed with suffix",
expression: "name = '$my_var-test'",
variables: map[string]qbtypes.VariableItem{
"my_var": {
Type: qbtypes.DynamicVariableType,
Value: "hello",
},
},
expected: "name = 'hello-test'",
},
{
name: "similar variable names - longer matches first",
expression: "name = '$env-$environment'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "dev",
},
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "production",
},
},
expected: "name = 'dev-production'",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := ReplaceVariablesInExpression(tt.expression, tt.variables)
result, _, err := ReplaceVariablesInExpression(tt.expression, tt.variables)
if tt.wantErr {
assert.Error(t, err)
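
For reference, a hedged usage sketch of the updated three-value signature these tests exercise; the variables import path is an assumption based on the repository layout, while the function name, return values, and VariableItem shape come from the diff above:

package main

import (
	"fmt"

	// assumed import path for the variables package shown above
	"github.com/SigNoz/signoz/pkg/querybuilder/variables"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	vars := map[string]qbtypes.VariableItem{
		"env": {Type: qbtypes.DynamicVariableType, Value: []any{"prod", "staging"}},
	}
	// The function now returns (result, warnings, err); a multi-select value
	// embedded in a string interpolates the first value and adds a warning.
	result, warnings, err := variables.ReplaceVariablesInExpression("cluster = '$env-xyz'", vars)
	if err != nil {
		panic(err)
	}
	fmt.Println(result)   // cluster = 'prod-xyz'
	fmt.Println(warnings) // warning about using the first value
}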

View File

@@ -456,3 +456,32 @@ def assert_scalar_column_order(
f"{context}: Column {column_index} order mismatch. "
f"Expected {expected_values}, got {actual_values}"
)
def make_substitute_vars_request(
signoz: "types.SigNoz",
token: str,
start_ms: int,
end_ms: int,
queries: List[Dict],
variables: Dict[str, Any],
timeout: int = QUERY_TIMEOUT,
) -> requests.Response:
return requests.post(
signoz.self.host_configs["8080"].get("/api/v5/substitute_vars"),
timeout=timeout,
headers={"authorization": f"Bearer {token}"},
json={
"schemaVersion": "v1",
"start": start_ms,
"end": end_ms,
"requestType": "time_series",
"compositeQuery": {"queries": queries},
"variables": variables,
"formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
},
)
def sum_series_values(series_values: List[Dict]) -> float:
return sum(point["value"] for point in series_values)
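
The fixture above wraps a plain POST to /api/v5/substitute_vars; a hedged Go sketch of the same request, where host, token, and timestamps are placeholders and the payload fields mirror the fixture:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	payload := map[string]any{
		"schemaVersion":  "v1",
		"start":          1700000000000, // placeholder start_ms
		"end":            1700000300000, // placeholder end_ms
		"requestType":    "time_series",
		"compositeQuery": map[string]any{"queries": []any{}},
		"variables": map[string]any{
			"service": map[string]any{"type": "query", "value": "auth-service"},
		},
		"formatOptions": map[string]any{"formatTableResultForUI": false, "fillGaps": false},
	}
	body, _ := json.Marshal(payload)
	req, _ := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/substitute_vars", bytes.NewReader(body))
	req.Header.Set("authorization", "Bearer <token>") // placeholder token
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect 200 OK with the substituted composite query
}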

View File

@@ -2,8 +2,6 @@ from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, Dict, List, Optional, Tuple
import requests
from fixtures import querier, types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
@@ -24,6 +22,8 @@ metric_values_for_test = {
"service-d": 10.0,
}
SCALAR_FORMAT_OPTIONS = {"formatTableResultForUI": True, "fillGaps": False}
def generate_logs_with_counts(
now: datetime,
@@ -85,30 +85,6 @@ def generate_metrics_with_values(
return metrics
def make_scalar_query_request(
signoz: types.SigNoz,
token: str,
now: datetime,
queries: List[Dict],
lookback_minutes: int = 5,
) -> requests.Response:
return requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=5,
headers={"authorization": f"Bearer {token}"},
json={
"schemaVersion": "v1",
"start": int(
(now - timedelta(minutes=lookback_minutes)).timestamp() * 1000
),
"end": int(now.timestamp() * 1000),
"requestType": "scalar",
"compositeQuery": {"queries": queries},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
def build_logs_query(
name: str = "A",
aggregations: Optional[List[str]] = None,
@@ -218,11 +194,16 @@ def test_logs_scalar_group_by_single_agg_no_order(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_logs_query(group_by=["service.name"])],
request_type="scalar",
format_options={"formatTableResultForUI": True, "fillGaps": False},
)
assert response.status_code == HTTPStatus.OK
@@ -246,11 +227,16 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_logs_query(group_by=["service.name"], order_by=[("count()", "asc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -274,11 +260,16 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_logs_query(group_by=["service.name"], order_by=[("count()", "desc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -302,15 +293,20 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "asc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -334,15 +330,20 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "desc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -366,10 +367,13 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"],
@@ -377,6 +381,8 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
order_by=[("count()", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -398,10 +404,13 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"],
@@ -409,6 +418,8 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
order_by=[("count_distinct(body)", "desc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -431,15 +442,20 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -463,15 +479,20 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -495,15 +516,20 @@ def test_logs_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -527,11 +553,16 @@ def test_traces_scalar_group_by_single_agg_no_order(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_traces_query(group_by=["service.name"])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -555,11 +586,16 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_traces_query(group_by=["service.name"], order_by=[("count()", "asc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -583,11 +619,16 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_traces_query(group_by=["service.name"], order_by=[("count()", "desc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -611,15 +652,20 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "asc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -643,15 +689,20 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_desc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "desc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -675,10 +726,13 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"],
@@ -686,6 +740,8 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
order_by=[("count()", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -707,15 +763,20 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -739,15 +800,20 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -771,15 +837,20 @@ def test_traces_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -803,11 +874,16 @@ def test_metrics_scalar_group_by_single_agg_no_order(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_metrics_query(group_by=["service.name"])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -836,16 +912,21 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
order_by=[("sum(test.metric)", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -874,16 +955,21 @@ def test_metrics_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
order_by=[("service.name", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -912,10 +998,13 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
@@ -923,6 +1012,8 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
limit=2,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -946,10 +1037,13 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
@@ -957,6 +1051,8 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
limit=3,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -980,10 +1076,13 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
@@ -991,6 +1090,8 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
limit=2,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK

View File

@@ -0,0 +1,356 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.querier import build_scalar_query, make_substitute_vars_request
LOGS_COUNT_AGG = [{"expression": "count()"}]
def test_substitute_vars_standalone_variable(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "query", "value": "auth-service"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name = 'auth-service'"
def test_substitute_vars_variable_in_string(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "custom", "value": "prod"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "cluster_name = 'prod-xyz'"
def test_substitute_vars_multiple_variables(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service AND env = $environment",
)
],
variables={
"service": {"type": "text", "value": "auth-service"},
"environment": {"type": "query", "value": "production"},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name = 'auth-service' AND env = 'production'"
def test_substitute_vars_array_variable(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name IN $services",
)
],
variables={
"services": {"type": "query", "value": ["auth-service", "api-service"]}
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name IN ['auth-service', 'api-service']"
def test_substitute_vars_like_pattern(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name LIKE '$env%'",
)
],
variables={"env": {"type": "text", "value": "prod"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name LIKE 'prod%'"
def test_substitute_vars_variable_without_quotes(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = $environment-xyz",
)
],
variables={"environment": {"type": "dynamic", "value": "staging"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "cluster_name = 'staging-xyz'"
def test_substitute_vars_all_value_standalone(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "dynamic", "value": "__all__"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
# expression should be empty when __all__ is used
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == ""
def test_substitute_vars_all_value_in_composed_string(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "dynamic", "value": "__all__"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
# expression should be empty when __all__ is used
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == ""
def test_substitute_vars_all_value_partial(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service AND env = $env",
)
],
variables={
"service": {"type": "dynamic", "value": "__all__"},
"env": {"type": "dynamic", "value": "production"},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1
# only env condition should remain
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "env = 'production'"

View File

@@ -0,0 +1,803 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
from fixtures.querier import (
build_scalar_query,
get_all_series,
get_series_values,
make_query_request,
sum_series_values,
)
LOGS_COUNT_AGG = [{"expression": "count()"}]
def test_query_range_with_standalone_variable(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web service log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "query", "value": "auth-service"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1 # auth-service log
def test_query_range_with_variable_in_array(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web service log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name IN $services",
)
],
variables={
"services": {"type": "custom", "value": ["auth-service", "api-service"]}
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2 # auth-service and api-service logs
def test_query_range_with_variable_composed_in_string(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"cluster_name": "prod-xyz"},
body="Prod cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"cluster_name": "staging-xyz"},
body="Staging cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service3"},
attributes={"cluster_name": "dev-xyz"},
body="Dev cluster log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "dynamic", "value": "prod"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1 # prod-xyz log
def test_query_range_with_variable_in_like_pattern(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "prod-auth"},
attributes={"env": "production"},
body="Prod auth log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "prod-api"},
attributes={"env": "production"},
body="Prod API log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "staging-api"},
attributes={"env": "staging"},
body="Staging API log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name LIKE '$env%'",
)
],
variables={"env": {"type": "text", "value": "prod"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2 # prod-auth and prod-api logs


def test_query_range_with_multiple_variables_in_string(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
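    """Multiple variables composed in one string ('$region-$env-cluster') are all substituted."""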
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"path": "us-west-prod-cluster"},
body="US West Prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"path": "us-east-prod-cluster"},
body="US East Prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service3"},
attributes={"path": "eu-west-staging-cluster"},
body="EU West Staging log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="path = '$region-$env-cluster'",
)
],
variables={
"region": {"type": "query", "value": "us-west"},
"env": {"type": "custom", "value": "prod"},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1 # us-west-prod-cluster log


def test_query_range_with_variable_prefix_and_suffix(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
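    """A variable wrapped in literal prefix and suffix text ('prefix-$var-suffix') is substituted in place."""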
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"label": "prefix-middle-suffix"},
body="Middle label log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"label": "prefix-other-suffix"},
body="Other label log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="label = 'prefix-$var-suffix'",
)
],
variables={"var": {"type": "text", "value": "middle"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1 # prefix-middle-suffix log


def test_query_range_with_variable_without_quotes(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
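    """A composed variable value is substituted even when the RHS is not quoted."""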
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"cluster_name": "staging-xyz"},
body="Staging cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"cluster_name": "prod-xyz"},
body="Prod cluster log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = $environment-xyz",
)
],
variables={"environment": {"type": "custom", "value": "staging"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1 # staging-xyz log


def test_query_range_time_series_with_group_by(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
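    """Variable substitution works together with a group-by on a resource attribute."""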
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = []
for i in range(3):
logs.append(
Logs(
timestamp=now - timedelta(minutes=i + 1),
resources={"service.name": "auth-service"},
attributes={"cluster_name": "prod-xyz"},
body=f"Auth service log {i}",
severity_text="INFO",
)
)
logs.append(
Logs(
timestamp=now - timedelta(minutes=i + 1),
resources={"service.name": "api-service"},
attributes={"cluster_name": "prod-xyz"},
body=f"API service log {i}",
severity_text="INFO",
)
)
logs.append(
Logs(
timestamp=now - timedelta(minutes=i + 1),
resources={"service.name": "web-service"},
attributes={"cluster_name": "staging-xyz"},
body=f"Web service log {i}",
severity_text="INFO",
)
)
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=10)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$env-xyz'",
group_by=[
{
"name": "service.name",
"fieldDataType": "string",
"fieldContext": "resource",
}
],
)
],
variables={"env": {"type": "query", "value": "prod"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
all_series = get_all_series(response.json(), "A")
assert len(all_series) == 2 # auth-service and api-service
    # total across both prod series: 6 (3 auth-service + 3 api-service)
total_count = sum(sum_series_values(s["values"]) for s in all_series)
assert total_count == 6


def test_query_range_with_different_variable_types(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
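    """A standalone variable behaves the same across all supported variable types."""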
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
    # exercise every supported variable type with the same standalone filter
for var_type in ["query", "custom", "text", "dynamic"]:
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service",
)
],
variables={"service": {"type": var_type, "value": "auth-service"}},
)
assert (
response.status_code == HTTPStatus.OK
), f"Failed for variable type: {var_type}"
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert (
total_count == 1
), f"Expected 1 log for variable type {var_type}, got {total_count}"


def test_query_range_with_all_value_standalone(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
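    """A standalone variable set to the special __all__ value drops the filter condition entirely."""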
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web service log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
    # when the variable value is `__all__`, the filter condition should be removed entirely
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "dynamic", "value": "__all__"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 3


def test_query_range_with_all_value_in_composed_string(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
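    """The __all__ value inside a composed string also drops the filter condition."""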
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"cluster_name": "prod-xyz"},
body="Prod cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"cluster_name": "staging-xyz"},
body="Staging cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service3"},
attributes={"cluster_name": "dev-xyz"},
body="Dev cluster log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
    # `__all__` inside a composed string should also cause the filter to be removed
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "dynamic", "value": "__all__"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 3 # all logs should be returned


def test_query_range_with_all_value_partial_filter(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
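    """When one of several variables is __all__, only its condition is dropped; the others still apply."""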
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web staging log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
    # with `__all__` for service, only the env filter should apply
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service AND env = $env",
)
],
variables={
"service": {"type": "dynamic", "value": "__all__"},
"env": {"type": "dynamic", "value": "production"},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2 # prod logs (auth + api)


def test_query_range_with_all_value_in_array(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
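    """The __all__ value used with IN drops the membership filter, returning all logs."""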
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
]
insert_logs(logs)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name IN $services",
)
],
variables={"services": {"type": "dynamic", "value": "__all__"}},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2 # all logs