mirror of https://github.com/SigNoz/signoz.git
synced 2026-02-11 03:54:28 +00:00

Compare commits: issue-1000 ... ns/ext-api

6 Commits:
- cd00d71478
- 15161c09e8
- ee5fbe41eb
- f2f3a7b24a
- dd0738ac70
- 1c4dfc931f
@@ -12,6 +12,7 @@ linters:
- misspell
- nilnil
- sloglint
- wastedassign
- unparam
- unused
settings:

@@ -38,7 +38,7 @@ module.exports = {
'import', // Import/export linting
'sonarjs', // Code quality/complexity
// TODO: Uncomment after running: yarn add -D eslint-plugin-spellcheck
// 'spellcheck',
// 'spellcheck', // Correct spellings
],
settings: {
react: {
@@ -60,12 +60,18 @@ module.exports = {
'no-debugger': 'error', // Disallows debugger statements in production code
curly: 'error', // Requires curly braces for all control statements
eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
// TODO: Enable after fixing ~15 console.log statements
// 'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error
'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error

// TypeScript rules
'@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions
'@typescript-eslint/no-unused-vars': 'off',
'@typescript-eslint/no-unused-vars': [
// Disallows unused variables/args
'error',
{
argsIgnorePattern: '^_', // Allows unused args prefixed with _ (e.g., _unusedParam)
varsIgnorePattern: '^_', // Allows unused vars prefixed with _ (e.g., _unusedVar)
},
],
'@typescript-eslint/no-explicit-any': 'warn', // Warns when using 'any' type (consider upgrading to error)
// TODO: Change to 'error' after fixing ~80 empty function placeholders in providers/contexts
'@typescript-eslint/no-empty-function': 'off', // Disallows empty function bodies
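Aside: the `argsIgnorePattern`/`varsIgnorePattern` settings enabled above are what the frontend changes later in this compare rely on; discarded destructured fields are renamed with a leading underscore so `@typescript-eslint/no-unused-vars` ignores them. A minimal TypeScript sketch of the convention (illustrative only; the interface and function names here are made up, not taken from the diff):

```typescript
interface FilterItem {
	id: string;
	key: string;
	value: string;
}

// Before: `const { id, ...rest } = item;` reports `id` as unused.
// After: renaming the discarded binding to `_id` matches the '^_'
// ignore pattern, so the lint rule accepts it while `...rest` still
// excludes the field from the result.
function stripId(item: FilterItem): Omit<FilterItem, 'id'> {
	const { id: _id, ...rest } = item;
	return rest;
}
```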
@@ -41,7 +41,7 @@ export const getConsumerLagDetails = async (
> => {
const { detailType, ...restProps } = props;
const response = await axios.post(
`/messaging-queues/kafka/consumer-lag/${props.detailType}`,
`/messaging-queues/kafka/consumer-lag/${detailType}`,
{
...restProps,
},
@@ -43,16 +43,17 @@ export const omitIdFromQuery = (query: Query | null): any => ({
builder: {
...query?.builder,
queryData: query?.builder.queryData.map((queryData) => {
const { id, ...rest } = queryData.aggregateAttribute || {};
const { id: _aggregateAttributeId, ...rest } =
queryData.aggregateAttribute || {};
const newAggregateAttribute = rest;
const newGroupByAttributes = queryData.groupBy.map((groupByAttribute) => {
const { id, ...rest } = groupByAttribute;
const { id: _groupByAttributeId, ...rest } = groupByAttribute;
return rest;
});
const newItems = queryData.filters?.items?.map((item) => {
const { id, ...newItem } = item;
const { id: _itemId, ...newItem } = item;
if (item.key) {
const { id, ...rest } = item.key;
const { id: _keyId, ...rest } = item.key;
return {
...newItem,
key: rest,
@@ -45,7 +45,6 @@ function Pre({
}

function Code({
node,
inline,
className = 'blog-code',
children,
@@ -29,7 +29,6 @@ import {
QUERY_BUILDER_OPERATORS_BY_KEY_TYPE,
queryOperatorSuggestions,
} from 'constants/antlrQueryConstants';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useDebounce from 'hooks/useDebounce';
import { debounce, isNull } from 'lodash-es';

@@ -208,8 +207,6 @@ function QuerySearch({
const lastValueRef = useRef<string>('');
const isMountedRef = useRef<boolean>(true);

const { handleRunQuery } = useQueryBuilder();

const { selectedDashboard } = useDashboard();

const dynamicVariables = useMemo(
@@ -87,7 +87,7 @@ function TraceOperatorEditor({
// Track if the query was changed externally (from props) vs internally (user input)
const [isExternalQueryChange, setIsExternalQueryChange] = useState(false);
const [lastExternalValue, setLastExternalValue] = useState<string>('');
const { currentQuery, handleRunQuery } = useQueryBuilder();
const { currentQuery } = useQueryBuilder();

const queryOptions = useMemo(
() =>
@@ -5,7 +5,6 @@ import { EditorView } from '@uiw/react-codemirror';
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
import { initialQueriesMap } from 'constants/queryBuilder';
import * as UseQBModule from 'hooks/queryBuilder/useQueryBuilder';
import { fireEvent, render, userEvent, waitFor } from 'tests/test-utils';
import type { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';

@@ -121,13 +120,8 @@ jest.mock('api/querySuggestions/getValueSuggestion', () => ({
// Note: We're NOT mocking CodeMirror here - using the real component
// This provides integration testing with the actual CodeMirror editor

const handleRunQueryMock = ((UseQBModule as unknown) as {
handleRunQuery: jest.MockedFunction<() => void>;
}).handleRunQuery;

const SAMPLE_KEY_TYPING = 'http.';
const SAMPLE_VALUE_TYPING_INCOMPLETE = "service.name = '";
const SAMPLE_VALUE_TYPING_COMPLETE = "service.name = 'frontend'";
const SAMPLE_STATUS_QUERY = "http.status_code = '200'";

describe('QuerySearch (Integration with Real CodeMirror)', () => {
@@ -796,12 +796,12 @@ export const adjustQueryForV5 = (currentQuery: Query): Query => {
});

const {
aggregateAttribute,
aggregateOperator,
timeAggregation,
spaceAggregation,
reduceTo,
filters,
aggregateAttribute: _aggregateAttribute,
aggregateOperator: _aggregateOperator,
timeAggregation: _timeAggregation,
spaceAggregation: _spaceAggregation,
reduceTo: _reduceTo,
filters: _filters,
...retainedQuery
} = query;
@@ -1,14 +1,5 @@
import './Slider.styles.scss';

import { IQuickFiltersConfig } from 'components/QuickFilters/types';

interface ISliderProps {
filter: IQuickFiltersConfig;
}

// not needed for now build when required
export default function Slider(props: ISliderProps): JSX.Element {
const { filter } = props;
console.log(filter);
export default function Slider(): JSX.Element {
return <div>Slider</div>;
}
@@ -303,15 +303,9 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
/>
);
case FiltersType.DURATION:
return (
<Duration
filter={filter}
onFilterChange={onFilterChange}
source={source}
/>
);
return <Duration filter={filter} onFilterChange={onFilterChange} />;
case FiltersType.SLIDER:
return <Slider filter={filter} />;
return <Slider />;
// eslint-disable-next-line sonarjs/no-duplicated-branches
default:
return (
@@ -1,7 +1,6 @@
/* eslint-disable react/jsx-props-no-spreading */
import { fireEvent, render, screen } from '@testing-library/react';
import { QueryParams } from 'constants/query';
import { initialQueriesMap } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { defaultPostableAlertRuleV2 } from 'container/CreateAlertV2/constants';
import { getCreateAlertLocalStateFromAlertDef } from 'container/CreateAlertV2/utils';
@@ -73,7 +73,7 @@ export function sanitizeDashboardData(

const updatedVariables = Object.entries(selectedData.variables).reduce(
(acc, [key, value]) => {
const { selectedValue, ...rest } = value;
const { selectedValue: _selectedValue, ...rest } = value;
acc[key] = rest;
return acc;
},
@@ -9,10 +9,11 @@ import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
import { ENTITY_VERSION_V4 } from 'constants/app';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { isEmpty } from 'lodash-es';
import { Atom, Play, Terminal } from 'lucide-react';
import { Atom, Terminal } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { AlertTypes } from 'types/api/alerts/alertTypes';

@@ -165,9 +166,8 @@ function QuerySection({
onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<Button
type="primary"
onClick={(): void => {
<RunQueryBtn
onStageRunQuery={(): void => {
runQuery();
logEvent('Alert: Stage and run query', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertType],

@@ -176,11 +176,7 @@ function QuerySection({
queryType: queryCategory,
});
}}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
/>
</span>
}
items={tabs}

@@ -199,14 +195,7 @@ function QuerySection({
onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<Button
type="primary"
onClick={runQuery}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
<RunQueryBtn onStageRunQuery={runQuery} />
</span>
}
items={items}
@@ -1,14 +1,6 @@
/* eslint-disable react/display-name */
import { PlusOutlined } from '@ant-design/icons';
import {
Button,
Dropdown,
Flex,
Input,
MenuProps,
Tag,
Typography,
} from 'antd';
import { Button, Flex, Input, Typography } from 'antd';
import type { ColumnsType } from 'antd/es/table/interface';
import saveAlertApi from 'api/alerts/save';
import logEvent from 'api/common/logEvent';
@@ -91,7 +91,7 @@ function Summary(): JSX.Element {
const queryFiltersWithoutId = useMemo(() => {
const filtersWithoutId = {
...queryFilters,
items: queryFilters.items.map(({ id, ...rest }) => rest),
items: queryFilters.items.map(({ id: _id, ...rest }) => rest),
};
return JSON.stringify(filtersWithoutId);
}, [queryFilters]);
@@ -12,6 +12,7 @@ import {
getDefaultWidgetData,
PANEL_TYPE_TO_QUERY_TYPES,
} from 'container/NewWidget/utils';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
// import { QueryBuilder } from 'container/QueryBuilder';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';

@@ -20,7 +21,7 @@ import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useUrlQuery from 'hooks/useUrlQuery';
import { defaultTo, isUndefined } from 'lodash-es';
import { Atom, Play, Terminal } from 'lucide-react';
import { Atom, Terminal } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
getNextWidgets,

@@ -28,20 +29,14 @@ import {
getSelectedWidgetIndex,
} from 'providers/Dashboard/util';
import { useCallback, useEffect, useMemo } from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';

import ClickHouseQueryContainer from './QueryBuilder/clickHouse';
import PromQLQueryContainer from './QueryBuilder/promQL';

function QuerySection({
selectedGraph,
queryResponse,
}: QueryProps): JSX.Element {
function QuerySection({ selectedGraph }: QueryProps): JSX.Element {
const {
currentQuery,
handleRunQuery: handleRunQueryFromQueryBuilder,

@@ -242,15 +237,7 @@ function QuerySection({
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<TextToolTip text="This will temporarily save the current query and graph state. This will persist across tab change" />
<Button
loading={queryResponse.isFetching}
type="primary"
onClick={handleRunQuery}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
<RunQueryBtn label="Stage & Run Query" onStageRunQuery={handleRunQuery} />
</span>
}
items={items}

@@ -261,10 +248,6 @@ function QuerySection({

interface QueryProps {
selectedGraph: PANEL_TYPES;
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
}

export default QuerySection;
@@ -64,7 +64,7 @@ function LeftContainer({
enableDrillDown={enableDrillDown}
/>
<QueryContainer className="query-section-left-container">
<QuerySection selectedGraph={selectedGraph} queryResponse={queryResponse} />
<QuerySection selectedGraph={selectedGraph} />
{selectedGraph === PANEL_TYPES.LIST && (
<ExplorerColumnsRenderer
selectedLogFields={selectedLogFields}
@@ -166,7 +166,7 @@ function UpdateContextLinks({
onSave(newContextLink);
} catch (error) {
// Form validation failed, don't call onSave
console.log('Form validation failed:', error);
console.error('Form validation failed:', error);
}
};
@@ -0,0 +1,37 @@
.run-query-btn {
display: flex;
min-width: 132px;
align-items: center;
gap: 6px;

.ant-btn-icon {
margin: 0 !important;
}
}

.cancel-query-btn {
display: flex;
min-width: 132px;
align-items: center;
gap: 6px;
}

.cmd-hint {
display: inline-flex;
align-items: center;
gap: 2px;
padding: 2px 4px;
border-radius: 4px;
//not using var here to support opacity 60%. To be handled at design system level.
background: rgba(35, 38, 46, 0.6);
line-height: 1;
font-size: 10px;
}

.lightMode {
.cmd-hint {
color: var(--bg-ink-200);
//not using var here to support opacity 60%. To be handled at design system level.
background: rgba(231, 232, 236, 0.8);
}
}
@@ -0,0 +1,53 @@
import './RunQueryBtn.scss';

import { Button } from 'antd';
import {
ChevronUp,
Command,
CornerDownLeft,
Loader2,
Play,
} from 'lucide-react';
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';

interface RunQueryBtnProps {
label?: string;
isLoadingQueries?: boolean;
handleCancelQuery?: () => void;
onStageRunQuery?: () => void;
}

function RunQueryBtn({
label,
isLoadingQueries,
handleCancelQuery,
onStageRunQuery,
}: RunQueryBtnProps): JSX.Element {
const isMac = getUserOperatingSystem() === UserOperatingSystem.MACOS;
return isLoadingQueries ? (
<Button
type="default"
icon={<Loader2 size={14} className="loading-icon animate-spin" />}
className="cancel-query-btn periscope-btn danger"
onClick={handleCancelQuery}
>
Cancel
</Button>
) : (
<Button
type="primary"
className="run-query-btn periscope-btn primary"
disabled={isLoadingQueries || !onStageRunQuery}
onClick={onStageRunQuery}
icon={<Play size={14} />}
>
{label || 'Run Query'}
<div className="cmd-hint">
{isMac ? <Command size={12} /> : <ChevronUp size={12} />}
<CornerDownLeft size={12} />
</div>
</Button>
);
}

export default RunQueryBtn;
@@ -0,0 +1,82 @@
// frontend/src/container/QueryBuilder/components/RunQueryBtn/__tests__/RunQueryBtn.test.tsx
import { fireEvent, render, screen } from '@testing-library/react';

import RunQueryBtn from '../RunQueryBtn';

// Mock OS util
jest.mock('utils/getUserOS', () => ({
getUserOperatingSystem: jest.fn(),
UserOperatingSystem: { MACOS: 'mac', WINDOWS: 'win', LINUX: 'linux' },
}));
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';

describe('RunQueryBtn', () => {
test('renders run state and triggers on click', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} />);
const btn = screen.getByRole('button', { name: /run query/i });
expect(btn).toBeEnabled();
fireEvent.click(btn);
expect(onRun).toHaveBeenCalledTimes(1);
});

test('disabled when onStageRunQuery is undefined', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
render(<RunQueryBtn />);
expect(screen.getByRole('button', { name: /run query/i })).toBeDisabled();
});

test('shows cancel state and calls handleCancelQuery', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const onCancel = jest.fn();
render(<RunQueryBtn isLoadingQueries handleCancelQuery={onCancel} />);
const cancel = screen.getByRole('button', { name: /cancel/i });
fireEvent.click(cancel);
expect(onCancel).toHaveBeenCalledTimes(1);
});

test('shows Command + CornerDownLeft on mac', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const { container } = render(
<RunQueryBtn onStageRunQuery={(): void => {}} />,
);
expect(container.querySelector('.lucide-command')).toBeInTheDocument();
expect(
container.querySelector('.lucide-corner-down-left'),
).toBeInTheDocument();
});

test('shows ChevronUp + CornerDownLeft on non-mac', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.WINDOWS,
);
const { container } = render(
<RunQueryBtn onStageRunQuery={(): void => {}} />,
);
expect(container.querySelector('.lucide-chevron-up')).toBeInTheDocument();
expect(container.querySelector('.lucide-command')).not.toBeInTheDocument();
expect(
container.querySelector('.lucide-corner-down-left'),
).toBeInTheDocument();
});

test('renders custom label when provided', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} label="Stage & Run Query" />);
expect(
screen.getByRole('button', { name: /stage & run query/i }),
).toBeInTheDocument();
});
});
@@ -1,12 +1,12 @@
import './ToolbarActions.styles.scss';

import { Button } from 'antd';
import { LogsExplorerShortcuts } from 'constants/shortcuts/logsExplorerShortcuts';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { Loader2, Play } from 'lucide-react';
import { MutableRefObject, useEffect } from 'react';
import { useQueryClient } from 'react-query';

import RunQueryBtn from '../RunQueryBtn/RunQueryBtn';

interface RightToolbarActionsProps {
onStageRunQuery: () => void;
isLoadingQueries?: boolean;

@@ -42,14 +42,7 @@ export default function RightToolbarActions({
if (showLiveLogs) {
return (
<div className="right-toolbar-actions-container">
<Button
type="primary"
className="run-query-btn periscope-btn primary"
disabled
icon={<Play size={14} />}
>
Run Query
</Button>
<RunQueryBtn />
</div>
);
}

@@ -65,26 +58,11 @@ export default function RightToolbarActions({

return (
<div className="right-toolbar-actions-container">
{isLoadingQueries ? (
<Button
type="default"
icon={<Loader2 size={14} className="loading-icon animate-spin" />}
className="cancel-query-btn periscope-btn danger"
onClick={handleCancelQuery}
>
Cancel
</Button>
) : (
<Button
type="primary"
className="run-query-btn periscope-btn primary"
disabled={isLoadingQueries}
onClick={onStageRunQuery}
icon={<Play size={14} />}
>
Run Query
</Button>
)}
<RunQueryBtn
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
onStageRunQuery={onStageRunQuery}
/>
</div>
);
}
@@ -136,7 +136,6 @@ const useAggregateDrilldown = ({
query,
// panelType,
aggregateData: aggregateDataWithTimeRange,
widgetId,
onClose,
});
@@ -129,7 +129,6 @@ const useBaseAggregateOptions = ({

const handleBaseDrilldown = useCallback(
(key: string): void => {
console.log('Base drilldown:', { key, aggregateData });
const route = getRoute(key);
const timeRange = aggregateData?.timeRange;
const filtersToAdd = aggregateData?.filters || [];
@@ -17,7 +17,6 @@ interface UseBaseAggregateOptionsProps {
query?: Query;
// panelType?: PANEL_TYPES;
aggregateData?: AggregateData | null;
widgetId?: string;
onClose: () => void;
}
@@ -27,7 +26,6 @@ const useDashboardVarConfig = ({
query,
// panelType,
aggregateData,
widgetId,
onClose,
}: UseBaseAggregateOptionsProps): {
dashbaordVariablesConfig: {

@@ -83,11 +81,6 @@ const useDashboardVarConfig = ({
dashboardVar: [string, IDashboardVariable],
fieldValue: any,
) => {
console.log('Setting variable:', {
fieldName,
dashboardVarId: dashboardVar[0],
fieldValue,
});
onValueUpdate(fieldName, dashboardVar[1]?.id, fieldValue, false);
onClose();
},

@@ -96,10 +89,6 @@ const useDashboardVarConfig = ({

const handleUnsetVariable = useCallback(
(fieldName: string, dashboardVar: [string, IDashboardVariable]) => {
console.log('Unsetting variable:', {
fieldName,
dashboardVarId: dashboardVar[0],
});
onValueUpdate(fieldName, dashboardVar[0], null, false);
onClose();
},

@@ -109,12 +98,6 @@ const useDashboardVarConfig = ({
const handleCreateVariable = useCallback(
(fieldName: string, fieldValue: string | number | boolean) => {
const source = getSourceFromQuery();
console.log('Creating variable from drilldown:', {
fieldName,
fieldValue,
source,
widgetId,
});
createVariable(
fieldName,
fieldValue,

@@ -125,7 +108,7 @@ const useDashboardVarConfig = ({
);
onClose();
},
[createVariable, getSourceFromQuery, widgetId, onClose],
[createVariable, getSourceFromQuery, onClose],
);

const contextItems = useMemo(
@@ -19,20 +19,6 @@
display: flex;
align-items: center;
gap: 8px;

.cancel-query-btn {
min-width: 96px;
display: flex;
align-items: center;
gap: 2px;
}

.run-query-btn {
min-width: 96px;
display: flex;
align-items: center;
gap: 2px;
}
}
}
@@ -31,8 +31,8 @@ export const normalizeSteps = (steps: FunnelStepData[]): FunnelStepData[] => {
...step.filters,
items: step.filters.items.map((item) => {
const {
id: unusedId,
isIndexed,
id: _unusedId,
isIndexed: _isIndexed,
...keyObj
} = item.key as BaseAutocompleteData;
return {
@@ -143,7 +143,7 @@ export const useValidateFunnelSteps = ({
selectedTime,
steps.map((step) => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { latency_type, ...rest } = step;
const { latency_type: _latency_type, ...rest } = step;
return rest;
}),
],
@@ -55,7 +55,7 @@ const useDragColumns = <T>(storageKey: LOCALSTORAGE): UseDragColumns<T> => {
const parsedDraggedColumns = await JSON.parse(localStorageColumns);
nextDraggedColumns = parsedDraggedColumns;
} catch (e) {
console.log('error while parsing json');
console.error('error while parsing json: ', e);
} finally {
redirectWithDraggedColumns(nextDraggedColumns);
}
@@ -4,11 +4,6 @@ import parser from 'lib/logql/parser';
describe('lib/logql/parser', () => {
test('parse valid queries', () => {
logqlQueries.forEach((queryObject) => {
try {
parser(queryObject.query);
} catch (e) {
console.log(e);
}
expect(parser(queryObject.query)).toEqual(queryObject.parsedQuery);
});
});
@@ -4,11 +4,7 @@ import { reverseParser } from 'lib/logql/reverseParser';
describe('lib/logql/reverseParser', () => {
test('reverse parse valid queries', () => {
logqlQueries.forEach((queryObject) => {
try {
expect(reverseParser(queryObject.parsedQuery)).toEqual(queryObject.query);
} catch (e) {
console.log(e);
}
expect(reverseParser(queryObject.parsedQuery)).toEqual(queryObject.query);
});
});
});
@@ -11,8 +11,8 @@ describe('getYAxisScale', () => {
keyIndex: 1,
thresholdValue: 10,
thresholdUnit: 'percentunit',
moveThreshold(dragIndex, hoverIndex): void {
console.log(dragIndex, hoverIndex);
moveThreshold(): void {
// no-op
},
selectedGraph: PANEL_TYPES.TIME_SERIES,
},

@@ -21,8 +21,8 @@ describe('getYAxisScale', () => {
keyIndex: 2,
thresholdValue: 20,
thresholdUnit: 'percentunit',
moveThreshold(dragIndex, hoverIndex): void {
console.log(dragIndex, hoverIndex);
moveThreshold(): void {
// no-op
},
selectedGraph: PANEL_TYPES.TIME_SERIES,
},
@@ -9,7 +9,6 @@ import {
MessagingQueuesPayloadProps,
} from 'api/messagingQueues/getConsumerLagDetails';
import axios from 'axios';
import { isNumber } from 'chart.js/helpers';
import cx from 'classnames';
import { ColumnTypeRender } from 'components/Logs/TableView/types';
import { SOMETHING_WENT_WRONG } from 'constants/api';
@@ -275,7 +275,7 @@ export function setConfigDetail(
},
): void {
// remove "key" and its value from the paramsToSet object
const { key, ...restParamsToSet } = paramsToSet || {};
const { key: _key, ...restParamsToSet } = paramsToSet || {};

if (!isEmpty(restParamsToSet)) {
const configDetail = {
@@ -145,7 +145,7 @@ export const removeFilter = (
const updatedValues = prevValue.filter((item: any) => item !== value);

if (updatedValues.length === 0) {
const { [filterType]: item, ...remainingFilters } = prevFilters;
const { [filterType]: _item, ...remainingFilters } = prevFilters;
return Object.keys(remainingFilters).length > 0
? (remainingFilters as FilterType)
: undefined;

@@ -175,7 +175,7 @@ export const removeAllFilters = (
return prevFilters;
}

const { [filterType]: item, ...remainingFilters } = prevFilters;
const { [filterType]: _item, ...remainingFilters } = prevFilters;

return Object.keys(remainingFilters).length > 0
? (remainingFilters as Record<
@@ -20,8 +20,7 @@ export const parseQueryIntoSpanKind = (
current = parsedValue;
}
} catch (error) {
console.log(error);
console.log('error while parsing json');
console.error('error while parsing json: ', error);
}
}
@@ -19,7 +19,7 @@ export const parseQueryIntoCurrent = (
current = parseInt(parsedValue, 10);
}
} catch (error) {
console.log('error while parsing json');
console.error('error while parsing json: ', error);
}
}
@@ -20,8 +20,7 @@ export const parseQueryIntoOrder = (
current = parsedValue;
}
} catch (error) {
console.log(error);
console.log('error while parsing json');
console.error('error while parsing json: ', error);
}
}
@@ -20,8 +20,7 @@ export const parseAggregateOrderParams = (
current = parsedValue;
}
} catch (error) {
console.log(error);
console.log('error while parsing json');
console.error('error while parsing json: ', error);
}
}
@@ -81,8 +81,10 @@ func FilterIntermediateColumns(result *qbtypes.QueryRangeResponse) *qbtypes.Quer
// Filter out columns for intermediate queries used only in formulas
filteredColumns := make([]*qbtypes.ColumnDescriptor, 0)
intermediateQueryNames := map[string]bool{
"error": true,
"total_span": true,
"error": true,
"total_span": true,
"endpoints_current": true,
"endpoints_legacy": true,
}

columnIndices := make([]int, 0)

@@ -296,15 +298,15 @@ func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.Que
return nil, err
}

queries := []qbtypes.QueryEnvelope{
buildEndpointsQuery(req),
queries := buildEndpointsQueries(req)
queries = append(queries,
buildLastSeenQuery(req),
buildRpsQuery(req),
buildErrorQuery(req),
buildTotalSpanQuery(req),
buildP99Query(req),
buildErrorRateFormula(),
}
)

return &qbtypes.QueryRangeRequest{
SchemaVersion: "v5",
@@ -346,20 +348,58 @@ func BuildDomainInfo(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.Que
}, nil
}

func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
return qbtypes.QueryEnvelope{
// buildEndpointsQueries returns queries for counting distinct URLs with semconv fallback.
// It uses two queries with mutually exclusive filters:
// - endpoints_current: count_distinct(url.full) WHERE url.full EXISTS
// - endpoints_legacy: count_distinct(http.url) WHERE url.full NOT EXISTS
// And a formula to combine them: endpoints_current + endpoints_legacy
func buildEndpointsQueries(req *thirdpartyapitypes.ThirdPartyApiRequest) []qbtypes.QueryEnvelope {
// Query for current semconv (url.full)
currentFilter := buildBaseFilter(req.Filter)
currentFilter.Expression = fmt.Sprintf("(%s) AND %s EXISTS", currentFilter.Expression, urlPathKey)

endpointsCurrent := qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "endpoints",
Name: "endpoints_current",
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: "count_distinct(http.url)"},
{Expression: fmt.Sprintf("count_distinct(%s)", urlPathKey)},
},
Filter: buildBaseFilter(req.Filter),
Filter: currentFilter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}

// Query for legacy semconv (http.url) - only when url.full doesn't exist
legacyFilter := buildBaseFilter(req.Filter)
legacyFilter.Expression = fmt.Sprintf("(%s) AND %s NOT EXISTS", legacyFilter.Expression, urlPathKey)

endpointsLegacy := qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Name: "endpoints_legacy",
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
Aggregations: []qbtypes.TraceAggregation{
{Expression: fmt.Sprintf("count_distinct(%s)", urlPathKeyLegacy)},
},
Filter: legacyFilter,
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
},
}

// Formula to combine both counts
endpointsFormula := qbtypes.QueryEnvelope{
Type: qbtypes.QueryTypeFormula,
Spec: qbtypes.QueryBuilderFormula{
Name: "endpoints",
Expression: "endpoints_current + endpoints_legacy",
},
}

return []qbtypes.QueryEnvelope{endpointsCurrent, endpointsLegacy, endpointsFormula}
}

func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
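For orientation, the three envelopes returned by buildEndpointsQueries slot into the v5 composite query that BuildDomainList assembles roughly like this (an illustrative TypeScript-flavored sketch of the request shape; field names follow the doc comment above, and `<base filter>` is a placeholder, not generated output):

```typescript
// Sketch of the "endpoints" portion of the composite query. The two
// builder queries have mutually exclusive filters (url.full EXISTS vs
// NOT EXISTS), so each span is counted by exactly one of them, and the
// formula sums the two partial counts into the final column.
const endpointsQueries = [
	{
		type: 'builder',
		spec: {
			name: 'endpoints_current',
			signal: 'traces',
			aggregations: [{ expression: 'count_distinct(url.full)' }],
			filter: { expression: '(<base filter>) AND url.full EXISTS' },
		},
	},
	{
		type: 'builder',
		spec: {
			name: 'endpoints_legacy',
			signal: 'traces',
			aggregations: [{ expression: 'count_distinct(http.url)' }],
			filter: { expression: '(<base filter>) AND url.full NOT EXISTS' },
		},
	},
	{
		type: 'formula',
		spec: { name: 'endpoints', expression: 'endpoints_current + endpoints_legacy' },
	},
];
```

FilterIntermediateColumns (in the earlier hunk) then hides the two intermediate columns so only the combined `endpoints` value reaches the response.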
@@ -235,31 +235,28 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
switch spec := item.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
@@ -36,29 +36,6 @@ func NewBucketCache(settings factory.ProviderSettings, cache cache.Cache, cacheT
}
}

// cachedBucket represents a cached time bucket
type cachedBucket struct {
StartMs uint64 `json:"startMs"`
EndMs uint64 `json:"endMs"`
Type qbtypes.RequestType `json:"type"`
Value json.RawMessage `json:"value"`
Stats qbtypes.ExecStats `json:"stats"`
}

// cachedData represents the full cached data for a query
type cachedData struct {
Buckets []*cachedBucket `json:"buckets"`
Warnings []string `json:"warnings"`
}

func (c *cachedData) UnmarshalBinary(data []byte) error {
return json.Unmarshal(data, c)
}

func (c *cachedData) MarshalBinary() ([]byte, error) {
return json.Marshal(c)
}

// GetMissRanges returns cached data and missing time ranges
func (bc *bucketCache) GetMissRanges(
ctx context.Context,

@@ -78,7 +55,7 @@ func (bc *bucketCache) GetMissRanges(
bc.logger.DebugContext(ctx, "cache key", "cache_key", cacheKey)

// Try to get cached data
var data cachedData
var data qbtypes.CachedData
err := bc.cache.Get(ctx, orgID, cacheKey, &data)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {

@@ -147,9 +124,9 @@ func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Que
cacheKey := bc.generateCacheKey(q)

// Get existing cached data
var existingData cachedData
var existingData qbtypes.CachedData
if err := bc.cache.Get(ctx, orgID, cacheKey, &existingData); err != nil {
existingData = cachedData{}
existingData = qbtypes.CachedData{}
}

// Trim the result to exclude data within flux interval

@@ -203,7 +180,7 @@ func (bc *bucketCache) Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Que
uniqueWarnings := bc.deduplicateWarnings(allWarnings)

// Create updated cached data
updatedData := cachedData{
updatedData := qbtypes.CachedData{
Buckets: mergedBuckets,
Warnings: uniqueWarnings,
}
@@ -222,7 +199,7 @@ func (bc *bucketCache) generateCacheKey(q qbtypes.Query) string {
}

// findMissingRangesWithStep identifies time ranges not covered by cached buckets with step alignment
func (bc *bucketCache) findMissingRangesWithStep(buckets []*cachedBucket, startMs, endMs uint64, stepMs uint64) []*qbtypes.TimeRange {
func (bc *bucketCache) findMissingRangesWithStep(buckets []*qbtypes.CachedBucket, startMs, endMs uint64, stepMs uint64) []*qbtypes.TimeRange {
// When step is 0 or window is too small to be cached, use simple algorithm
if stepMs == 0 || (startMs+stepMs) > endMs {
return bc.findMissingRangesBasic(buckets, startMs, endMs)
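A side note on the guard just above: step alignment matters because a partial step can never be served from a cached bucket. A toy TypeScript sketch of the idea (assumed semantics, inferred from the guard and from the test names later in this compare; not the actual SigNoz algorithm):

```typescript
// Align a query window to whole steps; the unaligned head/tail must be
// treated as cache misses and fetched fresh.
function alignedWindow(
	startMs: number,
	endMs: number,
	stepMs: number,
): { alignedStart: number; alignedEnd: number } | null {
	// Mirrors the Go guard: step 0, or a window smaller than one step,
	// falls back to the basic (non-aligned) algorithm.
	if (stepMs === 0 || startMs + stepMs > endMs) {
		return null;
	}
	const alignedStart = Math.ceil(startMs / stepMs) * stepMs;
	const alignedEnd = Math.floor(endMs / stepMs) * stepMs;
	return { alignedStart, alignedEnd };
}

// Example: alignedWindow(1500, 5000, 1000) -> { alignedStart: 2000,
// alignedEnd: 5000 }; the 1500-2000 head stays a miss (compare the
// "start_not_aligned_to_step" test case below).
```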
@@ -265,7 +242,7 @@ func (bc *bucketCache) findMissingRangesWithStep(buckets []*cachedBucket, startM
}

if needsSort {
slices.SortFunc(buckets, func(a, b *cachedBucket) int {
slices.SortFunc(buckets, func(a, b *qbtypes.CachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}

@@ -339,7 +316,7 @@ func (bc *bucketCache) findMissingRangesWithStep(buckets []*cachedBucket, startM
}

// findMissingRangesBasic is the simple algorithm without step alignment
func (bc *bucketCache) findMissingRangesBasic(buckets []*cachedBucket, startMs, endMs uint64) []*qbtypes.TimeRange {
func (bc *bucketCache) findMissingRangesBasic(buckets []*qbtypes.CachedBucket, startMs, endMs uint64) []*qbtypes.TimeRange {
// Check if already sorted before sorting
needsSort := false
for i := 1; i < len(buckets); i++ {

@@ -350,7 +327,7 @@ func (bc *bucketCache) findMissingRangesBasic(buckets []*cachedBucket, startMs,
}

if needsSort {
slices.SortFunc(buckets, func(a, b *cachedBucket) int {
slices.SortFunc(buckets, func(a, b *qbtypes.CachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}

@@ -421,9 +398,9 @@ func (bc *bucketCache) findMissingRangesBasic(buckets []*cachedBucket, startMs,
}

// filterRelevantBuckets returns buckets that overlap with the requested time range
func (bc *bucketCache) filterRelevantBuckets(buckets []*cachedBucket, startMs, endMs uint64) []*cachedBucket {
func (bc *bucketCache) filterRelevantBuckets(buckets []*qbtypes.CachedBucket, startMs, endMs uint64) []*qbtypes.CachedBucket {
// Pre-allocate with estimated capacity
relevant := make([]*cachedBucket, 0, len(buckets))
relevant := make([]*qbtypes.CachedBucket, 0, len(buckets))

for _, bucket := range buckets {
// Check if bucket overlaps with requested range

@@ -433,7 +410,7 @@ func (bc *bucketCache) filterRelevantBuckets(buckets []*cachedBucket, startMs, e
}

// Sort by start time
slices.SortFunc(relevant, func(a, b *cachedBucket) int {
slices.SortFunc(relevant, func(a, b *qbtypes.CachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}

@@ -447,7 +424,7 @@ func (bc *bucketCache) filterRelevantBuckets(buckets []*cachedBucket, startMs, e
}

// mergeBuckets combines multiple cached buckets into a single result
func (bc *bucketCache) mergeBuckets(ctx context.Context, buckets []*cachedBucket, warnings []string) *qbtypes.Result {
func (bc *bucketCache) mergeBuckets(ctx context.Context, buckets []*qbtypes.CachedBucket, warnings []string) *qbtypes.Result {
if len(buckets) == 0 {
return &qbtypes.Result{}
}

@@ -480,7 +457,7 @@ func (bc *bucketCache) mergeBuckets(ctx context.Context, buckets []*cachedBucket
}

// mergeTimeSeriesValues merges time series data from multiple buckets
func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cachedBucket) *qbtypes.TimeSeriesData {
func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*qbtypes.CachedBucket) *qbtypes.TimeSeriesData {
// Estimate capacity based on bucket count
estimatedSeries := len(buckets) * 10

@@ -631,7 +608,7 @@ func (bc *bucketCache) isEmptyResult(result *qbtypes.Result) (isEmpty bool, isFi
}

// resultToBuckets converts a query result into time-based buckets
func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Result, startMs, endMs uint64) []*cachedBucket {
func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Result, startMs, endMs uint64) []*qbtypes.CachedBucket {
// Check if result is empty
isEmpty, isFiltered := bc.isEmptyResult(result)

@@ -652,7 +629,7 @@ func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Resu

// Always create a bucket, even for empty filtered results
// This ensures we don't re-query for data that doesn't exist
return []*cachedBucket{
return []*qbtypes.CachedBucket{
{
StartMs: startMs,
EndMs: endMs,

@@ -664,9 +641,9 @@ func (bc *bucketCache) resultToBuckets(ctx context.Context, result *qbtypes.Resu
}

// mergeAndDeduplicateBuckets combines and deduplicates bucket lists
func (bc *bucketCache) mergeAndDeduplicateBuckets(existing, fresh []*cachedBucket) []*cachedBucket {
func (bc *bucketCache) mergeAndDeduplicateBuckets(existing, fresh []*qbtypes.CachedBucket) []*qbtypes.CachedBucket {
// Create a map to deduplicate by time range
bucketMap := make(map[string]*cachedBucket)
bucketMap := make(map[string]*qbtypes.CachedBucket)

// Add existing buckets
for _, bucket := range existing {

@@ -681,13 +658,13 @@ func (bc *bucketCache) mergeAndDeduplicateBuckets(existing, fresh []*cachedBucke
}

// Convert back to slice with pre-allocated capacity
result := make([]*cachedBucket, 0, len(bucketMap))
result := make([]*qbtypes.CachedBucket, 0, len(bucketMap))
for _, bucket := range bucketMap {
result = append(result, bucket)
}

// Sort by start time
slices.SortFunc(result, func(a, b *cachedBucket) int {
slices.SortFunc(result, func(a, b *qbtypes.CachedBucket) int {
if a.StartMs < b.StartMs {
return -1
}
@@ -147,14 +147,14 @@ func BenchmarkBucketCache_MergeTimeSeriesValues(b *testing.B) {
for _, tc := range testCases {
b.Run(tc.name, func(b *testing.B) {
// Create test buckets
buckets := make([]*cachedBucket, tc.numBuckets)
buckets := make([]*qbtypes.CachedBucket, tc.numBuckets)
for i := 0; i < tc.numBuckets; i++ {
startMs := uint64(i * 10000)
endMs := uint64((i + 1) * 10000)
result := createBenchmarkResultWithSeries(startMs, endMs, 1000, tc.numSeries, tc.numValues)

valueBytes, _ := json.Marshal(result.Value)
buckets[i] = &cachedBucket{
buckets[i] = &qbtypes.CachedBucket{
StartMs: startMs,
EndMs: endMs,
Type: qbtypes.RequestTypeTimeSeries,

@@ -417,8 +417,8 @@ func createBenchmarkResultWithSeries(startMs, endMs uint64, _ uint64, numSeries,
}

// Helper function to create buckets with specific gap patterns
func createBucketsWithPattern(numBuckets int, pattern string) []*cachedBucket {
buckets := make([]*cachedBucket, 0, numBuckets)
func createBucketsWithPattern(numBuckets int, pattern string) []*qbtypes.CachedBucket {
buckets := make([]*qbtypes.CachedBucket, 0, numBuckets)

for i := 0; i < numBuckets; i++ {
// Skip some buckets based on pattern

@@ -432,7 +432,7 @@ func createBucketsWithPattern(numBuckets int, pattern string) []*cachedBucket {
startMs := uint64(i * 10000)
endMs := uint64((i + 1) * 10000)

buckets = append(buckets, &cachedBucket{
buckets = append(buckets, &qbtypes.CachedBucket{
StartMs: startMs,
EndMs: endMs,
Type: qbtypes.RequestTypeTimeSeries,

@@ -521,7 +521,7 @@ func TestBucketCache_FindMissingRanges_EdgeCases(t *testing.T) {
bc := NewBucketCache(instrumentationtest.New().ToProviderSettings(), memCache, cacheTTL, defaultFluxInterval).(*bucketCache)

// Test with buckets that have gaps and overlaps
buckets := []*cachedBucket{
buckets := []*qbtypes.CachedBucket{
{StartMs: 1000, EndMs: 2000},
{StartMs: 2500, EndMs: 3500},
{StartMs: 3000, EndMs: 4000}, // Overlaps with previous

@@ -1097,7 +1097,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {

tests := []struct {
name string
buckets []*cachedBucket
buckets []*qbtypes.CachedBucket
startMs uint64
endMs uint64
stepMs uint64

@@ -1106,7 +1106,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
}{
{
name: "start_not_aligned_to_step",
buckets: []*cachedBucket{},
buckets: []*qbtypes.CachedBucket{},
startMs: 1500, // Not aligned to 1000ms step
endMs: 5000,
stepMs: 1000,

@@ -1118,7 +1118,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "end_not_aligned_to_step",
buckets: []*cachedBucket{},
buckets: []*qbtypes.CachedBucket{},
startMs: 1000,
endMs: 4500, // Not aligned to 1000ms step
stepMs: 1000,

@@ -1129,7 +1129,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "bucket_boundaries_not_aligned",
buckets: []*cachedBucket{
buckets: []*qbtypes.CachedBucket{
{StartMs: 1500, EndMs: 2500}, // Not aligned
},
startMs: 1000,

@@ -1143,7 +1143,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "small_window_less_than_step",
buckets: []*cachedBucket{},
buckets: []*qbtypes.CachedBucket{},
startMs: 1000,
endMs: 1500, // Less than one step
stepMs: 1000,

@@ -1154,7 +1154,7 @@ func TestBucketCache_FindMissingRangesWithStep(t *testing.T) {
},
{
name: "zero_step_uses_basic_algorithm",
buckets: []*cachedBucket{},
buckets: []*qbtypes.CachedBucket{},
startMs: 1000,
endMs: 5000,
stepMs: 0,
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"log/slog"
"sort"
"strconv"
"strings"

@@ -165,7 +164,6 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64
return nil, combinedErrors.WithAdditional(visitor.errors...).WithUrl(url)
}

// if there is nothing to filter, return true
if cond == "" {
cond = "true"
}

@@ -223,6 +221,7 @@ func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
}

func (v *filterExpressionVisitor) VisitQuery(ctx *grammar.QueryContext) any {

return v.Visit(ctx.Expression())
}

@@ -504,16 +503,8 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
// Get all values for operations that need them
values := ctx.AllValue()
if len(values) > 0 {
// there should only be one value for the following operators, even if there is more than one
// we just take the first value
value := v.Visit(values[0])

// Check if the value is a skip marker (embedded variable with __all__ value)
if strVal, ok := value.(string); ok && strVal == specialSkipConditionMarker {
v.logger.Info("skipping condition due to __all__ variable", "keys", keys, "value", value) //nolint:sloglint
return ""
}

if var_, ok := value.(string); ok {
// check if this is a variables
var ok bool

@@ -525,13 +516,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}

if ok {
if varItem.Type == qbtypes.DynamicVariableType {
if all_, ok := varItem.Value.(string); ok && all_ == "__all__" {
// this is likely overlooked by user, we treat it as if it was IN instead of =
v.logger.Warn("received unexpected __all__ value for single select dynamic variable", "variable", var_, "keys", keys, "value", value) //nolint:sloglint
return ""
}
}
switch varValues := varItem.Value.(type) {
case []any:
if len(varValues) == 0 {

@@ -797,12 +781,7 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value
value := trimQuotes(txt)
// Check if the string contains embedded variables
if strings.Contains(value, "$") {
return v.interpolateVariablesInString(value)
}
return value
return trimQuotes(txt)
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {

@@ -819,12 +798,7 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
// When the user writes an expression like `service.name=redis`
// The `redis` part is a VALUE context but parsed as a KEY token
// so we return the text as is
keyText := ctx.KEY().GetText()
// Check if this is a composed variable like $environment-xyz
if strings.HasPrefix(keyText, "$") {
return v.interpolateVariablesInString(keyText)
}
return keyText
return ctx.KEY().GetText()
}

return "" // Should not happen with valid input

@@ -856,7 +830,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// 1. either user meant key ( this is already handled above in fieldKeysForName )
// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

// Note:
// Note:
// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
// If user only wants to search `key`, then they have to use `key`
// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -954,103 +928,3 @@ func trimQuotes(txt string) string {
|
||||
txt = strings.ReplaceAll(txt, `\'`, `'`)
|
||||
return txt
|
||||
}
|
||||
|
||||
// specialSkipConditionMarker is used to indicate that the entire condition should be removed
|
||||
const specialSkipConditionMarker = "__signoz_skip_condition__"
|
||||
|
||||
// interpolateVariablesInString finds and replaces variable references within a string
|
||||
// by checking against actual variable names in the variables map.
|
||||
// Pure variable references (e.g., "$service") are returned as-is to let the
|
||||
// existing variable handling code process them.
|
||||
// Returns specialSkipConditionMarker if any variable has __all__ value.
|
||||
func (v *filterExpressionVisitor) interpolateVariablesInString(s string) string {
|
||||
// if this is a complete variable reference (just $varname with nothing else)
|
||||
// if so, return as-is
|
||||
varName := s
|
||||
if strings.HasPrefix(varName, "$") {
|
||||
_, exactMatch := v.variables[varName]
|
||||
if !exactMatch {
|
||||
_, exactMatch = v.variables[varName[1:]]
|
||||
}
|
||||
if exactMatch {
|
||||
return s
|
||||
}
|
||||
}
|
||||
|
||||
result := s
|
||||
|
||||
// find and replace variables by checking each variable name in the map
|
||||
// process longer variable names first to handle cases with prefix substring
|
||||
varNames := make([]string, 0, len(v.variables)*2)
|
||||
for name := range v.variables {
|
||||
varNames = append(varNames, name)
|
||||
// add with $ prefix if not already present
|
||||
if !strings.HasPrefix(name, "$") {
|
||||
varNames = append(varNames, "$"+name)
|
||||
}
|
||||
}
|
||||
|
||||
// sort by length (longest first) to match longer variable names before shorter ones
|
||||
sort.Slice(varNames, func(i, j int) bool {
|
||||
return len(varNames[i]) > len(varNames[j])
|
||||
})
|
||||
|
||||
for _, vName := range varNames {
|
||||
searchPattern := vName
|
||||
if !strings.HasPrefix(searchPattern, "$") {
|
||||
searchPattern = "$" + vName
|
||||
}
|
||||
|
||||
if strings.Contains(result, searchPattern) {
|
||||
// direct lookup
|
||||
varItem, ok := v.variables[vName]
|
||||
if !ok {
|
||||
// Try without $ prefix
|
||||
varItem, ok = v.variables[strings.TrimPrefix(vName, "$")]
|
||||
}
|
||||
|
||||
if ok {
|
||||
// special check for __all__ value - skip the entire condition
|
||||
if varItem.Type == qbtypes.DynamicVariableType {
|
||||
if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
|
||||
return specialSkipConditionMarker
|
||||
}
|
||||
}
|
||||
|
||||
replacement := v.formatVariableValueForInterpolation(varItem.Value, strings.TrimPrefix(vName, "$"))
|
||||
result = strings.ReplaceAll(result, searchPattern, replacement)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
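
Editor's note: the longest-first matching above is the load-bearing detail. A minimal, self-contained sketch of the idea (the stripped-down `interpolate` helper below is hypothetical and omits the visitor plumbing, the $-prefix normalization, and the __all__ sentinel):

package main

import (
	"fmt"
	"sort"
	"strings"
)

// interpolate replaces $name references inside a composed string,
// matching longer variable names first.
func interpolate(s string, variables map[string]string) string {
	names := make([]string, 0, len(variables))
	for name := range variables {
		names = append(names, name)
	}
	// longest first, so $environment wins over $env inside "$env-$environment"
	sort.Slice(names, func(i, j int) bool { return len(names[i]) > len(names[j]) })

	result := s
	for _, name := range names {
		result = strings.ReplaceAll(result, "$"+name, variables[name])
	}
	return result
}

func main() {
	vars := map[string]string{"env": "dev", "environment": "production"}
	fmt.Println(interpolate("$env-$environment", vars)) // dev-production
}

Without the sort, "$env" would be replaced first and would also consume the prefix of "$environment", producing "dev-devironment".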

func (v *filterExpressionVisitor) formatVariableValueForInterpolation(value any, varName string) string {
	switch val := value.(type) {
	case string:
		return val
	case []string:
		if len(val) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, val[0]))
		}
		if len(val) > 0 {
			return val[0]
		}
		return ""
	case []any:
		if len(val) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
		}
		if len(val) > 0 {
			return v.formatVariableValueForInterpolation(val[0], varName)
		}
		return ""
	case int, int32, int64, float32, float64:
		return fmt.Sprintf("%v", val)
	case bool:
		return strconv.FormatBool(val)
	default:
		return fmt.Sprintf("%v", val)
	}
}

@@ -10,113 +10,8 @@ import (
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/antlr4-go/antlr/v4"
	sqlbuilder "github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/assert"
)

// TestInterpolateVariablesInString tests the embedded variable interpolation feature (GitHub issue #10008)
func TestInterpolateVariablesInString(t *testing.T) {
	tests := []struct {
		name      string
		input     string
		variables map[string]qbtypes.VariableItem
		expected  string
	}{
		{
			name:  "pure variable reference - not interpolated",
			input: "$service",
			variables: map[string]qbtypes.VariableItem{
				"service": {Value: "auth-service"},
			},
			expected: "$service", // Pure variables are handled by existing code
		},
		{
			name:  "variable composed with suffix",
			input: "$environment-xyz",
			variables: map[string]qbtypes.VariableItem{
				"environment": {Value: "prod"},
			},
			expected: "prod-xyz",
		},
		{
			name:  "variable in quoted string with suffix",
			input: "$env-cluster",
			variables: map[string]qbtypes.VariableItem{
				"env": {Value: "staging"},
			},
			expected: "staging-cluster",
		},
		{
			name:  "variable with prefix and suffix",
			input: "prefix-$var-suffix",
			variables: map[string]qbtypes.VariableItem{
				"var": {Value: "middle"},
			},
			expected: "prefix-middle-suffix",
		},
		{
			name:  "multiple variables in one string",
			input: "$region-$env-cluster",
			variables: map[string]qbtypes.VariableItem{
				"region": {Value: "us-west"},
				"env":    {Value: "prod"},
			},
			expected: "us-west-prod-cluster",
		},
		{
			name:  "similar variable names - longer matches first",
			input: "$env-$environment",
			variables: map[string]qbtypes.VariableItem{
				"env":         {Value: "dev"},
				"environment": {Value: "production"},
			},
			expected: "dev-production",
		},
		{
			name:      "unknown variable - preserved as-is",
			input:     "$unknown-suffix",
			variables: map[string]qbtypes.VariableItem{},
			expected:  "$unknown-suffix",
		},
		{
			name:  "variable with underscore",
			input: "$my_var-test",
			variables: map[string]qbtypes.VariableItem{
				"my_var": {Value: "hello"},
			},
			expected: "hello-test",
		},
		{
			name:  "__all__ value returns skip marker",
			input: "$env-suffix",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			expected: specialSkipConditionMarker,
		},
		{
			name:  "multi-select takes first value",
			input: "$env-suffix",
			variables: map[string]qbtypes.VariableItem{
				"env": {Value: []any{"prod", "staging", "dev"}},
			},
			expected: "prod-suffix",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			visitor := &filterExpressionVisitor{
				variables: tt.variables,
			}
			result := visitor.interpolateVariablesInString(tt.input)
			assert.Equal(t, tt.expected, result)
		})
	}
}

// TestPrepareWhereClause_EmptyVariableList ensures PrepareWhereClause errors when a variable has an empty list value
func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
	tests := []struct {
@@ -147,7 +42,7 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
	}

	keys := map[string][]*telemetrytypes.TelemetryFieldKey{
		"service": {
		"service": []*telemetrytypes.TelemetryFieldKey{
			{
				Name:   "service",
				Signal: telemetrytypes.SignalLogs,
@@ -259,7 +154,7 @@ func TestVisitKey(t *testing.T) {
			name:    "Key not found",
			keyText: "unknown_key",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service": {
				"service": []*telemetrytypes.TelemetryFieldKey{
					{
						Name:   "service",
						Signal: telemetrytypes.SignalLogs,
@@ -440,7 +335,7 @@ func TestVisitKey(t *testing.T) {
			name:    "Unknown key with ignoreNotFoundKeys=false",
			keyText: "unknown_key",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service": {
				"service": []*telemetrytypes.TelemetryFieldKey{
					{
						Name:   "service",
						Signal: telemetrytypes.SignalLogs,
@@ -460,7 +355,7 @@ func TestVisitKey(t *testing.T) {
			name:    "Unknown key with ignoreNotFoundKeys=true",
			keyText: "unknown_key",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service": {
				"service": []*telemetrytypes.TelemetryFieldKey{
					{
						Name:   "service",
						Signal: telemetrytypes.SignalLogs,
@@ -572,7 +467,7 @@ func TestVisitKey(t *testing.T) {
			expectedWarnings:   nil,
			expectedMainWrnURL: "",
		},
		{
			name:      "only attribute.custom_field is selected",
			keyText:   "attribute.attribute.custom_field",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{

@@ -1,191 +0,0 @@
package telemetrylogs

import (
	"testing"

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/require"
)

func TestFilterExprEmbeddedVariables(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm, nil)
	keys := buildCompleteFieldKeyMap()

	testCases := []struct {
		name          string
		query         string
		variables     map[string]qbtypes.VariableItem
		shouldPass    bool
		expectedQuery string
		expectedArgs  []any
	}{
		{
			name:  "variable composed with suffix in quoted string",
			query: "version = '$env-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
			expectedArgs:  []any{"prod-xyz", true},
		},
		{
			name:  "variable in LIKE pattern with suffix",
			query: "service.name LIKE '$env%'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
			expectedArgs:  []any{"prod%"},
		},
		{
			name:  "variable with prefix and suffix",
			query: "path = 'prefix-$var-suffix'",
			variables: map[string]qbtypes.VariableItem{
				"var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "middle",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
			expectedArgs:  []any{"prefix-middle-suffix", true},
		},
		{
			name:  "multiple variables in one string",
			query: "path = '$region-$env-cluster'",
			variables: map[string]qbtypes.VariableItem{
				"region": {
					Type:  qbtypes.DynamicVariableType,
					Value: "us-west",
				},
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
			expectedArgs:  []any{"us-west-prod-cluster", true},
		},
		{
			name:  "similar variable names - longer matches first",
			query: "path = '$env-$environment'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "dev",
				},
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "production",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
			expectedArgs:  []any{"dev-production", true},
		},
		{
			name:  "pure variable reference - still works",
			query: "service.name = $service",
			variables: map[string]qbtypes.VariableItem{
				"service": {
					Type:  qbtypes.DynamicVariableType,
					Value: "auth-service",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
			expectedArgs:  []any{"auth-service"},
		},
		{
			name:  "variable with underscore composed with suffix",
			query: "version = '$my_var-test'",
			variables: map[string]qbtypes.VariableItem{
				"my_var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "hello",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
			expectedArgs:  []any{"hello-test", true},
		},
		{
			name:  "variable in ILIKE pattern",
			query: "message ILIKE '%$pattern%'",
			variables: map[string]qbtypes.VariableItem{
				"pattern": {
					Type:  qbtypes.DynamicVariableType,
					Value: "error",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)",
			expectedArgs:  []any{"%error%", true},
		},
		{
			name:  "__all__ value skips condition",
			query: "version = '$env-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE true",
			expectedArgs:  nil,
		},
		{
			name:  "multi-select takes first value",
			query: "version = '$env-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: []any{"prod", "staging", "dev"},
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
			expectedArgs:  []any{"prod-xyz", true},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			opts := querybuilder.FilterExprVisitorOpts{
				Logger:           instrumentationtest.New().Logger(),
				FieldMapper:      fm,
				ConditionBuilder: cb,
				FieldKeys:        keys,
				FullTextColumn:   DefaultFullTextColumn,
				JsonKeyToKey:     GetBodyJSONKey,
				Variables:        tc.variables,
			}

			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

			if tc.shouldPass {
				require.NoError(t, err)
				require.NotNil(t, clause)
				sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
				require.Equal(t, tc.expectedQuery, sql)
				require.Equal(t, tc.expectedArgs, args)
			} else {
				require.Error(t, err)
			}
		})
	}
}
@@ -1,235 +0,0 @@
package telemetrymetrics

import (
	"testing"

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/require"
)

func buildMetricsFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
	return map[string][]*telemetrytypes.TelemetryFieldKey{
		"service.name": {
			{
				Name:          "service.name",
				Signal:        telemetrytypes.SignalMetrics,
				FieldContext:  telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
		"host.name": {
			{
				Name:          "host.name",
				Signal:        telemetrytypes.SignalMetrics,
				FieldContext:  telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
		"environment": {
			{
				Name:          "environment",
				Signal:        telemetrytypes.SignalMetrics,
				FieldContext:  telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
		"region": {
			{
				Name:          "region",
				Signal:        telemetrytypes.SignalMetrics,
				FieldContext:  telemetrytypes.FieldContextResource,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
		"cluster": {
			{
				Name:          "cluster",
				Signal:        telemetrytypes.SignalMetrics,
				FieldContext:  telemetrytypes.FieldContextAttribute,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		},
	}
}

func TestFilterExprEmbeddedVariables(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)
	keys := buildMetricsFieldKeyMap()

	testCases := []struct {
		name          string
		query         string
		variables     map[string]qbtypes.VariableItem
		shouldPass    bool
		expectedQuery string
		expectedArgs  []any
	}{
		{
			name:  "variable composed with suffix in quoted string",
			query: "host.name = '$env-server'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
			expectedArgs:  []any{"prod-server"},
		},
		{
			name:  "variable in LIKE pattern with suffix",
			query: "service.name LIKE '$env%'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'service.name') LIKE ?",
			expectedArgs:  []any{"prod%"},
		},
		{
			name:  "variable with prefix and suffix",
			query: "cluster = 'prefix-$var-suffix'",
			variables: map[string]qbtypes.VariableItem{
				"var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "middle",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
			expectedArgs:  []any{"prefix-middle-suffix"},
		},
		{
			name:  "multiple variables in one string",
			query: "cluster = '$region-$env-cluster'",
			variables: map[string]qbtypes.VariableItem{
				"region": {
					Type:  qbtypes.DynamicVariableType,
					Value: "us-west",
				},
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
			expectedArgs:  []any{"us-west-prod-cluster"},
		},
		{
			name:  "similar variable names - longer matches first",
			query: "cluster = '$env-$environment'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "dev",
				},
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "production",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
			expectedArgs:  []any{"dev-production"},
		},
		{
			name:  "pure variable reference - still works",
			query: "service.name = $service",
			variables: map[string]qbtypes.VariableItem{
				"service": {
					Type:  qbtypes.DynamicVariableType,
					Value: "auth-service",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'service.name') = ?",
			expectedArgs:  []any{"auth-service"},
		},
		{
			name:  "variable with underscore composed with suffix",
			query: "host.name = '$my_var-test'",
			variables: map[string]qbtypes.VariableItem{
				"my_var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "hello",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
			expectedArgs:  []any{"hello-test"},
		},
		{
			name:  "variable in ILIKE pattern",
			query: "environment ILIKE '%$pattern%'",
			variables: map[string]qbtypes.VariableItem{
				"pattern": {
					Type:  qbtypes.DynamicVariableType,
					Value: "staging",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE LOWER(JSONExtractString(labels, 'environment')) LIKE LOWER(?)",
			expectedArgs:  []any{"%staging%"},
		},
		{
			name:  "__all__ value skips condition",
			query: "host.name = '$env-server'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE true",
			expectedArgs:  nil,
		},
		{
			name:  "multi-select takes first value",
			query: "host.name = '$env-server'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: []any{"prod", "staging", "dev"},
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
			expectedArgs:  []any{"prod-server"},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			opts := querybuilder.FilterExprVisitorOpts{
				Logger:           instrumentationtest.New().Logger(),
				FieldMapper:      fm,
				ConditionBuilder: cb,
				FieldKeys:        keys,
				Variables:        tc.variables,
			}

			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

			if tc.shouldPass {
				require.NoError(t, err)
				require.NotNil(t, clause)
				sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
				require.Equal(t, tc.expectedQuery, sql)
				require.Equal(t, tc.expectedArgs, args)
			} else {
				require.Error(t, err)
			}
		})
	}
}
@@ -279,7 +279,7 @@ func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.Telemetry
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "span scope field %s only supports '=' operator", key.Name)
	}

	isTrue := false
	var isTrue bool
	switch v := value.(type) {
	case bool:
		isTrue = v

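Editor's note: the `isTrue := false` to `var isTrue bool` change in this hunk drops a redundant assignment; a bool's zero value is already false, which is the form assignment linters such as wastedassign (enabled elsewhere in this changeset) prefer. A self-contained sketch of the same shape:

package main

import "fmt"

// describe mirrors the hunk above: declare with the zero value and let the
// switch assign only on the branches that need a different value.
func describe(value any) bool {
	var isTrue bool // zero value is false; an explicit `= false` would be redundant
	switch v := value.(type) {
	case bool:
		isTrue = v
	case string:
		isTrue = v == "true"
	}
	return isTrue
}

func main() {
	fmt.Println(describe(true), describe("true"), describe(3)) // true true false
}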
@@ -1,146 +0,0 @@
package telemetrytraces

import (
	"testing"

	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/huandu/go-sqlbuilder"
	"github.com/stretchr/testify/require"
)

func TestFilterExprEmbeddedVariables(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)
	keys := buildCompleteFieldKeyMap()

	testCases := []struct {
		name          string
		query         string
		variables     map[string]qbtypes.VariableItem
		shouldPass    bool
		expectedQuery string
		expectedArgs  []any
	}{
		{
			name:  "variable composed with suffix in quoted string",
			query: "http.method = '$method-request'",
			variables: map[string]qbtypes.VariableItem{
				"method": {
					Type:  qbtypes.DynamicVariableType,
					Value: "GET",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
			expectedArgs:  []any{"GET-request", true},
		},
		{
			name:  "variable in LIKE pattern with suffix",
			query: "service.name LIKE '$env%'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
			expectedArgs:  []any{"prod%"},
		},
		{
			name:  "variable with prefix and suffix",
			query: "user.id = 'user-$var-id'",
			variables: map[string]qbtypes.VariableItem{
				"var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "123",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
			expectedArgs:  []any{"user-123-id", true},
		},
		{
			name:  "multiple variables in one string",
			query: "user.id = '$region-$env-user'",
			variables: map[string]qbtypes.VariableItem{
				"region": {
					Type:  qbtypes.DynamicVariableType,
					Value: "us-west",
				},
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
			expectedArgs:  []any{"us-west-prod-user", true},
		},
		{
			name:  "pure variable reference - still works",
			query: "service.name = $service",
			variables: map[string]qbtypes.VariableItem{
				"service": {
					Type:  qbtypes.DynamicVariableType,
					Value: "auth-service",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
			expectedArgs:  []any{"auth-service"},
		},
		{
			name:  "__all__ value skips condition",
			query: "http.method = '$method-request'",
			variables: map[string]qbtypes.VariableItem{
				"method": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE true",
			expectedArgs:  nil,
		},
		{
			name:  "multi-select takes first value",
			query: "http.method = '$method-request'",
			variables: map[string]qbtypes.VariableItem{
				"method": {
					Type:  qbtypes.DynamicVariableType,
					Value: []any{"GET", "POST", "PUT"},
				},
			},
			shouldPass:    true,
			expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
			expectedArgs:  []any{"GET-request", true},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			opts := querybuilder.FilterExprVisitorOpts{
				Logger:           instrumentationtest.New().Logger(),
				FieldMapper:      fm,
				ConditionBuilder: cb,
				FieldKeys:        keys,
				Variables:        tc.variables,
			}

			clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)

			if tc.shouldPass {
				require.NoError(t, err)
				require.NotNil(t, clause)
				sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
				require.Equal(t, tc.expectedQuery, sql)
				require.Equal(t, tc.expectedArgs, args)
			} else {
				require.Error(t, err)
			}
		})
	}
}
pkg/types/querybuildertypes/querybuildertypesv5/cached.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package querybuildertypesv5

import (
	"bytes"
	"encoding/json"
	"maps"

	"github.com/SigNoz/signoz/pkg/types/cachetypes"
)

var _ cachetypes.Cacheable = (*CachedData)(nil)

type CachedBucket struct {
	StartMs uint64          `json:"startMs"`
	EndMs   uint64          `json:"endMs"`
	Type    RequestType     `json:"type"`
	Value   json.RawMessage `json:"value"`
	Stats   ExecStats       `json:"stats"`
}

func (c *CachedBucket) Clone() *CachedBucket {
	return &CachedBucket{
		StartMs: c.StartMs,
		EndMs:   c.EndMs,
		Type:    c.Type,
		Value:   bytes.Clone(c.Value),
		Stats: ExecStats{
			RowsScanned:   c.Stats.RowsScanned,
			BytesScanned:  c.Stats.BytesScanned,
			DurationMS:    c.Stats.DurationMS,
			StepIntervals: maps.Clone(c.Stats.StepIntervals),
		},
	}
}

// CachedData represents the full cached data for a query
type CachedData struct {
	Buckets  []*CachedBucket `json:"buckets"`
	Warnings []string        `json:"warnings"`
}

func (c *CachedData) UnmarshalBinary(data []byte) error {
	return json.Unmarshal(data, c)
}

func (c *CachedData) MarshalBinary() ([]byte, error) {
	return json.Marshal(c)
}

func (c *CachedData) Clone() cachetypes.Cacheable {
	clonedCachedData := new(CachedData)
	clonedCachedData.Buckets = make([]*CachedBucket, len(c.Buckets))
	for i := range c.Buckets {
		clonedCachedData.Buckets[i] = c.Buckets[i].Clone()
	}

	clonedCachedData.Warnings = make([]string, len(c.Warnings))
	copy(clonedCachedData.Warnings, c.Warnings)

	return clonedCachedData
}
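
Editor's note: the deep copies above matter because `json.RawMessage` is just a `[]byte`; a shallow copy would share the backing array between the cached bucket and its clone, so a mutation through one would be visible through the other. A minimal, standalone sketch of that hazard (illustration only, not repo code):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	original := json.RawMessage(`{"a":1}`)

	shallow := original                            // shares the backing array
	deep := json.RawMessage(bytes.Clone(original)) // independent copy

	original[2] = 'b' // mutate the original buffer

	fmt.Println(string(shallow)) // {"b":1} - the mutation leaks through
	fmt.Println(string(deep))    // {"a":1} - the clone is unaffected
}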
@@ -0,0 +1,87 @@
package querybuildertypesv5

import (
	"encoding/json"
	"testing"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/stretchr/testify/assert"
)

func createBuckets_TimeSeries(numBuckets int) []*CachedBucket {
	buckets := make([]*CachedBucket, numBuckets)

	for i := 0; i < numBuckets; i++ {
		startMs := uint64(i * 10000)
		endMs := uint64((i + 1) * 10000)

		timeSeriesData := &TimeSeriesData{
			QueryName: "A",
			Aggregations: []*AggregationBucket{
				{
					Index: 0,
					Series: []*TimeSeries{
						{
							Labels: []*Label{
								{Key: telemetrytypes.TelemetryFieldKey{Name: "service"}, Value: "test"},
							},
							Values: []*TimeSeriesValue{
								{Timestamp: 1672563720000, Value: 1, Partial: true}, // 12:02
								{Timestamp: 1672563900000, Value: 2},                // 12:05
								{Timestamp: 1672564200000, Value: 2.5},              // 12:10
								{Timestamp: 1672564500000, Value: 2.6},              // 12:15
								{Timestamp: 1672566600000, Value: 2.9},              // 12:50
								{Timestamp: 1672566900000, Value: 3},                // 12:55
								{Timestamp: 1672567080000, Value: 4, Partial: true}, // 12:58
							},
						},
					},
				},
			},
		}

		value, err := json.Marshal(timeSeriesData)
		if err != nil {
			panic(err)
		}

		buckets[i] = &CachedBucket{
			StartMs: startMs,
			EndMs:   endMs,
			Type:    RequestTypeTimeSeries,
			Value:   json.RawMessage(value),
			Stats: ExecStats{
				RowsScanned:  uint64(i * 500),
				BytesScanned: uint64(i * 10000),
				DurationMS:   uint64(i * 1000),
			},
		}
	}

	return buckets
}

func BenchmarkCachedData_JSONMarshal_10kbuckets(b *testing.B) {
	buckets := createBuckets_TimeSeries(10000)
	data := &CachedData{Buckets: buckets}

	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		_, err := json.Marshal(data)
		assert.NoError(b, err)
	}
}

func BenchmarkCachedData_Clone_10kbuckets(b *testing.B) {
	buckets := createBuckets_TimeSeries(10000)
	data := &CachedData{Buckets: buckets}

	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		_ = data.Clone()
	}
}
@@ -2,7 +2,6 @@ package variables

import (
	"fmt"
	"sort"
	"strconv"
	"strings"

@@ -36,22 +35,19 @@ func (e *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol
type variableReplacementVisitor struct {
	variables map[string]qbtypes.VariableItem
	errors    []string
	warnings  []string
}

// specialSkipMarker is used to indicate that a condition should be removed
const specialSkipMarker = "__SKIP_CONDITION__"

// ReplaceVariablesInExpression takes a filter expression and returns it with variables replaced.
// Also returns any warnings generated during the replacement process.
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, []string, error) {
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, error) {
	input := antlr.NewInputStream(expression)
	lexer := grammar.NewFilterQueryLexer(input)

	visitor := &variableReplacementVisitor{
		variables: variables,
		errors:    []string{},
		warnings:  []string{},
	}

	lexerErrorListener := NewErrorListener()
@@ -67,21 +63,21 @@ func ReplaceVariablesInExpression(expression string, variables map[string]qbtype
	tree := parser.Query()

	if len(parserErrorListener.SyntaxErrors) > 0 {
		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
	}

	result := visitor.Visit(tree).(string)

	if len(visitor.errors) > 0 {
		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
	}

	// If the entire expression should be skipped, return empty string
	if result == specialSkipMarker {
		return "", visitor.warnings, nil
		return "", nil
	}

	return result, visitor.warnings, nil
	return result, nil
}
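
Editor's note: the skip-marker contract here is subtle enough to deserve a standalone illustration: when interpolation hits a variable set to __all__, the whole expression collapses to the empty string, which callers treat as "no filter". A toy sketch under that assumption (the simplified replaceVars below is hypothetical, not the visitor):

package main

import (
	"fmt"
	"strings"
)

const skipMarker = "__SKIP_CONDITION__"

// replaceVars is a toy stand-in for the visitor: it returns the skip marker
// when a referenced variable is set to __all__.
func replaceVars(expr string, vars map[string]string) string {
	for name, val := range vars {
		if strings.Contains(expr, "$"+name) {
			if val == "__all__" {
				return skipMarker
			}
			expr = strings.ReplaceAll(expr, "$"+name, "'"+val+"'")
		}
	}
	return expr
}

func main() {
	result := replaceVars("cluster_name = '$env-xyz'", map[string]string{"env": "__all__"})
	if result == skipMarker {
		result = "" // callers drop the condition entirely
	}
	fmt.Printf("%q\n", result) // ""
}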

// Visit dispatches to the specific visit method based on node type
@@ -482,32 +478,11 @@ func (v *variableReplacementVisitor) VisitValue(ctx *grammar.ValueContext) any {
		// Replace variable with its value
		return v.formatVariableValue(varItem.Value)
	}

	// did not find an exact match; check if it's a composed string like "$env-xyz"
	interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
	if hasAllValue {
		return specialSkipMarker
	}
	if interpolated != originalValue {
		return v.formatVariableValue(interpolated)
	}

	// No variables found, return original
	return originalValue
}

	// Check if the quoted text contains embedded variables (not starting with $)
	if ctx.QUOTED_TEXT() != nil && strings.Contains(originalValue, "$") {
		interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
		if hasAllValue {
			return specialSkipMarker
		}
		return v.formatVariableValue(interpolated)
	}

	// Return original value if not a variable or variable not found
	// If it was quoted text and not a variable, return with quotes
	if ctx.QUOTED_TEXT() != nil {
	if ctx.QUOTED_TEXT() != nil && !strings.HasPrefix(originalValue, "$") {
		return ctx.QUOTED_TEXT().GetText()
	}
	return originalValue
@@ -542,82 +517,6 @@ func (v *variableReplacementVisitor) VisitKey(ctx *grammar.KeyContext) any {
	return keyText
}

// interpolateVariablesInString finds and replaces variable references within a string
// by checking against actual variable names in the variables map.
// Returns the interpolated string and a boolean indicating if any variable had an __all__ value.
func (v *variableReplacementVisitor) interpolateVariablesInString(s string) (string, bool) {
	result := s

	varNames := make([]string, 0, len(v.variables)*2)
	for name := range v.variables {
		varNames = append(varNames, name)
		if !strings.HasPrefix(name, "$") {
			varNames = append(varNames, "$"+name)
		}
	}

	sort.Slice(varNames, func(i, j int) bool {
		return len(varNames[i]) > len(varNames[j])
	})

	for _, varName := range varNames {
		// ensure we're looking for the $varname pattern
		searchPattern := varName
		if !strings.HasPrefix(searchPattern, "$") {
			searchPattern = "$" + varName
		}

		if strings.Contains(result, searchPattern) {
			// direct lookup
			varItem, ok := v.variables[varName]
			if !ok {
				// try without the $ prefix
				varItem, ok = v.variables[strings.TrimPrefix(varName, "$")]
			}

			if ok {
				// special check for __all__ value
				if varItem.Type == qbtypes.DynamicVariableType {
					if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
						return "", true
					}
				}

				// format the replacement value (unquoted for string interpolation)
				replacement := v.formatVariableValueUnquoted(varItem.Value, strings.TrimPrefix(varName, "$"))
				result = strings.ReplaceAll(result, searchPattern, replacement)
			}
		}
	}

	return result, false
}
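
Editor's note: unlike the querybuilder visitor earlier in this diff, which smuggles the skip signal through a sentinel string return, this helper reports it through a second boolean. A toy sketch of how a caller consumes that (result, hasAll) contract (hypothetical, standalone):

package main

import (
	"fmt"
	"strings"
)

// interpolate mirrors the (result, hasAll) shape of interpolateVariablesInString.
func interpolate(s string, vars map[string]string) (string, bool) {
	for name, val := range vars {
		if !strings.Contains(s, "$"+name) {
			continue
		}
		if val == "__all__" {
			return "", true // caller must drop the whole condition
		}
		s = strings.ReplaceAll(s, "$"+name, val)
	}
	return s, false
}

func main() {
	if out, hasAll := interpolate("$env-cluster", map[string]string{"env": "__all__"}); hasAll {
		fmt.Println("condition skipped")
	} else {
		fmt.Println(out)
	}
}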

// formatVariableValueUnquoted returns a string representation of the value for string interpolation.
// For multi-select (array) values, it takes the first value and adds a warning.
func (v *variableReplacementVisitor) formatVariableValueUnquoted(value any, varName string) string {
	switch val := value.(type) {
	case []string:
		if len(val) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, val[0]))
		}
		if len(val) > 0 {
			return val[0]
		}
		return ""
	case []any:
		if len(val) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
		}
		if len(val) > 0 {
			return fmt.Sprintf("%v", val[0])
		}
		return ""
	default:
		return fmt.Sprintf("%v", val)
	}
}

// formatVariableValue formats a variable value for inclusion in the expression
func (v *variableReplacementVisitor) formatVariableValue(value any) string {
	switch val := value.(type) {

@@ -421,107 +421,11 @@ func TestReplaceVariablesInExpression(t *testing.T) {
			},
			expected: "message NOT CONTAINS 'debug'",
		},
		{
			name:       "variable composed with suffix in value",
			expression: "cluster_name = '$environment-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			expected: "cluster_name = 'prod-xyz'",
		},
		{
			name:       "variable composed with suffix without quotes",
			expression: "cluster_name = $environment-xyz",
			variables: map[string]qbtypes.VariableItem{
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "staging",
				},
			},
			expected: "cluster_name = 'staging-xyz'",
		},
		{
			name:       "variable in LIKE pattern with suffix",
			expression: "service.name LIKE '$env%'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			expected: "service.name LIKE 'prod%'",
		},
		{
			name:       "variable composed with prefix and suffix",
			expression: "label = 'prefix-$var-suffix'",
			variables: map[string]qbtypes.VariableItem{
				"var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "middle",
				},
			},
			expected: "label = 'prefix-middle-suffix'",
		},
		{
			name:       "multiple variables in one string",
			expression: "path = '$region-$env-cluster'",
			variables: map[string]qbtypes.VariableItem{
				"region": {
					Type:  qbtypes.DynamicVariableType,
					Value: "us-west",
				},
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			expected: "path = 'us-west-prod-cluster'",
		},
		{
			name:       "embedded variable with __all__ value skips condition",
			expression: "cluster_name = '$environment-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			expected: "",
		},
		{
			name:       "variable with underscore composed with suffix",
			expression: "name = '$my_var-test'",
			variables: map[string]qbtypes.VariableItem{
				"my_var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "hello",
				},
			},
			expected: "name = 'hello-test'",
		},
		{
			name:       "similar variable names - longer matches first",
			expression: "name = '$env-$environment'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "dev",
				},
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "production",
				},
			},
			expected: "name = 'dev-production'",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, _, err := ReplaceVariablesInExpression(tt.expression, tt.variables)
			result, err := ReplaceVariablesInExpression(tt.expression, tt.variables)

			if tt.wantErr {
				assert.Error(t, err)

@@ -456,32 +456,3 @@ def assert_scalar_column_order(
        f"{context}: Column {column_index} order mismatch. "
        f"Expected {expected_values}, got {actual_values}"
    )


def make_substitute_vars_request(
    signoz: "types.SigNoz",
    token: str,
    start_ms: int,
    end_ms: int,
    queries: List[Dict],
    variables: Dict[str, Any],
    timeout: int = QUERY_TIMEOUT,
) -> requests.Response:
    return requests.post(
        signoz.self.host_configs["8080"].get("/api/v5/substitute_vars"),
        timeout=timeout,
        headers={"authorization": f"Bearer {token}"},
        json={
            "schemaVersion": "v1",
            "start": start_ms,
            "end": end_ms,
            "requestType": "time_series",
            "compositeQuery": {"queries": queries},
            "variables": variables,
            "formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
        },
    )


def sum_series_values(series_values: List[Dict]) -> float:
    return sum(point["value"] for point in series_values)

@@ -2,6 +2,8 @@ from datetime import datetime, timedelta, timezone
|
||||
from http import HTTPStatus
|
||||
from typing import Callable, Dict, List, Optional, Tuple
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import querier, types
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.logs import Logs
|
||||
@@ -22,8 +24,6 @@ metric_values_for_test = {
|
||||
"service-d": 10.0,
|
||||
}
|
||||
|
||||
SCALAR_FORMAT_OPTIONS = {"formatTableResultForUI": True, "fillGaps": False}
|
||||
|
||||
|
||||
def generate_logs_with_counts(
|
||||
now: datetime,
|
||||
@@ -85,6 +85,30 @@ def generate_metrics_with_values(
|
||||
return metrics
|
||||
|
||||
|
||||
def make_scalar_query_request(
|
||||
signoz: types.SigNoz,
|
||||
token: str,
|
||||
now: datetime,
|
||||
queries: List[Dict],
|
||||
lookback_minutes: int = 5,
|
||||
) -> requests.Response:
|
||||
return requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
|
||||
timeout=5,
|
||||
headers={"authorization": f"Bearer {token}"},
|
||||
json={
|
||||
"schemaVersion": "v1",
|
||||
"start": int(
|
||||
(now - timedelta(minutes=lookback_minutes)).timestamp() * 1000
|
||||
),
|
||||
"end": int(now.timestamp() * 1000),
|
||||
"requestType": "scalar",
|
||||
"compositeQuery": {"queries": queries},
|
||||
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def build_logs_query(
|
||||
name: str = "A",
|
||||
aggregations: Optional[List[str]] = None,
|
||||
@@ -194,16 +218,11 @@ def test_logs_scalar_group_by_single_agg_no_order(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_logs_query(group_by=["service.name"])],
|
||||
request_type="scalar",
|
||||
format_options={"formatTableResultForUI": True, "fillGaps": False},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -227,16 +246,11 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_logs_query(group_by=["service.name"], order_by=[("count()", "asc")])],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -260,16 +274,11 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_logs_query(group_by=["service.name"], order_by=[("count()", "desc")])],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -293,20 +302,15 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_asc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"], order_by=[("service.name", "asc")]
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -330,20 +334,15 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_desc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"], order_by=[("service.name", "desc")]
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -367,13 +366,10 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"],
|
||||
@@ -381,8 +377,6 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
|
||||
order_by=[("count()", "asc")],
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -404,13 +398,10 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"],
|
||||
@@ -418,8 +409,6 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
|
||||
order_by=[("count_distinct(body)", "desc")],
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -442,20 +431,15 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -479,20 +463,15 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -516,20 +495,15 @@ def test_logs_scalar_group_by_order_by_grouping_key_asc_limit_2(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_logs_query(
|
||||
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -553,16 +527,11 @@ def test_traces_scalar_group_by_single_agg_no_order(
|
||||
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_traces_query(group_by=["service.name"])],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -586,16 +555,11 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc(
|
||||
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_traces_query(group_by=["service.name"], order_by=[("count()", "asc")])],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -619,16 +583,11 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc(
|
||||
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_traces_query(group_by=["service.name"], order_by=[("count()", "desc")])],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -652,20 +611,15 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_asc(
|
||||
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_traces_query(
|
||||
group_by=["service.name"], order_by=[("service.name", "asc")]
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -689,20 +643,15 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_desc(
|
||||
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_traces_query(
|
||||
group_by=["service.name"], order_by=[("service.name", "desc")]
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -726,13 +675,10 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
|
||||
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[
|
||||
build_traces_query(
|
||||
group_by=["service.name"],
|
||||
@@ -740,8 +686,6 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
|
||||
order_by=[("count()", "asc")],
|
||||
)
|
||||
],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -763,20 +707,15 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_traces_query(
                group_by=["service.name"], order_by=[("count()", "asc")], limit=2
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -800,20 +739,15 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_traces_query(
                group_by=["service.name"], order_by=[("count()", "desc")], limit=3
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -837,20 +771,15 @@ def test_traces_scalar_group_by_order_by_grouping_key_asc_limit_2(
    insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_traces_query(
                group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -874,16 +803,11 @@ def test_metrics_scalar_group_by_single_agg_no_order(
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [build_metrics_query(group_by=["service.name"])],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -912,21 +836,16 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc(
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_metrics_query(
                group_by=["service.name"],
                order_by=[("sum(test.metric)", "asc")],
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -955,21 +874,16 @@ def test_metrics_scalar_group_by_single_agg_order_by_grouping_key_asc(
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_metrics_query(
                group_by=["service.name"],
                order_by=[("service.name", "asc")],
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -998,13 +912,10 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_metrics_query(
                group_by=["service.name"],
@@ -1012,8 +923,6 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
                limit=2,
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -1037,13 +946,10 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_metrics_query(
                group_by=["service.name"],
@@ -1051,8 +957,6 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
                limit=3,
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
@@ -1076,13 +980,10 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
    insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    response = querier.make_query_request(
    response = make_scalar_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        now,
        [
            build_metrics_query(
                group_by=["service.name"],
@@ -1090,8 +991,6 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
                limit=2,
            )
        ],
        request_type="scalar",
        format_options=SCALAR_FORMAT_OPTIONS,
    )

    assert response.status_code == HTTPStatus.OK
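Every hunk above applies the same mechanical refactor: the tests stop calling the generic querier.make_query_request fixture directly and instead go through a shared make_scalar_query_request helper that also receives now. The helper's definition is not part of this diff; a minimal sketch of what it could look like, assuming it simply wraps the pre-existing fixture with scalar defaults:

# Hypothetical sketch only -- the real helper lives in the test fixtures
# (e.g. fixtures/querier.py) and may differ in signature and behavior.
def make_scalar_query_request(
    signoz,
    token,
    start_ms,
    end_ms,
    now,  # accepted because every caller passes it; its use is not visible in this diff
    queries,
    request_type="scalar",
    format_options=SCALAR_FORMAT_OPTIONS,
):
    # Centralizes the scalar defaults so each test no longer repeats
    # request_type="scalar" and format_options=SCALAR_FORMAT_OPTIONS.
    return querier.make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        queries,
        request_type=request_type,
        format_options=format_options,
    )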
@@ -1,356 +0,0 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.querier import build_scalar_query, make_substitute_vars_request

LOGS_COUNT_AGG = [{"expression": "count()"}]

def test_substitute_vars_standalone_variable(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "query", "value": "auth-service"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "service.name = 'auth-service'"

def test_substitute_vars_variable_in_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "custom", "value": "prod"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "cluster_name = 'prod-xyz'"

def test_substitute_vars_multiple_variables(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name = $service AND env = $environment",
            )
        ],
        variables={
            "service": {"type": "text", "value": "auth-service"},
            "environment": {"type": "query", "value": "production"},
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "service.name = 'auth-service' AND env = 'production'"

def test_substitute_vars_array_variable(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name IN $services",
            )
        ],
        variables={
            "services": {"type": "query", "value": ["auth-service", "api-service"]}
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "service.name IN ['auth-service', 'api-service']"

def test_substitute_vars_like_pattern(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name LIKE '$env%'",
            )
        ],
        variables={"env": {"type": "text", "value": "prod"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "service.name LIKE 'prod%'"

def test_substitute_vars_variable_without_quotes(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = $environment-xyz",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "staging"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "cluster_name = 'staging-xyz'"

def test_substitute_vars_all_value_standalone(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "dynamic", "value": "__all__"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    # expression should be empty when __all__ is used
    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == ""

def test_substitute_vars_all_value_in_composed_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "__all__"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    # expression should be empty when __all__ is used
    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == ""

def test_substitute_vars_all_value_partial(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_substitute_vars_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name = $service AND env = $env",
            )
        ],
        variables={
            "service": {"type": "dynamic", "value": "__all__"},
            "env": {"type": "dynamic", "value": "production"},
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    data = response.json()["data"]
    queries = data["compositeQuery"]["queries"]
    assert len(queries) == 1

    # only env condition should remain
    filter_expr = queries[0]["spec"]["filter"]["expression"]
    assert filter_expr == "env = 'production'"
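Taken together, the deleted tests above pin down the substitution rules: a scalar variable renders as a single-quoted literal, a list renders as a bracketed list (for IN clauses), a variable embedded inside a quoted string is spliced in place, and the sentinel __all__ removes the whole condition. A small Python illustration of the scalar/list rendering the assertions imply (illustrative only; the actual substitution happens server-side, not in these fixtures):

# Illustrative sketch of the value-rendering rules asserted above.
def render_variable(value):
    # Lists render as bracketed, quoted elements, matching
    # "service.name IN ['auth-service', 'api-service']".
    if isinstance(value, list):
        return "[" + ", ".join(f"'{v}'" for v in value) + "]"
    # Scalars render as single-quoted literals, matching
    # "service.name = 'auth-service'".
    return f"'{value}'"

assert render_variable("auth-service") == "'auth-service'"
assert render_variable(["auth-service", "api-service"]) == "['auth-service', 'api-service']"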
@@ -1,803 +0,0 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
from fixtures.querier import (
    build_scalar_query,
    get_all_series,
    get_series_values,
    make_query_request,
    sum_series_values,
)

LOGS_COUNT_AGG = [{"expression": "count()"}]

def test_query_range_with_standalone_variable(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "auth-service"},
            attributes={"env": "production"},
            body="Auth service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "api-service"},
            attributes={"env": "production"},
            body="API service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "web-service"},
            attributes={"env": "staging"},
            body="Web service log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "query", "value": "auth-service"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 1  # auth-service log

def test_query_range_with_variable_in_array(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "auth-service"},
            attributes={"env": "production"},
            body="Auth service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "api-service"},
            attributes={"env": "production"},
            body="API service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "web-service"},
            attributes={"env": "staging"},
            body="Web service log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name IN $services",
            )
        ],
        variables={
            "services": {"type": "custom", "value": ["auth-service", "api-service"]}
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 2  # auth-service and api-service logs

def test_query_range_with_variable_composed_in_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service1"},
            attributes={"cluster_name": "prod-xyz"},
            body="Prod cluster log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service2"},
            attributes={"cluster_name": "staging-xyz"},
            body="Staging cluster log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service3"},
            attributes={"cluster_name": "dev-xyz"},
            body="Dev cluster log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "prod"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 1  # prod-xyz log

def test_query_range_with_variable_in_like_pattern(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "prod-auth"},
            attributes={"env": "production"},
            body="Prod auth log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "prod-api"},
            attributes={"env": "production"},
            body="Prod API log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "staging-api"},
            attributes={"env": "staging"},
            body="Staging API log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name LIKE '$env%'",
            )
        ],
        variables={"env": {"type": "text", "value": "prod"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 2  # prod-auth and prod-api logs

def test_query_range_with_multiple_variables_in_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service1"},
            attributes={"path": "us-west-prod-cluster"},
            body="US West Prod log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service2"},
            attributes={"path": "us-east-prod-cluster"},
            body="US East Prod log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service3"},
            attributes={"path": "eu-west-staging-cluster"},
            body="EU West Staging log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="path = '$region-$env-cluster'",
            )
        ],
        variables={
            "region": {"type": "query", "value": "us-west"},
            "env": {"type": "custom", "value": "prod"},
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 1  # us-west-prod-cluster log

def test_query_range_with_variable_prefix_and_suffix(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service1"},
            attributes={"label": "prefix-middle-suffix"},
            body="Middle label log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service2"},
            attributes={"label": "prefix-other-suffix"},
            body="Other label log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="label = 'prefix-$var-suffix'",
            )
        ],
        variables={"var": {"type": "text", "value": "middle"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 1  # prefix-middle-suffix log

def test_query_range_with_variable_without_quotes(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service1"},
            attributes={"cluster_name": "staging-xyz"},
            body="Staging cluster log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service2"},
            attributes={"cluster_name": "prod-xyz"},
            body="Prod cluster log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = $environment-xyz",
            )
        ],
        variables={"environment": {"type": "custom", "value": "staging"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 1  # staging-xyz log

def test_query_range_time_series_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = []

    for i in range(3):
        logs.append(
            Logs(
                timestamp=now - timedelta(minutes=i + 1),
                resources={"service.name": "auth-service"},
                attributes={"cluster_name": "prod-xyz"},
                body=f"Auth service log {i}",
                severity_text="INFO",
            )
        )
        logs.append(
            Logs(
                timestamp=now - timedelta(minutes=i + 1),
                resources={"service.name": "api-service"},
                attributes={"cluster_name": "prod-xyz"},
                body=f"API service log {i}",
                severity_text="INFO",
            )
        )
        logs.append(
            Logs(
                timestamp=now - timedelta(minutes=i + 1),
                resources={"service.name": "web-service"},
                attributes={"cluster_name": "staging-xyz"},
                body=f"Web service log {i}",
                severity_text="INFO",
            )
        )

    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=10)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$env-xyz'",
                group_by=[
                    {
                        "name": "service.name",
                        "fieldDataType": "string",
                        "fieldContext": "resource",
                    }
                ],
            )
        ],
        variables={"env": {"type": "query", "value": "prod"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    all_series = get_all_series(response.json(), "A")
    assert len(all_series) == 2  # auth-service and api-service

    # 6 (3 auth-service + 3 api-service)
    total_count = sum(sum_series_values(s["values"]) for s in all_series)
    assert total_count == 6

def test_query_range_with_different_variable_types(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "auth-service"},
            attributes={"env": "production"},
            body="Auth service log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    # all different variable types
    for var_type in ["query", "custom", "text", "dynamic"]:
        response = make_query_request(
            signoz,
            token,
            start_ms,
            end_ms,
            [
                build_scalar_query(
                    "A",
                    "logs",
                    LOGS_COUNT_AGG,
                    filter_expression="service.name = $service",
                )
            ],
            variables={"service": {"type": var_type, "value": "auth-service"}},
        )

        assert (
            response.status_code == HTTPStatus.OK
        ), f"Failed for variable type: {var_type}"
        assert response.json()["status"] == "success"

        values = get_series_values(response.json(), "A")
        total_count = sum_series_values(values)
        assert (
            total_count == 1
        ), f"Expected 1 log for variable type {var_type}, got {total_count}"

def test_query_range_with_all_value_standalone(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "auth-service"},
            attributes={"env": "production"},
            body="Auth service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "api-service"},
            attributes={"env": "production"},
            body="API service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "web-service"},
            attributes={"env": "staging"},
            body="Web service log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    # `__all__`, the filter condition should be removed
    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "dynamic", "value": "__all__"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 3

def test_query_range_with_all_value_in_composed_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service1"},
            attributes={"cluster_name": "prod-xyz"},
            body="Prod cluster log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service2"},
            attributes={"cluster_name": "staging-xyz"},
            body="Staging cluster log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "service3"},
            attributes={"cluster_name": "dev-xyz"},
            body="Dev cluster log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    # `__all__` in composed string, the filter should be removed
    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "__all__"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 3  # all logs should be returned

def test_query_range_with_all_value_partial_filter(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "auth-service"},
            attributes={"env": "production"},
            body="Auth prod log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "api-service"},
            attributes={"env": "production"},
            body="API prod log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "web-service"},
            attributes={"env": "staging"},
            body="Web staging log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    # `__all__` for service, only env filter should apply
    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name = $service AND env = $env",
            )
        ],
        variables={
            "service": {"type": "dynamic", "value": "__all__"},
            "env": {"type": "dynamic", "value": "production"},
        },
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 2  # prod logs (auth + api)

def test_query_range_with_all_value_in_array(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = [
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "auth-service"},
            attributes={"env": "production"},
            body="Auth service log",
            severity_text="INFO",
        ),
        Logs(
            timestamp=now - timedelta(minutes=1),
            resources={"service.name": "api-service"},
            attributes={"env": "production"},
            body="API service log",
            severity_text="INFO",
        ),
    ]
    insert_logs(logs)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    response = make_query_request(
        signoz,
        token,
        start_ms,
        end_ms,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name IN $services",
            )
        ],
        variables={"services": {"type": "dynamic", "value": "__all__"}},
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    values = get_series_values(response.json(), "A")
    total_count = sum_series_values(values)
    assert total_count == 2  # all logs
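These query-range tests reduce every response to a single number through the get_series_values and sum_series_values fixtures. A sketch of the shape the usage implies (an assumption inferred from the call sites above; the actual helpers in fixtures/querier.py may read the payload differently):

# Hypothetical sketch: each series point is assumed to carry a numeric "value".
def sum_series_values(values):
    # Summing the per-point values yields the total count the assertions compare against.
    return sum(float(point["value"]) for point in values)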