Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-13 12:52:55 +00:00)

Compare commits: gh-4291 ... unit-testi (25 commits)
| SHA1 |
|---|
| ee4086dce5 |
| d688399b91 |
| cfc239e3c9 |
| 3572baa5eb |
| ff26c5f69c |
| 9230f2442f |
| 7fed80b145 |
| a268bb910c |
| fbbe0bef86 |
| bcd6ac47f7 |
| ec27916fa5 |
| 263ac9fa5a |
| 7d73b144c9 |
| 5fd919a369 |
| 3159121929 |
| d5c3760dc9 |
| 950584af36 |
| fb5d4475e2 |
| 6771857941 |
| 1e16df65ac |
| 933b70134c |
| 581e80cd37 |
| 50836593a4 |
| f647e828dd |
| 56fb58abed |
```diff
@@ -7,7 +7,6 @@ import {
 } from '@ant-design/icons';
 import Convert from 'ansi-to-html';
 import { Button, Divider, Row, Typography } from 'antd';
-import LogDetail from 'components/LogDetail';
 import LogsExplorerContext from 'container/LogsExplorerContext';
 import dayjs from 'dayjs';
 import dompurify from 'dompurify';
```
```diff
@@ -95,11 +94,15 @@ function LogSelectedField({
 type ListLogViewProps = {
 	logData: ILog;
 	selectedFields: IField[];
+	onSetActiveLog: (log: ILog) => void;
+	onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
 };
 
 function ListLogView({
 	logData,
 	selectedFields,
+	onSetActiveLog,
+	onAddToQuery,
 }: ListLogViewProps): JSX.Element {
 	const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
 
```
```diff
@@ -113,12 +116,6 @@ function ListLogView({
 		onSetActiveLog: handleSetActiveContextLog,
 		onClearActiveLog: handleClearActiveContextLog,
 	} = useActiveLog();
-	const {
-		activeLog,
-		onSetActiveLog,
-		onClearActiveLog,
-		onAddToQuery,
-	} = useActiveLog();
 
 	const handleDetailedView = useCallback(() => {
 		onSetActiveLog(logData);
```
```diff
@@ -223,12 +220,6 @@ function ListLogView({
 					onClose={handleClearActiveContextLog}
 				/>
 			)}
-			<LogDetail
-				log={activeLog}
-				onClose={onClearActiveLog}
-				onAddToQuery={onAddToQuery}
-				onClickActionItem={onAddToQuery}
-			/>
 		</Row>
 	</Container>
 );
```
```diff
@@ -1,3 +1,5 @@
+import '../GridCardLayout.styles.scss';
+
 import { Skeleton, Typography } from 'antd';
 import cx from 'classnames';
 import { ToggleGraphProps } from 'components/Graph/types';
```
frontend/src/container/ListAlertRules/ListAlertRules.test.tsx (new file, 106 lines)
@@ -0,0 +1,106 @@
```tsx
import { act } from 'react-dom/test-utils';
import { fireEvent, render, screen, within } from 'tests/test-utils';

import ListAlertRules from '.';

describe('ListAlertRules', () => {
	test('Should render the table', async () => {
		act(() => {
			render(<ListAlertRules />);
		});

		const newAlert = await screen.findByRole('button', {
			name: /plus new alert/i,
		});

		expect(newAlert).toBeInTheDocument();

		const status = await screen.findByText(/status/i);
		expect(status).toBeInTheDocument();

		const alertName = await screen.findByText(/alert name/i);
		expect(alertName).toBeInTheDocument();

		const severity = await screen.findByText(/severity/i);
		expect(severity).toBeInTheDocument();

		const label = await screen.findByText(/label/i);
		expect(label).toBeInTheDocument();

		const action = await screen.findByText(/action/i);
		expect(action).toBeInTheDocument();
	});

	test('Should render the table data', async () => {
		act(() => {
			render(<ListAlertRules />);
		});

		const status = await screen.findByText(/status/i);
		expect(status).toBeInTheDocument();

		const disabledRow = await screen.findByRole('row', {
			name: /disabled Test Rule 1 warning details: https:\/\/stagi\.\.\. hello: world region: us \+1 ellipsis/i,
		});
		expect(disabledRow).toBeInTheDocument();

		const actionButton = within(disabledRow).getByRole('button', {
			name: /ellipsis/i,
		});
		expect(actionButton).toBeInTheDocument();

		fireEvent.mouseOver(actionButton);

		const enabled = await screen.findByRole('menuitem', {
			name: /enable/i,
		});
		expect(enabled).toBeInTheDocument();
	});

	test('Should render enabled for disabled alert in menu', async () => {
		act(() => {
			render(<ListAlertRules />);
		});

		const disabledRow = await screen.findByRole('row', {
			name: /disabled Test Rule 1 warning details: https:\/\/stagi\.\.\. hello: world region: us \+1 ellipsis/i,
		});
		expect(disabledRow).toBeInTheDocument();

		const actionButton = within(disabledRow).getByRole('button', {
			name: /ellipsis/i,
		});
		expect(actionButton).toBeInTheDocument();

		fireEvent.mouseOver(actionButton);

		const enabled = await screen.findByRole('menuitem', {
			name: /enable/i,
		});
		expect(enabled).toBeInTheDocument();
	});

	test('Should render disabled for Ok alert in menu', async () => {
		act(() => {
			render(<ListAlertRules />);
		});

		const enabledRow = await screen.findByRole('row', {
			name: /ok test rule 2 warning - ellipsis/i,
		});

		expect(enabledRow).toBeInTheDocument();

		const actionButton = within(enabledRow).getByRole('button', {
			name: /ellipsis/i,
		});
		expect(actionButton).toBeInTheDocument();

		fireEvent.mouseOver(actionButton);

		const disabled = await screen.findByRole('menuitem', {
			name: /disable/i,
		});
		expect(disabled).toBeInTheDocument();
	});
});
```
```diff
@@ -1,4 +1,5 @@
 import { Card, Typography } from 'antd';
+import LogDetail from 'components/LogDetail';
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
 import Spinner from 'components/Spinner';
@@ -10,6 +11,7 @@ import { InfinityWrapperStyled } from 'container/LogsExplorerList/styles';
 import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
 import { Heading } from 'container/LogsTable/styles';
 import { useOptionsMenu } from 'container/OptionsMenu';
+import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import useFontFaceObserver from 'hooks/useFontObserver';
 import { useEventSource } from 'providers/EventSource';
@@ -31,6 +33,13 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
 
 	const { activeLogId } = useCopyLogLink();
 
+	const {
+		activeLog,
+		onClearActiveLog,
+		onAddToQuery,
+		onSetActiveLog,
+	} = useActiveLog();
+
 	const { options } = useOptionsMenu({
 		storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
 		dataSource: DataSource.LOGS,
@@ -66,10 +75,22 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
 			}
 
 			return (
-				<ListLogView key={log.id} logData={log} selectedFields={selectedFields} />
+				<ListLogView
+					key={log.id}
+					logData={log}
+					selectedFields={selectedFields}
+					onAddToQuery={onAddToQuery}
+					onSetActiveLog={onSetActiveLog}
+				/>
 			);
 		},
-		[options.format, options.maxLines, selectedFields],
+		[
+			onAddToQuery,
+			onSetActiveLog,
+			options.format,
+			options.maxLines,
+			selectedFields,
+		],
 	);
 
 	useEffect(() => {
@@ -123,6 +144,12 @@ function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
 					)}
 				</InfinityWrapperStyled>
 			)}
+			<LogDetail
+				log={activeLog}
+				onClose={onClearActiveLog}
+				onAddToQuery={onAddToQuery}
+				onClickActionItem={onAddToQuery}
+			/>
 		</>
 	);
 }
```
```diff
@@ -1,4 +1,5 @@
 import { Card, Typography } from 'antd';
+import LogDetail from 'components/LogDetail';
 // components
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
@@ -8,6 +9,7 @@ import { LOCALSTORAGE } from 'constants/localStorage';
 import ExplorerControlPanel from 'container/ExplorerControlPanel';
 import { Heading } from 'container/LogsTable/styles';
 import { useOptionsMenu } from 'container/OptionsMenu';
+import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
 import useFontFaceObserver from 'hooks/useFontObserver';
@@ -37,6 +39,13 @@ function LogsExplorerList({
 
 	const { activeLogId } = useCopyLogLink();
 
+	const {
+		activeLog,
+		onClearActiveLog,
+		onAddToQuery,
+		onSetActiveLog,
+	} = useActiveLog();
+
 	const { options, config } = useOptionsMenu({
 		storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
 		dataSource: initialDataSource || DataSource.METRICS,
@@ -76,10 +85,22 @@ function LogsExplorerList({
 			}
 
 			return (
-				<ListLogView key={log.id} logData={log} selectedFields={selectedFields} />
+				<ListLogView
+					key={log.id}
+					logData={log}
+					selectedFields={selectedFields}
+					onAddToQuery={onAddToQuery}
+					onSetActiveLog={onSetActiveLog}
+				/>
 			);
 		},
-		[options.format, options.maxLines, selectedFields],
+		[
+			onAddToQuery,
+			onSetActiveLog,
+			options.format,
+			options.maxLines,
+			selectedFields,
+		],
 	);
 
 	useEffect(() => {
@@ -149,6 +170,13 @@ function LogsExplorerList({
 			)}
 
 			<InfinityWrapperStyled>{renderContent}</InfinityWrapperStyled>
 
+			<LogDetail
+				log={activeLog}
+				onClose={onClearActiveLog}
+				onAddToQuery={onAddToQuery}
+				onClickActionItem={onAddToQuery}
+			/>
 		</>
 	);
 }
```
```diff
@@ -1,6 +1,7 @@
 import './logsTable.styles.scss';
 
 import { Card, Typography } from 'antd';
+import LogDetail from 'components/LogDetail';
 // components
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
@@ -29,7 +30,12 @@ type LogsTableProps = {
 function LogsTable(props: LogsTableProps): JSX.Element {
 	const { viewMode, linesPerRow } = props;
 
-	const { onSetActiveLog } = useActiveLog();
+	const {
+		activeLog,
+		onClearActiveLog,
+		onAddToQuery,
+		onSetActiveLog,
+	} = useActiveLog();
 
 	useFontFaceObserver(
 		[
@@ -69,9 +75,17 @@ function LogsTable(props: LogsTableProps): JSX.Element {
 				return <RawLogView key={log.id} data={log} linesPerRow={linesPerRow} />;
 			}
 
-			return <ListLogView key={log.id} logData={log} selectedFields={selected} />;
+			return (
+				<ListLogView
+					key={log.id}
+					logData={log}
+					selectedFields={selected}
+					onAddToQuery={onAddToQuery}
+					onSetActiveLog={onSetActiveLog}
+				/>
+			);
 		},
-		[logs, viewMode, selected, linesPerRow],
+		[logs, viewMode, selected, onAddToQuery, onSetActiveLog, linesPerRow],
 	);
 
 	const renderContent = useMemo(() => {
@@ -110,6 +124,12 @@ function LogsTable(props: LogsTableProps): JSX.Element {
 			{isNoLogs && <Typography>No logs lines found</Typography>}
 
 			{renderContent}
+			<LogDetail
+				log={activeLog}
+				onClose={onClearActiveLog}
+				onAddToQuery={onAddToQuery}
+				onClickActionItem={onAddToQuery}
+			/>
 		</Container>
 	);
 }
```
```diff
@@ -32,19 +32,13 @@ import {
 	errorPercentage,
 	operationPerSec,
 } from '../MetricsPageQueries/OverviewQueries';
-import {
-	Card,
-	Col,
-	ColApDexContainer,
-	ColErrorContainer,
-	Row,
-} from '../styles';
+import { Col, ColApDexContainer, ColErrorContainer, Row } from '../styles';
 import ApDex from './Overview/ApDex';
 import ServiceOverview from './Overview/ServiceOverview';
 import TopLevelOperation from './Overview/TopLevelOperations';
 import TopOperation from './Overview/TopOperation';
 import TopOperationMetrics from './Overview/TopOperationMetrics';
-import { Button } from './styles';
+import { Button, Card } from './styles';
 import { IServiceName } from './types';
 import {
 	handleNonInQueryRange,
@@ -276,7 +270,7 @@ function Application(): JSX.Element {
 
 					<Col span={12}>
 						<Card>
-							{isSpanMetricEnabled ? <TopOperationMetrics /> : <TopOperation />}
+							{isSpanMetricEnabled ? <TopOperationMetrics /> : <TopOperation />}{' '}
 						</Card>
 					</Col>
 				</Row>
```
```diff
@@ -1,4 +1,4 @@
-import { Button as ButtonComponent } from 'antd';
+import { Button as ButtonComponent, Card as CardComponent } from 'antd';
 import styled from 'styled-components';
 
 export const Button = styled(ButtonComponent)`
@@ -8,3 +8,9 @@ export const Button = styled(ButtonComponent)`
 		display: none;
 	}
 `;
+
+export const Card = styled(CardComponent)`
+	.ant-card-body {
+		padding: 10px;
+	}
+`;
```
```diff
@@ -8,12 +8,13 @@ import styled from 'styled-components';
 
 export const Card = styled(CardComponent)`
 	&&& {
 		padding: 10px;
 		height: 40vh;
 		overflow: hidden;
 	}
 
 	.ant-card-body {
+		height: calc(100% - 40px);
 		padding: 0;
 		min-height: 40vh;
 	}
 `;
@@ -38,7 +39,8 @@ export const ColErrorContainer = styled(ColComponent)`
 `;
 
 export const GraphContainer = styled.div`
-	height: 40vh;
+	min-height: calc(40vh - 40px);
+	height: calc(100% - 40px);
 `;
 
 export const GraphTitle = styled(Typography)`
```
```diff
@@ -28,6 +28,10 @@ export const timeItems: timePreferance[] = [
 		name: 'Last 1 day',
 		enum: 'LAST_1_DAY',
 	},
+	{
+		name: 'Last 3 days',
+		enum: 'LAST_3_DAYS',
+	},
 	{
 		name: 'Last 1 week',
 		enum: 'LAST_1_WEEK',
@@ -47,6 +51,7 @@ export type timePreferenceType =
 	| LAST_1_HR
 	| LAST_6_HR
 	| LAST_1_DAY
+	| LAST_3_DAYS
 	| LAST_1_WEEK;
 
 type GLOBAL_TIME = 'GLOBAL_TIME';
@@ -56,6 +61,7 @@ type LAST_30_MIN = 'LAST_30_MIN';
 type LAST_1_HR = 'LAST_1_HR';
 type LAST_6_HR = 'LAST_6_HR';
 type LAST_1_DAY = 'LAST_1_DAY';
+type LAST_3_DAYS = 'LAST_3_DAYS';
 type LAST_1_WEEK = 'LAST_1_WEEK';
 
 export default timeItems;
```
```diff
@@ -178,6 +178,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
 					yAxisUnit,
 					panelTypes: graphType,
 					thresholds,
+					fillSpans: isFillSpans,
 				},
 				...afterWidgets,
 			],
@@ -212,6 +213,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
 		yAxisUnit,
 		graphType,
 		thresholds,
+		isFillSpans,
 		afterWidgets,
 		updateDashboardMutation,
 		setSelectedDashboard,
```
```diff
@@ -35,9 +35,7 @@ export default function DataSource(): JSX.Element {
 		selectedFramework,
 		updateSelectedDataSource,
 		updateServiceName,
-		updateSelectedEnvironment,
 		updateSelectedFramework,
-		updateErrorDetails,
 	} = useOnboardingContext();
 
 	const [supportedDataSources, setSupportedDataSources] = useState<
@@ -55,11 +53,6 @@ export default function DataSource(): JSX.Element {
 
 			setSupportedDataSources(dataSource);
 		}
-
-		updateSelectedEnvironment('');
-		updateErrorDetails('');
-		updateServiceName('');
-		updateSelectedFramework('');
 		// eslint-disable-next-line react-hooks/exhaustive-deps
 	}, []);
```
```diff
@@ -3,7 +3,10 @@ import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
 import { ApmDocFilePaths } from 'container/OnboardingContainer/constants/apmDocFilePaths';
 import { InfraMonitoringDocFilePaths } from 'container/OnboardingContainer/constants/infraMonitoringDocFilePaths';
 import { LogsManagementDocFilePaths } from 'container/OnboardingContainer/constants/logsManagementDocFilePaths';
-import { useOnboardingContext } from 'container/OnboardingContainer/context/OnboardingContext';
+import {
+	OnboardingMethods,
+	useOnboardingContext,
+} from 'container/OnboardingContainer/context/OnboardingContext';
 import { ModulesMap } from 'container/OnboardingContainer/OnboardingContainer';
 import useAnalytics from 'hooks/analytics/useAnalytics';
 import { useEffect, useState } from 'react';
@@ -42,12 +45,12 @@ export default function MarkdownStep(): JSX.Element {
 		path += `_${selectedEnvironment}`;
 	}
 
-	if (
-		selectedModule?.id === ModulesMap.APM &&
-		selectedDataSource?.id !== 'kubernetes' &&
-		selectedMethod
-	) {
-		path += `_${selectedMethod}`;
+	if (selectedModule?.id === ModulesMap.APM) {
+		if (selectedEnvironment === 'kubernetes') {
+			path += `_${OnboardingMethods.RECOMMENDED_STEPS}`;
+		} else if (selectedEnvironment !== 'kubernetes' && selectedMethod) {
+			path += `_${selectedMethod}`;
+		}
 	}
 
 	path += `_${step?.id}`;
```
```diff
@@ -104,7 +104,7 @@ function OnboardingContextProvider({
 		setSelectedDataSource(defaultApplicationDataSource);
 		setSelectedEnvironment('');
 		setSelectedFramework('');
-		setSelectedMethod(OnboardingMethods.RECOMMENDED_STEPS);
+		setSelectedMethod(OnboardingMethods.QUICK_START);
 		updateActiveStep(null);
 	};
```
```diff
@@ -9,6 +9,7 @@ type SixHour = '6hr';
 type OneHour = '1hr';
 type FourHour = '4hr';
 type OneDay = '1day';
+type ThreeDay = '3days';
 type OneWeek = '1week';
 type Custom = 'custom';
@@ -23,7 +24,8 @@ export type Time =
 	| OneHour
 	| Custom
 	| OneWeek
-	| OneDay;
+	| OneDay
+	| ThreeDay;
 
 export const Options: Option[] = [
 	{ value: '5min', label: 'Last 5 min' },
@@ -32,6 +34,7 @@ export const Options: Option[] = [
 	{ value: '1hr', label: 'Last 1 hour' },
 	{ value: '6hr', label: 'Last 6 hour' },
 	{ value: '1day', label: 'Last 1 day' },
+	{ value: '3days', label: 'Last 3 days' },
 	{ value: '1week', label: 'Last 1 week' },
 	{ value: 'custom', label: 'Custom' },
 ];
@@ -48,6 +51,7 @@ export const RelativeDurationOptions: Option[] = [
 	{ value: '1hr', label: 'Last 1 hour' },
 	{ value: '6hr', label: 'Last 6 hour' },
 	{ value: '1day', label: 'Last 1 day' },
+	{ value: '3days', label: 'Last 3 days' },
 	{ value: '1week', label: 'Last 1 week' },
 ];
```
@@ -0,0 +1,68 @@
```tsx
import { render, screen } from 'tests/test-utils';
import { Alerts } from 'types/api/alerts/getTriggered';

import ExapandableRow from './ExapandableRow';

jest.mock('lib/convertDateToAmAndPm', () => jest.fn(() => '12:00 PM'));
jest.mock('lib/getFormatedDate', () => jest.fn(() => '2023-12-05'));

describe('ExapandableRow component', () => {
	const allAlerts: Alerts[] = [
		{
			id: 1,
			annotations: { description: 'Description 1', summary: 'Summary 1' },
			state: 'active',
			name: 'Alert 1',
			labels: {
				alertname: 'Critical Alert',
				severity: 'critical',
				tag1: 'value1',
				tag2: 'value2',
			},
			status: { inhibitedBy: [], silencedBy: [], state: 'active' },
			startsAt: '2023-12-05T11:00:00Z',
			fingerprint: 'fingerprint1',
			endsAt: '2023-12-05T12:00:00Z',
			generatorURL: 'generatorURL1',
			receivers: [],
			updatedAt: '2023-12-05T11:30:00Z',
		},
		{
			id: 2,
			annotations: { description: 'Description 2', summary: 'Summary 2' },
			state: 'inactive',
			name: 'Alert 2',
			labels: {
				alertname: 'Warning Alert',
				severity: 'warning',
				tag1: 'value3',
				tag2: 'value4',
				tag3: 'value5',
			},
			status: { inhibitedBy: [], silencedBy: [], state: 'inactive' },
			startsAt: '2023-12-05T13:00:00Z',
			fingerprint: 'fingerprint2',
			endsAt: '2023-12-05T14:00:00Z',
			generatorURL: 'generatorURL2',
			receivers: [],
			updatedAt: '2023-12-05T13:30:00Z',
		},
	];

	test('should render correct content for each alert', () => {
		render(<ExapandableRow allAlerts={allAlerts} />);

		expect(screen.getByText('Critical Alert')).toBeInTheDocument();
		expect(screen.getByText('critical')).toBeInTheDocument();

		expect(screen.getByText('Warning Alert')).toBeInTheDocument();
		expect(screen.getByText('warning')).toBeInTheDocument();
	});

	test('Should render the unknown status if tag is not correctly mentioned', () => {
		render(<ExapandableRow allAlerts={allAlerts} />);
		const unknownStatus = screen.getByText('Unknown Status');
		expect(unknownStatus).toBeInTheDocument();
		screen.debug();
	});
});
```
@@ -0,0 +1,44 @@
```tsx
// FilteredTable.test.tsx

import { render } from 'tests/test-utils';
import { Alerts } from 'types/api/alerts/getTriggered';

import FilteredTable from '.';

describe('FilteredTable component', () => {
	const selectedGroup = [{ value: 'group1' }, { value: 'group2' }];
	const allAlerts: Alerts[] = [
		{
			labels: { group1: 'value1', group2: 'value2' },
			annotations: { description: 'Description 1', summary: 'Summary 1' },
			state: 'active',
			name: 'Alert 1',
			id: 1,
			endsAt: '2023-12-05T12:00:00Z',
			fingerprint: 'fingerprint1',
			generatorURL: 'generatorURL1',
			receivers: [],
			startsAt: '2023-12-05T11:00:00Z',
			status: { inhibitedBy: [], silencedBy: [], state: 'active' },
			updatedAt: '2023-12-05T11:30:00Z',
		},
	];
	const selectedFilter = [{ value: 'severity:critical' }];

	it('should render table headers', () => {
		const { getByText } = render(
			<FilteredTable
				selectedGroup={selectedGroup}
				allAlerts={allAlerts}
				selectedFilter={selectedFilter}
			/>,
		);

		// Assert that each header is present
		expect(getByText('Status')).toBeInTheDocument();
		expect(getByText('Alert Name')).toBeInTheDocument();
		expect(getByText('Severity')).toBeInTheDocument();
		expect(getByText('Firing Since')).toBeInTheDocument();
		expect(getByText('Tags')).toBeInTheDocument();
	});
});
```
@@ -0,0 +1,68 @@
```tsx
import { render, screen } from 'tests/test-utils';
import { Alerts } from 'types/api/alerts/getTriggered';

import TableRowComponent from './TableRow';

jest.mock('types/api/alerts/getTriggered', () => ({}));

describe('TableRowComponent component', () => {
	const tags = ['tag1', 'tag2'];
	const tagsAlerts: Alerts[] = [
		{
			labels: {
				alertname: 'Critical Alert',
				severity: 'critical',
				tag1: 'value1',
				tag2: 'value2',
			},
			annotations: {
				description: 'Description 1',
				summary: 'Summary 1',
				customProperty: 'Custom Value 1',
			},
			state: 'active',
			name: 'Alert 1',
			id: 1,
			endsAt: '2023-12-05T12:00:00Z',
			fingerprint: 'fingerprint1',
			generatorURL: 'generatorURL1',
			receivers: [],
			startsAt: '2023-12-05T11:00:00Z',
			status: { inhibitedBy: [], silencedBy: [], state: 'active' },
			updatedAt: '2023-12-05T11:30:00Z',
		},
		{
			labels: {
				alertname: 'Warning Alert',
				severity: 'warning',
				tag1: 'value3',
				tag2: 'value4',
				tag3: 'value5',
			},
			annotations: {
				description: 'Description 2',
				summary: 'Summary 2',
				customProperty: 'Custom Value 2',
			},
			state: 'inactive',
			name: 'Alert 2',
			id: 2,
			endsAt: '2023-12-05T13:00:00Z',
			fingerprint: 'fingerprint2',
			generatorURL: 'generatorURL2',
			receivers: [],
			startsAt: '2023-12-05T12:30:00Z',
			status: { inhibitedBy: [], silencedBy: [], state: 'inactive' },
			updatedAt: '2023-12-05T12:45:00Z',
		},
		// Add more test alerts as needed
	];

	test('should render tags and expandable row when clicked', () => {
		render(<TableRowComponent tags={tags} tagsAlert={tagsAlerts} />);
		expect(screen.getByText('tag1')).toBeInTheDocument();
		expect(screen.getByText('tag2')).toBeInTheDocument();
	});

	// Add more test cases as needed
});
```
@@ -0,0 +1,39 @@
```tsx
import { Alerts } from 'types/api/alerts/getTriggered';

import { Value } from './Filter';
import { FilterAlerts } from './utils';

describe('FilterAlerts function', () => {
	const alerts: Alerts[] = [
		{
			labels: { severity: 'critical', app: 'myApp' },
			annotations: { description: 'Alert description', summary: 'Alert summary' },
			state: 'active',
			name: 'Alert 1',
			id: 1,
			endsAt: '2023-12-05T12:00:00Z',
			fingerprint: 'fingerprint1',
			generatorURL: 'generatorURL1',
			receivers: [],
			startsAt: '2023-12-05T11:00:00Z',
			status: { inhibitedBy: [], silencedBy: [], state: 'active' },
			updatedAt: '2023-12-05T11:30:00Z',
		},
	];

	const selectedFilter: Value[] = [
		{ value: 'severity:critical' },
		{ value: 'app:myApp' },
	];

	it('should filter alerts based on the selected filter', () => {
		const filteredAlerts = FilterAlerts(alerts, selectedFilter);
		expect(filteredAlerts).toHaveLength(1);
		expect(filteredAlerts[0].fingerprint).toEqual('fingerprint1');
	});

	it('should return all alerts when no filter is selected', () => {
		const allAlerts = FilterAlerts(alerts, []);
		expect(allAlerts).toHaveLength(alerts.length);
	});
});
```
@@ -0,0 +1,37 @@
```tsx
import { render } from 'tests/test-utils';

import Severity from './AlertStatus';

describe('Severity component', () => {
	it('should render UnProcessed tag for severity "unprocessed"', () => {
		const { getByText } = render(<Severity severity="unprocessed" />);
		const tagElement = getByText('UnProcessed');

		expect(tagElement).toBeInTheDocument();
		expect(tagElement).toHaveClass('ant-tag-green');
	});

	it('should render Firing tag for severity "active"', () => {
		const { getByText } = render(<Severity severity="active" />);
		const tagElement = getByText('Firing');

		expect(tagElement).toBeInTheDocument();
		expect(tagElement).toHaveClass('ant-tag-red');
	});

	it('should render Suppressed tag for severity "suppressed"', () => {
		const { getByText } = render(<Severity severity="suppressed" />);
		const tagElement = getByText('Suppressed');

		expect(tagElement).toBeInTheDocument();
		expect(tagElement).toHaveClass('ant-tag-red');
	});

	it('should render Unknown Status tag for unknown severity', () => {
		const { getByText } = render(<Severity severity="unknown" />);
		const tagElement = getByText('Unknown Status');

		expect(tagElement).toBeInTheDocument();
		expect(tagElement).toHaveClass('ant-tag-default');
	});
});
```
@@ -0,0 +1,38 @@
```tsx
import { act } from 'react-dom/test-utils';
import { render, screen } from 'tests/test-utils';

import TriggeredAlerts from '.';

describe('TriggeredAlerts', () => {
	test('Should render the table', async () => {
		act(() => {
			render(<TriggeredAlerts />);
		});

		const status = await screen.findByText('Status');
		expect(status).toBeInTheDocument();

		const alertName = await screen.findByText('Alert Name');
		expect(alertName).toBeInTheDocument();

		const severity = await screen.findByText('Severity');
		expect(severity).toBeInTheDocument();

		const tags = await screen.findByText('Tags');
		expect(tags).toBeInTheDocument();

		const firedSince = await screen.findByText('Firing Since');
		expect(firedSince).toBeInTheDocument();
	});

	// test('Should render the table data in triggeredAlert', async () => {
	// 	act(() => {
	// 		render(<TriggeredAlerts />);
	// 	});

	// 	const row = await screen.findByRole('row', {
	// 		name: /firing above 400ms alertname: above 400ms component: net\/http details: https:\/\/demo\.\.\.\. \+2 warning 11\/30\/2023 10:04:19 am/i,
	// 	});
	// 	expect(row).toBeInTheDocument();
	// });
});
```
```diff
@@ -6,6 +6,7 @@ import getMinAgo from './getStartAndEndTime/getMinAgo';
 
 const GetMinMax = (
 	interval: Time,
 	dateTimeRange?: [number, number],
+	// eslint-disable-next-line sonarjs/cognitive-complexity
 ): GetMinMaxPayload => {
 	let maxTime = new Date().getTime();
 	let minTime = 0;
@@ -32,6 +33,10 @@ const GetMinMax = (
 		// one day = 24*60(min)
 		const minTimeAgo = getMinAgo({ minutes: 24 * 60 }).getTime();
 		minTime = minTimeAgo;
+	} else if (interval === '3days') {
+		// three day = one day * 3
+		const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 3 }).getTime();
+		minTime = minTimeAgo;
 	} else if (interval === '1week') {
 		// one week = one day * 7
 		const minTimeAgo = getMinAgo({ minutes: 24 * 60 * 7 }).getTime();
```
```diff
@@ -3,6 +3,19 @@ import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems
 
 import getMicroSeconds from './getMicroSeconds';
 import getMinAgo from './getMinAgo';
 
+const calculateStartAndEndTime = (
+	minutes: number,
+	endString: string,
+): Payload => {
+	const agodate = getMinAgo({ minutes }).getTime();
+	const agoString = getMicroSeconds({ time: agodate });
+
+	return {
+		start: agoString,
+		end: endString,
+	};
+};
+
 const GetStartAndEndTime = ({
 	type,
 	minTime,
@@ -12,73 +25,35 @@
 	const endString = getMicroSeconds({ time: end });
 
 	if (type === 'LAST_5_MIN') {
-		const agodate = getMinAgo({ minutes: 5 }).getTime();
-		const agoString = getMicroSeconds({ time: agodate });
-
-		return {
-			start: agoString,
-			end: endString,
-		};
+		return calculateStartAndEndTime(5, endString);
 	}
 
 	if (type === 'LAST_30_MIN') {
-		const agodate = getMinAgo({ minutes: 30 }).getTime();
-		const agoString = getMicroSeconds({ time: agodate });
-
-		return {
-			start: agoString,
-			end: endString,
-		};
+		return calculateStartAndEndTime(30, endString);
 	}
 
 	if (type === 'LAST_1_HR') {
-		const agodate = getMinAgo({ minutes: 60 }).getTime();
-		const agoString = getMicroSeconds({ time: agodate });
-
-		return {
-			start: agoString,
-			end: endString,
-		};
+		return calculateStartAndEndTime(60, endString);
 	}
 
 	if (type === 'LAST_15_MIN') {
-		const agodate = getMinAgo({ minutes: 15 }).getTime();
-		const agoString = getMicroSeconds({ time: agodate });
-
-		return {
-			start: agoString,
-			end: endString,
-		};
+		return calculateStartAndEndTime(15, endString);
 	}
 
 	if (type === 'LAST_6_HR') {
-		const agoDate = getMinAgo({ minutes: 6 * 60 }).getTime();
-		const agoString = getMicroSeconds({ time: agoDate });
-
-		return {
-			start: agoString,
-			end: endString,
-		};
+		return calculateStartAndEndTime(6 * 60, endString);
 	}
 
 	if (type === 'LAST_1_DAY') {
-		const agoDate = getMinAgo({ minutes: 24 * 60 }).getTime();
-		const agoString = getMicroSeconds({ time: agoDate });
+		return calculateStartAndEndTime(24 * 60, endString);
+	}
 
-		return {
-			start: agoString,
-			end: endString,
-		};
+	if (type === 'LAST_3_DAYS') {
+		return calculateStartAndEndTime(24 * 60 * 3, endString);
 	}
 
 	if (type === 'LAST_1_WEEK') {
-		const agoDate = getMinAgo({ minutes: 24 * 60 * 7 }).getTime();
-		const agoString = getMicroSeconds({ time: agoDate });
-
-		return {
-			start: agoString,
-			end: endString,
-		};
+		return calculateStartAndEndTime(24 * 60 * 7, endString);
 	}
 
 	return {
```
frontend/src/lib/logql/tokens.test.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
```ts
import {
	NumTypeQueryOperators,
	QueryOperatorsMultiVal,
	QueryTypes,
	StringTypeQueryOperators,
	ValidTypeSequence,
	ValidTypeValue,
} from './tokens';

describe('ValidTypeValue', () => {
	test('should return true for valid numeric values with number operators', () => {
		expect(ValidTypeValue(NumTypeQueryOperators.GTE, '42')).toBe(true);
		expect(ValidTypeValue(NumTypeQueryOperators.LT, '3.14')).toBe(true);
	});

	test('should return false for invalid numeric values with number operators', () => {
		expect(ValidTypeValue(NumTypeQueryOperators.GTE, 'abc')).toBe(false);
		expect(ValidTypeValue(NumTypeQueryOperators.LT, '12xyz')).toBe(false);
	});

	test('should return true for string values with string operators', () => {
		expect(ValidTypeValue(StringTypeQueryOperators.CONTAINS, 'example')).toBe(
			true,
		);
		expect(ValidTypeValue(StringTypeQueryOperators.NCONTAINS, 'test')).toBe(true);
	});

	test('should return true for any value with other operators', () => {
		expect(ValidTypeValue('anything', 'whatever')).toBe(true);
		expect(ValidTypeValue(QueryOperatorsMultiVal.IN, ['1', '2', '3'])).toBe(true);
	});

	test('should return false if value is array', () => {
		expect(ValidTypeValue(NumTypeQueryOperators.GTE, ['1', '2', '3'])).toBe(
			false,
		);
	});
});

describe('ValidTypeSequence', () => {
	test('should return true for valid type sequences', () => {
		expect(
			ValidTypeSequence(
				undefined,
				QueryTypes.QUERY_KEY,
				QueryTypes.CONDITIONAL_OPERATOR,
			),
		).toBe(true);
		expect(
			ValidTypeSequence(
				QueryTypes.QUERY_KEY,
				QueryTypes.QUERY_OPERATOR,
				QueryTypes.QUERY_VALUE,
			),
		).toBe(true);
		expect(
			ValidTypeSequence(
				QueryTypes.QUERY_OPERATOR,
				QueryTypes.QUERY_VALUE,
				undefined,
			),
		).toBe(true);
	});

	test('should return false for invalid type sequences', () => {
		expect(
			ValidTypeSequence(
				undefined,
				QueryTypes.QUERY_OPERATOR,
				QueryTypes.QUERY_VALUE,
			),
		).toBe(false);
		expect(
			ValidTypeSequence(
				QueryTypes.QUERY_KEY,
				QueryTypes.QUERY_VALUE,
				QueryTypes.QUERY_OPERATOR,
			),
		).toBe(false);
		expect(
			ValidTypeSequence(
				QueryTypes.QUERY_OPERATOR,
				QueryTypes.QUERY_KEY,
				QueryTypes.QUERY_VALUE,
			),
		).toBe(false);
		expect(
			ValidTypeSequence(
				QueryTypes.QUERY_VALUE,
				QueryTypes.QUERY_OPERATOR,
				undefined,
			),
		).toBe(false);
		expect(
			ValidTypeSequence(
				QueryTypes.CONDITIONAL_OPERATOR,
				QueryTypes.QUERY_OPERATOR,
				QueryTypes.QUERY_KEY,
			),
		).toBe(false);
		expect(
			ValidTypeSequence(
				QueryTypes.CONDITIONAL_OPERATOR,
				undefined,
				QueryTypes.QUERY_KEY,
			),
		).toBe(false);
		expect(
			ValidTypeSequence(
				QueryTypes.QUERY_KEY,
				QueryTypes.CONDITIONAL_OPERATOR,
				undefined,
			),
		).toBe(false);
	});
});
```
frontend/src/mocks-server/__mockdata__/alerts.ts (new file, 5 lines): file diff suppressed because one or more lines are too long
frontend/src/mocks-server/__mockdata__/rules.ts (new file, 163 lines)
@@ -0,0 +1,163 @@
```ts
export const rulesSuccessResponse = {
	status: 'success',
	data: {
		rules: [
			{
				id: '5',
				state: 'disabled',
				alert: 'Test Rule 1',
				alertType: 'LOGS_BASED_ALERT',
				ruleType: 'threshold_rule',
				evalWindow: '1h0m0s',
				frequency: '1m0s',
				condition: {
					compositeQuery: {
						builderQueries: {
							A: {
								queryName: 'A',
								stepInterval: 60,
								dataSource: 'metrics',
								aggregateOperator: 'noop',
								aggregateAttribute: {
									key: '',
									dataType: 'float64',
									type: '',
									isColumn: true,
									isJSON: false,
								},
								filters: {
									op: 'AND',
									items: null,
								},
								expression: 'A',
								disabled: false,
								limit: 0,
								offset: 0,
								pageSize: 0,
								reduceTo: 'last',
							},
						},
						chQueries: {
							A: {
								query:
									'select \ntoStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 1 MINUTE) AS interval, \ntoFloat64(count()) as value \nFROM signoz_logs.distributed_logs \nWHERE timestamp BETWEEN {{.start_timestamp_nano}} AND {{.end_timestamp_nano}}\n\nGROUP BY interval;\n\n-- available variables:\n-- \t{{.start_timestamp_nano}}\n-- \t{{.end_timestamp_nano}}\n\n-- required columns (or alias):\n-- \tvalue\n-- \tinterval',
								disabled: false,
							},
						},
						promQueries: {
							A: {
								query: '',
								disabled: false,
							},
						},
						panelType: 'graph',
						queryType: 'clickhouse_sql',
					},
					op: '1',
					target: 2000,
					matchType: '1',
				},
				labels: {
					details: 'https://stagingapp.signoz.io/logs',
					hello: 'world',
					region: 'us',
					severity: 'warning',
					type: 'test',
				},
				annotations: {
					description: 'description',
					summary: 'summary',
				},
				disabled: true,
				source:
					'https://stagingapp.signoz.io/alerts/edit?ruleId=5\u0026compositeQuery=%7B%22builder%22%3A%7B%22queryData%22%3A%5B%7B%22dataSource%22%3A%22metrics%22%2C%22queryName%22%3A%22A%22%2C%22aggregateOperator%22%3A%22noop%22%2C%22aggregateAttribute%22%3A%7B%22key%22%3A%22%22%2C%22dataType%22%3A%22float64%22%2C%22type%22%3A%22%22%2C%22isColumn%22%3Atrue%2C%22isJSON%22%3Afalse%7D%2C%22filters%22%3A%7B%22op%22%3A%22AND%22%2C%22items%22%3Anull%7D%2C%22expression%22%3A%22A%22%2C%22disabled%22%3Afalse%2C%22having%22%3A%5B%5D%2C%22stepInterval%22%3A60%2C%22limit%22%3A0%2C%22orderBy%22%3A%5B%5D%2C%22groupBy%22%3A%5B%5D%2C%22legend%22%3A%22%22%2C%22reduceTo%22%3A%22last%22%2C%22offset%22%3A0%2C%22pageSize%22%3A0%7D%5D%2C%22queryFormulas%22%3A%5B%5D%7D%2C%22promql%22%3A%5B%7B%22query%22%3A%22%22%2C%22disabled%22%3Afalse%2C%22name%22%3A%22A%22%7D%5D%2C%22clickhouse_sql%22%3A%5B%7B%22query%22%3A%22select%20%5CntoStartOfInterval(fromUnixTimestamp64Nano(timestamp)%2C%20INTERVAL%201%20MINUTE)%20AS%20interval%2C%20%5CntoFloat64(count())%20as%20value%20%5CnFROM%20signoz_logs.distributed_logs%20%20%5CnWHERE%20timestamp%20BETWEEN%20%7B%7B.start_timestamp_nano%7D%7D%20AND%20%7B%7B.end_timestamp_nano%7D%7D%5Cn%5CnGROUP%20BY%20interval%3B%5Cn%5Cn--%20available%20variables%3A%5Cn--%20%5Ct%7B%7B.start_timestamp_nano%7D%7D%5Cn--%20%5Ct%7B%7B.end_timestamp_nano%7D%7D%5Cn%5Cn--%20required%20columns%20(or%20alias)%3A%5Cn--%20%5Ctvalue%5Cn--%20%5Ctinterval%22%2C%22disabled%22%3Afalse%2C%22name%22%3A%22A%22%7D%5D%2C%22queryType%22%3A%22clickhouse_sql%22%2C%22id%22%3A%22f17cf0cd-f479-4452-aded-e426aeda45ff%22%7D',
				preferredChannels: ['webhook-site'],
				createAt: null,
				createBy: null,
				updateAt: '2023-10-27T14:03:49.79371099Z',
				updateBy: 'ankit@signoz.io',
			},
			{
				id: '6',
				state: 'inactive',
				alert: 'Test Rule 2',
				alertType: 'METRIC_BASED_ALERT',
				ruleType: 'threshold_rule',
				evalWindow: '5m0s',
				frequency: '1m0s',
				condition: {
					compositeQuery: {
						builderQueries: {
							A: {
								queryName: 'A',
								stepInterval: 60,
								dataSource: 'metrics',
								aggregateOperator: 'sum_rate',
								aggregateAttribute: {
									key: 'signoz_calls_total',
									dataType: 'float64',
									type: '',
									isColumn: true,
									isJSON: false,
								},
								filters: {
									op: 'AND',
									items: [],
								},
								groupBy: [
									{
										key: 'service_name',
										dataType: 'string',
										type: 'tag',
										isColumn: false,
										isJSON: false,
									},
								],
								expression: 'A',
								disabled: false,
								limit: 0,
								offset: 0,
								pageSize: 0,
								reduceTo: 'sum',
							},
						},
						chQueries: {
							A: {
								query: '',
								disabled: false,
							},
						},
						promQueries: {
							A: {
								query: '',
								disabled: false,
							},
						},
						panelType: 'graph',
						queryType: 'builder',
					},
					op: '1',
					target: 20,
					matchType: '1',
				},
				labels: {
					severity: 'warning',
				},
				annotations: {
					description:
						'This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})',
					summary:
						'The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}',
				},
				disabled: false,
				source:
					'http://localhost:3301/alerts/edit?ruleId=6\u0026compositeQuery=%7B%22builder%22%3A%7B%22queryData%22%3A%5B%7B%22dataSource%22%3A%22metrics%22%2C%22queryName%22%3A%22A%22%2C%22aggregateOperator%22%3A%22sum_rate%22%2C%22aggregateAttribute%22%3A%7B%22key%22%3A%22signoz_calls_total%22%2C%22dataType%22%3A%22float64%22%2C%22type%22%3A%22%22%2C%22isColumn%22%3Atrue%7D%2C%22filters%22%3A%7B%22op%22%3A%22AND%22%2C%22items%22%3A%5B%5D%7D%2C%22expression%22%3A%22A%22%2C%22disabled%22%3Afalse%2C%22having%22%3A%5B%5D%2C%22stepInterval%22%3A60%2C%22limit%22%3A0%2C%22orderBy%22%3A%5B%5D%2C%22groupBy%22%3A%5B%7B%22key%22%3A%22service_name%22%2C%22dataType%22%3A%22string%22%2C%22type%22%3A%22tag%22%2C%22isColumn%22%3Afalse%7D%5D%2C%22legend%22%3A%22%22%2C%22reduceTo%22%3A%22sum%22%2C%22offset%22%3A0%2C%22pageSize%22%3A0%7D%5D%2C%22queryFormulas%22%3A%5B%5D%7D%2C%22promql%22%3A%5B%7B%22query%22%3A%22%22%2C%22disabled%22%3Afalse%2C%22name%22%3A%22A%22%7D%5D%2C%22clickhouse_sql%22%3A%5B%7B%22query%22%3A%22%22%2C%22disabled%22%3Afalse%2C%22name%22%3A%22A%22%7D%5D%2C%22queryType%22%3A%22builder%22%2C%22id%22%3A%22c6486149-69b9-4e75-92ab-dde3282e558f%22%7D',
				preferredChannels: ['Slack-Discord-Compatible', 'Discord-webhook'],
				createAt: null,
				createBy: null,
				updateAt: '2023-10-06T09:48:07.047188664Z',
				updateBy: null,
			},
		],
	},
};
```
```diff
@@ -1,8 +1,10 @@
 import { rest } from 'msw';
 
+import { alertsSuccessResponse } from './__mockdata__/alerts';
 import { billingSuccessResponse } from './__mockdata__/billing';
 import { licensesSuccessResponse } from './__mockdata__/licenses';
 import { queryRangeSuccessResponse } from './__mockdata__/query_range';
+import { rulesSuccessResponse } from './__mockdata__/rules';
 import { serviceSuccessResponse } from './__mockdata__/services';
 import { topLevelOperationSuccessResponse } from './__mockdata__/top_level_operations';
@@ -81,4 +83,12 @@ export const handlers = [
 	rest.get('http://localhost/api/v1/billing', (req, res, ctx) =>
 		res(ctx.status(200), ctx.json(billingSuccessResponse)),
 	),
+
+	rest.get('http://localhost/api/v1/rules', (req, res, ctx) =>
+		res(ctx.status(200), ctx.json(rulesSuccessResponse)),
+	),
+
+	rest.get('http://localhost/api/v1/alerts', (req, res, ctx) =>
+		res(ctx.status(200), ctx.json(alertsSuccessResponse)),
+	),
 ];
```
```diff
@@ -2,8 +2,12 @@ package logparsingpipeline
 
 import (
 	"fmt"
+	"slices"
 	"strings"
 
+	"github.com/antonmedv/expr"
+	"github.com/antonmedv/expr/ast"
+	"github.com/antonmedv/expr/parser"
 	"github.com/pkg/errors"
 	"go.signoz.io/signoz/pkg/query-service/constants"
 	"go.signoz.io/signoz/pkg/query-service/queryBuilderToExpr"
```
```diff
@@ -81,50 +85,97 @@ func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
 		}
 
 		if operator.Type == "regex_parser" {
-			parseFromParts := strings.Split(operator.ParseFrom, ".")
-			parseFromPath := strings.Join(parseFromParts, "?.")
+			parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for parseFrom of regex op %s: %w", operator.Name, err,
+				)
+			}
 			operator.If = fmt.Sprintf(
-				`%s != nil && %s matches "%s"`,
-				parseFromPath,
-				parseFromPath,
+				`%s && %s matches "%s"`,
+				parseFromNotNilCheck,
+				operator.ParseFrom,
 				strings.ReplaceAll(
 					strings.ReplaceAll(operator.Regex, `\`, `\\`),
 					`"`, `\"`,
 				),
 			)
 
+		} else if operator.Type == "grok_parser" {
+			parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for parseFrom of grok op %s: %w", operator.Name, err,
+				)
+			}
+			operator.If = parseFromNotNilCheck
+
 		} else if operator.Type == "json_parser" {
-			parseFromParts := strings.Split(operator.ParseFrom, ".")
-			parseFromPath := strings.Join(parseFromParts, "?.")
-			operator.If = fmt.Sprintf(`%s != nil && %s matches "^\\s*{.*}\\s*$"`, parseFromPath, parseFromPath)
+			parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for parseFrom of json parser op %s: %w", operator.Name, err,
+				)
+			}
+			operator.If = fmt.Sprintf(
+				`%s && %s matches "^\\s*{.*}\\s*$"`, parseFromNotNilCheck, operator.ParseFrom,
+			)
 
+		} else if operator.Type == "add" {
+			if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") {
+				expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")")
+				fieldsNotNilCheck, err := fieldsReferencedInExprNotNilCheck(expression)
+				if err != nil {
+					return nil, fmt.Errorf(
+						"couldn't generate nil check for fields referenced in value expr of add operator %s: %w",
+						operator.Name, err,
+					)
+				}
+				if fieldsNotNilCheck != "" {
+					operator.If = fieldsNotNilCheck
+				}
+			}
+
 		} else if operator.Type == "move" || operator.Type == "copy" {
-			fromParts := strings.Split(operator.From, ".")
-			fromPath := strings.Join(fromParts, "?.")
-			operator.If = fmt.Sprintf(`%s != nil`, fromPath)
+			fromNotNilCheck, err := fieldNotNilCheck(operator.From)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for From field of %s op %s: %w", operator.Type, operator.Name, err,
+				)
+			}
+			operator.If = fromNotNilCheck
 
 		} else if operator.Type == "remove" {
-			fieldParts := strings.Split(operator.Field, ".")
-			fieldPath := strings.Join(fieldParts, "?.")
-			operator.If = fmt.Sprintf(`%s != nil`, fieldPath)
+			fieldNotNilCheck, err := fieldNotNilCheck(operator.Field)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for field to be removed by op %s: %w", operator.Name, err,
+				)
+			}
+			operator.If = fieldNotNilCheck
 
 		} else if operator.Type == "trace_parser" {
 			cleanTraceParser(&operator)
 
 		} else if operator.Type == "time_parser" {
-			parseFromParts := strings.Split(operator.ParseFrom, ".")
-			parseFromPath := strings.Join(parseFromParts, "?.")
-
-			operator.If = fmt.Sprintf(`%s != nil`, parseFromPath)
+			parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for parseFrom of time parser op %s: %w", operator.Name, err,
+				)
+			}
+			operator.If = parseFromNotNilCheck
 
 			if operator.LayoutType == "strptime" {
 				regex, err := RegexForStrptimeLayout(operator.Layout)
 				if err != nil {
-					return nil, fmt.Errorf("could not generate time_parser processor: %w", err)
+					return nil, fmt.Errorf(
+						"couldn't generate layout regex for time_parser %s: %w", operator.Name, err,
+					)
 				}
 
 				operator.If = fmt.Sprintf(
-					`%s && %s matches "%s"`, operator.If, parseFromPath, regex,
+					`%s && %s matches "%s"`, operator.If, operator.ParseFrom, regex,
 				)
 			} else if operator.LayoutType == "epoch" {
 				valueRegex := `^\\s*[0-9]+\\s*$`
```
```diff
@@ -133,19 +184,22 @@ func getOperators(ops []PipelineOperator) ([]PipelineOperator, error) {
 			}
 
 				operator.If = fmt.Sprintf(
-					`%s && string(%s) matches "%s"`, operator.If, parseFromPath, valueRegex,
+					`%s && string(%s) matches "%s"`, operator.If, operator.ParseFrom, valueRegex,
 				)
 
 			}
 			// TODO(Raj): Maybe add support for gotime too eventually
 
 		} else if operator.Type == "severity_parser" {
-			parseFromParts := strings.Split(operator.ParseFrom, ".")
-			parseFromPath := strings.Join(parseFromParts, "?.")
-
+			parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom)
+			if err != nil {
+				return nil, fmt.Errorf(
+					"couldn't generate nil check for parseFrom of severity parser %s: %w", operator.Name, err,
+				)
+			}
 			operator.If = fmt.Sprintf(
-				`%s != nil && ( type(%s) == "string" || ( type(%s) in ["int", "float"] && %s == float(int(%s)) ) )`,
-				parseFromPath, parseFromPath, parseFromPath, parseFromPath, parseFromPath,
+				`%s && ( type(%s) == "string" || ( type(%s) in ["int", "float"] && %s == float(int(%s)) ) )`,
+				parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom,
 			)
 
 		}
```
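To make the before and after concrete, here is a hedged illustration of the `If` expressions the rewrite above generates, for a hypothetical `json_parser` operator with `ParseFrom = "attributes.payload"` (the strings are traced by hand from these hunks and the helpers below, not taken from the PR):

```go
// Old: the whole path was optional-chained and reused as the matches operand:
//   attributes?.payload != nil && attributes?.payload matches "^\\s*{.*}\\s*$"
//
// New: the nil check is delegated to fieldNotNilCheck (defined below) and the
// raw ParseFrom path is used as the matches operand:
//   attributes?.payload != nil && attributes.payload matches "^\\s*{.*}\\s*$"
//
// The difference matters for paths containing membership ops, which plain
// optional chaining mishandled. For ParseFrom = `attributes.test["a.b"]` the
// new nil check becomes:
//   attributes?.test != nil && attributes.test["a.b"] != nil
```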
```diff
@@ -169,3 +223,151 @@ func cleanTraceParser(operator *PipelineOperator) {
 		operator.TraceFlags = nil
 	}
 }
+
+// Generates an expression checking that `fieldPath` has a non-nil value in a log record.
+func fieldNotNilCheck(fieldPath string) (string, error) {
+	_, err := expr.Compile(fieldPath)
+	if err != nil {
+		return "", fmt.Errorf("invalid fieldPath %s: %w", fieldPath, err)
+	}
+
+	// helper for turning `.` into `?.` in field paths.
+	// Eg: a.b?.c.d -> a?.b?.c?.d
+	optionalChainedPath := func(path string) string {
+		return strings.ReplaceAll(
+			strings.ReplaceAll(path, "?.", "."), ".", "?.",
+		)
+	}
+
+	// Optional chaining before membership ops is not supported by expr.
+	// Eg: The field `attributes.test["a.b"].value["c.d"].e` can't be checked using
+	// the nil check `attributes.test?.["a.b"]?.value?.["c.d"]?.e != nil`
+	// This needs to be worked around by checking that the target of membership op is not nil first.
+	// Eg: attributes.test != nil && attributes.test["a.b"]?.value != nil && attributes.test["a.b"].value["c.d"]?.e != nil
+
+	// Split once from the right to include the rightmost membership op and everything after it.
+	// Eg: `attributes.test["a.b"].value["c.d"].e` would result in `attributes.test["a.b"].value` and `["c.d"].e`
+	parts := rSplitAfterN(fieldPath, "[", 2)
+	if len(parts) < 2 {
+		// there is no [] access in fieldPath
+		return fmt.Sprintf("%s != nil", optionalChainedPath(fieldPath)), nil
+	}
+
+	// recursively generate nil check for target of the rightmost membership op (attributes.test["a.b"].value)
+	// should come out to be (attributes.test != nil && attributes.test["a.b"]?.value != nil)
+	collectionNotNilCheck, err := fieldNotNilCheck(parts[0])
+	if err != nil {
+		return "", fmt.Errorf("couldn't generate nil check for %s: %w", parts[0], err)
+	}
+
+	// generate nil check for entire path.
+	suffixParts := strings.SplitAfter(parts[1], "]") // ["c.d"], ".e"
+	fullPath := parts[0] + suffixParts[0]
+	if len(suffixParts) > 1 {
+		// attributes.test["a.b"].value["c.d"]?.e
+		fullPath += optionalChainedPath(suffixParts[1])
+	}
+	fullPathCheck := fmt.Sprintf("%s != nil", fullPath)
+
+	// If the membership op is for array/slice indexing, add check ensuring array is long enough
+	// attributes.test[3] -> len(attributes.test) > 3 && attributes.test[3] != nil
+	if !(strings.Contains(suffixParts[0], "'") || strings.Contains(suffixParts[0], `"`)) {
+		fullPathCheck = fmt.Sprintf(
+			"len(%s) > %s && %s",
+			parts[0], suffixParts[0][1:len(suffixParts[0])-1], fullPathCheck,
+		)
+	}
+
+	// If prefix is `attributes` or `resource` there is no need to add a nil check for
+	// the prefix since all log records have non nil `attributes` and `resource` fields.
+	if slices.Contains([]string{"attributes", "resource"}, parts[0]) {
+		return fullPathCheck, nil
+	}
+
+	return fmt.Sprintf("%s && %s", collectionNotNilCheck, fullPathCheck), nil
+}
```
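A minimal sketch of this helper in action, assuming it is called from the same package; the expected expressions are derived by tracing the implementation above and are not asserted anywhere in the PR:

```go
package logparsingpipeline

import "fmt"

// exampleFieldNotNilCheck is a hypothetical driver exercising fieldNotNilCheck
// on representative paths.
func exampleFieldNotNilCheck() {
	for _, path := range []string{
		`attributes.test`,              // plain path, no membership op
		`attributes.test["a.b"].value`, // membership op in the middle
		`attributes.test[3]`,           // array/slice indexing
	} {
		check, err := fieldNotNilCheck(path)
		if err != nil {
			panic(err)
		}
		fmt.Println(check)
	}
	// Traced output:
	//   attributes?.test != nil
	//   attributes?.test != nil && attributes.test["a.b"]?.value != nil
	//   attributes?.test != nil && len(attributes.test) > 3 && attributes.test[3] != nil
}
```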
|
||||
// Split `str` after `sep` from the right to create up to `n` parts.
|
||||
// rSplitAfterN("a.b.c.d", ".", 3) -> ["a.b", ".c", ".d"]
|
||||
func rSplitAfterN(str string, sep string, n int) []string {
|
||||
reversedStr := reverseString(str)
|
||||
parts := strings.SplitAfterN(reversedStr, sep, n)
|
||||
slices.Reverse(parts)
|
||||
result := []string{}
|
||||
for _, p := range parts {
|
||||
result = append(result, reverseString(p))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func reverseString(s string) string {
|
||||
r := []rune(s)
|
||||
for i := 0; i < len(r)/2; i++ {
|
||||
j := len(s) - 1 - i
|
||||
r[i], r[j] = r[j], r[i]
|
||||
}
|
||||
return string(r)
|
||||
}

// Generate expression for checking that all fields referenced in `expr` have a non nil value in log record.
// Eg: `attributes.x + len(resource.y)` will return the expression `attributes.x != nil && resource.y != nil`
func fieldsReferencedInExprNotNilCheck(expr string) (string, error) {
	referencedFields, err := logFieldsReferencedInExpr(expr)
	if err != nil {
		return "", fmt.Errorf("couldn't extract log fields referenced in expr %s: %w", expr, err)
	}

	// Generating nil check for deepest fields takes care of their prefixes too.
	// Eg: `attributes.test.value + len(attributes.test)` needs a nil check only for `attributes.test.value`
	deepestFieldRefs := []string{}
	for _, field := range referencedFields {
		isPrefixOfAnotherReferencedField := slices.ContainsFunc(
			referencedFields, func(e string) bool {
				return len(e) > len(field) && strings.HasPrefix(e, field)
			},
		)
		if !isPrefixOfAnotherReferencedField {
			deepestFieldRefs = append(deepestFieldRefs, field)
		}
	}

	fieldExprChecks := []string{}
	for _, field := range deepestFieldRefs {
		checkExpr, err := fieldNotNilCheck(field)
		if err != nil {
			return "", fmt.Errorf("could not create nil check for %s: %w", field, err)
		}
		fieldExprChecks = append(fieldExprChecks, fmt.Sprintf("(%s)", checkExpr))
	}

	return strings.Join(fieldExprChecks, " && "), nil
}
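
// Editor's note (illustrative only): for the example in the doc comment,
// `attributes.test.value + len(attributes.test)`, both paths come back from
// logFieldsReferencedInExpr, and the prefix filter above keeps only the
// deepest one:
//
//	deepestFieldRefs // -> []string{"attributes.test.value"}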

// Expr AST visitor for extracting referenced log fields
// See more at https://github.com/expr-lang/expr/blob/master/ast/visitor.go
type logFieldsInExprExtractor struct {
	referencedFields []string
}

func (v *logFieldsInExprExtractor) Visit(node *ast.Node) {
	if n, ok := (*node).(*ast.MemberNode); ok {
		memberRef := n.String()
		if strings.HasPrefix(memberRef, "attributes") || strings.HasPrefix(memberRef, "resource") {
			v.referencedFields = append(v.referencedFields, memberRef)
		}
	}
}

func logFieldsReferencedInExpr(expr string) ([]string, error) {
	// parse abstract syntax tree for expr
	exprAst, err := parser.Parse(expr)
	if err != nil {
		return nil, fmt.Errorf("could not parse expr: %w", err)
	}

	// walk ast for expr to collect all member references.
	v := &logFieldsInExprExtractor{}
	ast.Walk(&exprAst.Node, v)

	return v.referencedFields, nil
}
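
// Editor's note (sketch, assuming the expr-lang parser and ast packages
// imported by this file, and member rendering as in the doc comment above):
//
//	fields, err := logFieldsReferencedInExpr(`attributes.x + len(resource.y)`)
//	// err == nil, fields -> ["attributes.x", "resource.y"]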

@@ -386,8 +386,19 @@ func TestNoCollectorErrorsFromProcessorsForMismatchedLogs(t *testing.T) {
		makeTestLog("mismatching log", map[string]string{
			"test_timestamp": "not-an-epoch",
		}),
	}, {
		"grok parser should ignore logs with missing parse from field",
		PipelineOperator{
			ID:        "grok",
			Type:      "grok_parser",
			Enabled:   true,
			Name:      "grok parser",
			ParseFrom: "attributes.test",
			Pattern:   "%{GREEDYDATA}",
			ParseTo:   "attributes.test_parsed",
		},
		makeTestLog("test log with missing parse from field", map[string]string{}),
	},
	// TODO(Raj): see if there is an error scenario for grok parser.
	// TODO(Raj): see if there is an error scenario for trace parser.
	// TODO(Raj): see if there is an error scenario for Add operator.
}

@@ -608,6 +619,184 @@ func TestAttributePathsContainingDollarDoNotBreakCollector(t *testing.T) {
	require.Equal("test", result[0].Attributes_string["$test1"])
}

func TestMembershipOpInProcessorFieldExpressions(t *testing.T) {
	require := require.New(t)

	testLogs := []model.SignozLog{
		makeTestSignozLog("test log", map[string]interface{}{
			"http.method":    "GET",
			"order.products": `{"ids": ["pid0", "pid1"]}`,
		}),
	}

	testPipeline := Pipeline{
		OrderId: 1,
		Name:    "pipeline1",
		Alias:   "pipeline1",
		Enabled: true,
		Filter: &v3.FilterSet{
			Operator: "AND",
			Items: []v3.FilterItem{
				{
					Key: v3.AttributeKey{
						Key:      "http.method",
						DataType: v3.AttributeKeyDataTypeString,
						Type:     v3.AttributeKeyTypeTag,
					},
					Operator: "=",
					Value:    "GET",
				},
			},
		},
		Config: []PipelineOperator{
			{
				ID:      "move",
				Type:    "move",
				Enabled: true,
				Name:    "move",
				From:    `attributes["http.method"]`,
				To:      `attributes["test.http.method"]`,
			}, {
				ID:        "json",
				Type:      "json_parser",
				Enabled:   true,
				Name:      "json",
				ParseFrom: `attributes["order.products"]`,
				ParseTo:   `attributes["order.products"]`,
			}, {
				ID:      "move1",
				Type:    "move",
				Enabled: true,
				Name:    "move1",
				From:    `attributes["order.products"].ids`,
				To:      `attributes["order.product_ids"]`,
			}, {
				ID:      "move2",
				Type:    "move",
				Enabled: true,
				Name:    "move2",
				From:    `attributes.test?.doesnt_exist`,
				To:      `attributes["test.doesnt_exist"]`,
			}, {
				ID:      "add",
				Type:    "add",
				Enabled: true,
				Name:    "add",
				Field:   `attributes["order.pids"].missing_field`,
				Value:   `EXPR(attributes.a["b.c"].d[4].e + resource.f)`,
			}, {
				ID:      "add2",
				Type:    "add",
				Enabled: true,
				Name:    "add2",
				Field:   `attributes["order.pids.pid0"]`,
				Value:   `EXPR(attributes["order.product_ids"][0])`,
			}, {
				ID:      "add3",
				Type:    "add",
				Enabled: true,
				Name:    "add3",
				Field:   `attributes["attrs.test.value"]`,
				Value:   `EXPR(attributes.test?.value)`,
			},
		},
	}

	result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
		context.Background(),
		[]Pipeline{testPipeline},
		testLogs,
	)
	require.Nil(err)
	require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
	require.Equal(1, len(result))

	_, methodAttrExists := result[0].Attributes_string["http.method"]
	require.False(methodAttrExists)
	require.Equal("GET", result[0].Attributes_string["test.http.method"])
	require.Equal("pid0", result[0].Attributes_string["order.pids.pid0"])
}

func TestContainsFilterIsCaseInsensitive(t *testing.T) {
	// The contains and ncontains query builder filters are case insensitive when querying logs.
	// Pipeline filter should also behave in the same way.
	require := require.New(t)

	testLogs := []model.SignozLog{
		makeTestSignozLog("test Ecom Log", map[string]interface{}{}),
	}

	testPipelines := []Pipeline{{
		OrderId: 1,
		Name:    "pipeline1",
		Alias:   "pipeline1",
		Enabled: true,
		Filter: &v3.FilterSet{
			Operator: "AND",
			Items: []v3.FilterItem{{
				Key: v3.AttributeKey{
					Key:      "body",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
				},
				Operator: "contains",
				Value:    "log",
			}},
		},
		Config: []PipelineOperator{
			{
				ID:      "add",
				Type:    "add",
				Enabled: true,
				Name:    "add",
				Field:   "attributes.test1",
				Value:   "value1",
			},
		},
	}, {
		OrderId: 2,
		Name:    "pipeline2",
		Alias:   "pipeline2",
		Enabled: true,
		Filter: &v3.FilterSet{
			Operator: "AND",
			Items: []v3.FilterItem{{
				Key: v3.AttributeKey{
					Key:      "body",
					DataType: v3.AttributeKeyDataTypeString,
					Type:     v3.AttributeKeyTypeUnspecified,
					IsColumn: true,
				},
				Operator: "ncontains",
				Value:    "ecom",
			}},
		},
		Config: []PipelineOperator{
			{
				ID:      "add",
				Type:    "add",
				Enabled: true,
				Name:    "add",
				Field:   "attributes.test2",
				Value:   "value2",
			},
		},
	}}

	result, collectorWarnAndErrorLogs, err := SimulatePipelinesProcessing(
		context.Background(), testPipelines, testLogs,
	)
	require.Nil(err)
	require.Equal(0, len(collectorWarnAndErrorLogs), strings.Join(collectorWarnAndErrorLogs, "\n"))
	require.Equal(1, len(result))

	require.Equal(result[0].Attributes_string["test1"], "value1")

	_, test2Exists := result[0].Attributes_string["test2"]
	require.False(test2Exists)
}

func TestTemporaryWorkaroundForSupportingAttribsContainingDots(t *testing.T) {
	// TODO(Raj): Remove this after dots are supported

@@ -438,15 +438,15 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v
	// chart with just the query value.
	switch reduceTo {
	case v3.ReduceToOperatorLast:
		query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts FROM (%s)", query)
		query = fmt.Sprintf("SELECT anyLast(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorSum:
		query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts FROM (%s)", query)
		query = fmt.Sprintf("SELECT sum(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorAvg:
		query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts FROM (%s)", query)
		query = fmt.Sprintf("SELECT avg(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorMax:
		query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts FROM (%s)", query)
		query = fmt.Sprintf("SELECT max(value) as value, now() as ts FROM (%s)", query)
	case v3.ReduceToOperatorMin:
		query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts FROM (%s)", query)
		query = fmt.Sprintf("SELECT min(value) as value, now() as ts FROM (%s)", query)
	default:
		return "", fmt.Errorf("unsupported reduce operator")
	}

@@ -2,6 +2,7 @@ package v3

import (
	"fmt"
	"math"
	"strings"
	"time"

@@ -172,7 +173,7 @@ func buildMetricQuery(start, end, step int64, mq *v3.BuilderQuery, tableName str
		return "", err
	}

	samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms <= %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)
	samplesTableTimeFilter := fmt.Sprintf("metric_name = %s AND timestamp_ms >= %d AND timestamp_ms < %d", utils.ClickHouseFormattedValue(mq.AggregateAttribute.Key), start, end)

	// Select the aggregate value for interval
	queryTmpl :=
@@ -427,15 +428,15 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v
	// chart with just the query value.
	switch reduceTo {
	case v3.ReduceToOperatorLast:
		query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT anyLastIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
		query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT anyLastIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
	case v3.ReduceToOperatorSum:
		query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT sumIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
		query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT sumIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
	case v3.ReduceToOperatorAvg:
		query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
		query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
	case v3.ReduceToOperatorMax:
		query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT maxIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
		query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT maxIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
	case v3.ReduceToOperatorMin:
		query = fmt.Sprintf("SELECT *, timestamp AS ts FROM (SELECT minIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
		query = fmt.Sprintf("SELECT *, now() AS ts FROM (SELECT minIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp %s FROM (%s) %s)", selectLabels, query, groupBy)
	default:
		return "", fmt.Errorf("unsupported reduce operator")
	}
@@ -447,10 +448,14 @@ func reduceQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator v
// start and end are in milliseconds
// step is in seconds
func PrepareMetricQuery(start, end int64, queryType v3.QueryType, panelType v3.PanelType, mq *v3.BuilderQuery, options Options) (string, error) {

	// adjust the start and end time to be aligned with the step interval
	start = start - (start % (mq.StepInterval * 1000))
	end = end - (end % (mq.StepInterval * 1000))
	// if the query is a rate query, we adjust the start time by one more step
	// so that we can calculate the rate for the first data point
	if mq.AggregateOperator.IsRateOperator() && mq.Temporality != v3.Delta {
		start -= mq.StepInterval * 1000
	}
	adjustStep := int64(math.Min(float64(mq.StepInterval), 60))
	end = end - (end % (adjustStep * 1000))

	var query string
	var err error
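
// Editor's note (worked example, not part of the change): with
// mq.StepInterval = 300 and a rate aggregation, a start time of 20:10:42 is
// first floored to the step (20:10:00) and then pushed back one more step to
// 20:05:00 so the first interval has a previous sample to diff against; the
// end is floored with adjustStep = min(step, 60) seconds, so 20:41:30 becomes
// 20:41:00. This matches the adjusted-times test expectations below.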

@@ -245,7 +245,7 @@ func TestBuildQueryOperators(t *testing.T) {
func TestBuildQueryXRate(t *testing.T) {
	t.Run("TestBuildQueryXRate", func(t *testing.T) {

		tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts`
		tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991920000 AND timestamp_ms < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts`

		cases := []struct {
			aggregateOperator v3.AggregateOperator
@@ -298,7 +298,7 @@ func TestBuildQueryXRate(t *testing.T) {
func TestBuildQueryRPM(t *testing.T) {
	t.Run("TestBuildQueryXRate", func(t *testing.T) {

		tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991980000 AND timestamp_ms <= 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)`
		tmpl := `SELECT ts, ceil(value * 60) as value FROM (SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(timestamp_ms, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v2 INNER JOIN (SELECT fingerprint FROM signoz_metrics.time_series_v2 WHERE metric_name = 'name' AND temporality IN ['Cumulative', 'Unspecified']) as filtered_time_series USING fingerprint WHERE metric_name = 'name' AND timestamp_ms >= 1650991920000 AND timestamp_ms < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts)`

		cases := []struct {
			aggregateOperator v3.AggregateOperator
@@ -376,8 +376,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
				},
			},
		},
		// 20:11:00 - 20:41:00
		expected: "timestamp_ms >= 1686082260000 AND timestamp_ms <= 1686084060000",
		// 20:10:00 - 20:41:00
		expected: "timestamp_ms >= 1686082200000 AND timestamp_ms < 1686084060000",
	},
	{
		name: "TestBuildQueryAdjustedTimes start close to 50 seconds",
@@ -401,8 +401,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
				},
			},
		},
		// 20:11:00 - 20:41:00
		expected: "timestamp_ms >= 1686082260000 AND timestamp_ms <= 1686084060000",
		// 20:10:00 - 20:41:00
		expected: "timestamp_ms >= 1686082200000 AND timestamp_ms < 1686084060000",
	},
	{
		name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 30 seconds",
@@ -426,8 +426,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
				},
			},
		},
		// 20:11:30 - 20:41:00
		expected: "timestamp_ms >= 1686082290000 AND timestamp_ms <= 1686084060000",
		// 20:11:00 - 20:41:00
		expected: "timestamp_ms >= 1686082260000 AND timestamp_ms < 1686084060000",
	},
	{
		name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 30 seconds and end close to 30 seconds",
@@ -451,8 +451,8 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
				},
			},
		},
		// 20:11:30 - 20:41:00
		expected: "timestamp_ms >= 1686082290000 AND timestamp_ms <= 1686084060000",
		// 20:11:00 - 20:41:00
		expected: "timestamp_ms >= 1686082260000 AND timestamp_ms < 1686084060000",
	},
	{
		name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 300 seconds and end close to 30 seconds",
@@ -476,8 +476,10 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
				},
			},
		},
		// 20:10:00 - 20:40:00
		expected: "timestamp_ms >= 1686082200000 AND timestamp_ms <= 1686084000000",
		// 20:05:00 - 20:41:00
		// 20:10:00 is the nearest 5 minute interval, but we round down to 20:05:00
		// as this is a rate query and we want to include the previous value for the first interval
		expected: "timestamp_ms >= 1686081900000 AND timestamp_ms < 1686084060000",
	},
	{
		name: "TestBuildQueryAdjustedTimes start close to 42 seconds with step 180 seconds and end close to 30 seconds",
@@ -501,8 +503,10 @@ func TestBuildQueryAdjustedTimes(t *testing.T) {
				},
			},
		},
		// 20:09:00 - 20:39:00
		expected: "timestamp_ms >= 1686082140000 AND timestamp_ms <= 1686083940000",
		// 20:06:00 - 20:39:00
		// 20:09:00 is the nearest 3 minute interval, but we round down to 20:06:00
		// as this is a rate query and we want to include the previous value for the first interval
		expected: "timestamp_ms >= 1686081960000 AND timestamp_ms < 1686084060000",
	},
}

@@ -558,8 +558,8 @@ func TestQueryRange(t *testing.T) {
	}
	q := NewQuerier(opts)
	expectedTimeRangeInQueryString := []string{
		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms <= %d", 1675115580000, 1675115580000+120*60*1000),
		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms <= %d", 1675115580000+120*60*1000, 1675115580000+180*60*1000),
		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000, 1675115580000+120*60*1000),
		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000+120*60*1000, 1675115580000+180*60*1000),
		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", 1675115580000*1000000, (1675115580000+120*60*1000)*int64(1000000)),
		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675115580000+60*60*1000)*int64(1000000), (1675115580000+180*60*1000)*int64(1000000)),
	}
@@ -669,7 +669,7 @@ func TestQueryRangeValueType(t *testing.T) {
	q := NewQuerier(opts)
	// No caching
	expectedTimeRangeInQueryString := []string{
		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms <= %d", 1675115580000, 1675115580000+120*60*1000),
		fmt.Sprintf("timestamp_ms >= %d AND timestamp_ms < %d", 1675115520000, 1675115580000+120*60*1000),
		fmt.Sprintf("timestamp >= '%d' AND timestamp <= '%d'", (1675115580000+60*60*1000)*int64(1000000), (1675115580000+180*60*1000)*int64(1000000)),
	}

@@ -476,15 +476,15 @@ func reduceToQuery(query string, reduceTo v3.ReduceToOperator, aggregateOperator
	var groupBy string
	switch reduceTo {
	case v3.ReduceToOperatorLast:
		query = fmt.Sprintf("SELECT anyLast(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
		query = fmt.Sprintf("SELECT anyLast(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorSum:
		query = fmt.Sprintf("SELECT sum(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
		query = fmt.Sprintf("SELECT sum(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorAvg:
		query = fmt.Sprintf("SELECT avg(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
		query = fmt.Sprintf("SELECT avg(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorMax:
		query = fmt.Sprintf("SELECT max(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
		query = fmt.Sprintf("SELECT max(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	case v3.ReduceToOperatorMin:
		query = fmt.Sprintf("SELECT min(value) as value, any(ts) as ts FROM (%s) %s", query, groupBy)
		query = fmt.Sprintf("SELECT min(value) as value, now() as ts FROM (%s) %s", query, groupBy)
	default:
		return "", fmt.Errorf("unsupported reduce operator")
	}

@@ -73,9 +73,18 @@ func Parse(filters *v3.FilterSet) (string, error) {

	case v3.FilterOperatorExists, v3.FilterOperatorNotExists:
		filter = fmt.Sprintf("%s %s %s", exprFormattedValue(v.Key.Key), logOperatorsToExpr[v.Operator], getTypeName(v.Key.Type))

	default:
		filter = fmt.Sprintf("%s %s %s", name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value))

		if v.Operator == v3.FilterOperatorContains || v.Operator == v3.FilterOperatorNotContains {
			// `contains` and `ncontains` should be case insensitive to match how they work when querying logs.
			filter = fmt.Sprintf(
				"lower(%s) %s lower(%s)",
				name, logOperatorsToExpr[v.Operator], exprFormattedValue(v.Value),
			)
		}

		// Avoid running operators on nil values
		if v.Operator != v3.FilterOperatorEqual && v.Operator != v3.FilterOperatorNotEqual {
			filter = fmt.Sprintf("%s != nil && %s", name, filter)
@@ -6,10 +6,12 @@ import (
	"fmt"
	"math"
	"reflect"
	"regexp"
	"sort"
	"sync"
	"text/template"
	"time"
	"unicode"

	"go.uber.org/zap"

@@ -435,7 +437,7 @@ func (r *ThresholdRule) runChQuery(ctx context.Context, db clickhouse.Conn, quer

	for i, v := range vars {

		colName := columnNames[i]
		colName := normalizeLabelName(columnNames[i])

		switch v := v.(type) {
		case *string:
@@ -764,6 +766,23 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time, ch c
	return nil, fmt.Errorf("this is unexpected, invalid query label")
}

func normalizeLabelName(name string) string {
	// See https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels

	// Regular expression to match non-alphanumeric characters except underscores
	reg := regexp.MustCompile(`[^a-zA-Z0-9_]`)

	// Replace all non-alphanumeric characters except underscores with underscores
	normalized := reg.ReplaceAllString(name, "_")

	// If the first character is not a letter or an underscore, prepend an underscore
	if len(normalized) > 0 && !unicode.IsLetter(rune(normalized[0])) && normalized[0] != '_' {
		normalized = "_" + normalized
	}

	return normalized
}
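
// Editor's note (illustrative only, mirrors the test cases further below):
//
//	normalizeLabelName("label.with.dots")               // -> "label_with_dots"
//	normalizeLabelName("label with spaces and -dashes") // -> "label_with_spaces_and__dashes"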

func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Queriers) (interface{}, error) {

	valueFormatter := formatter.FromUnit(r.Unit())
@@ -829,7 +848,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time, queriers *Querie

	annotations := make(labels.Labels, 0, len(r.annotations))
	for _, a := range r.annotations {
		annotations = append(annotations, labels.Label{Name: a.Name, Value: expand(a.Value)})
		annotations = append(annotations, labels.Label{Name: normalizeLabelName(a.Name), Value: expand(a.Value)})
	}

	lbs := lb.Labels()

@@ -295,3 +295,43 @@ func TestThresholdRuleCombinations(t *testing.T) {
		}
	}
}

func TestNormalizeLabelName(t *testing.T) {
	cases := []struct {
		labelName string
		expected  string
	}{
		{
			labelName: "label",
			expected:  "label",
		},
		{
			labelName: "label.with.dots",
			expected:  "label_with_dots",
		},
		{
			labelName: "label-with-dashes",
			expected:  "label_with_dashes",
		},
		{
			labelName: "labelwithnospaces",
			expected:  "labelwithnospaces",
		},
		{
			labelName: "label with spaces",
			expected:  "label_with_spaces",
		},
		{
			labelName: "label with spaces and .dots",
			expected:  "label_with_spaces_and__dots",
		},
		{
			labelName: "label with spaces and -dashes",
			expected:  "label_with_spaces_and__dashes",
		},
	}

	for _, c := range cases {
		assert.Equal(t, c.expected, normalizeLabelName(c.labelName))
	}
}