Compare commits

..

2 Commits

Author SHA1 Message Date
Abhi Kumar
1f602a216e chore: broke down drilldown navigate into a separate hook 2026-04-23 13:46:19 +05:30
Ashwin Bhatkal
021f1c5775 fix: handle cancel functionality for Run Query Button (#10958)
* fix: add ERR_CANCELED retry skip and new query key constants

* refactor: add disabled prop and handleCancelQuery to shared query components (#10959)

* refactor: add disabled prop and handleCancelQuery to shared query components

* feat: add cancel query support to alert rule editing (#10960)

* feat: add cancel query support to alert rule editing

* feat: add cancel query support to CreateAlertV2 (#10961)

* feat: add cancel query support to CreateAlertV2

* feat: add cancel query and AbortSignal support to MetricsExplorer Explorer (#10962)

* feat: add cancel query and AbortSignal support to MetricsExplorer Explorer

* feat: add cancel query support to MetricsExplorer Inspect (#10963)

* feat: add cancel query support to MetricsExplorer Inspect

* feat: add cancel query support to MetricsExplorer Summary (#10964)

* feat: add cancel query support to MetricsExplorer Summary

* feat: add cancel query support to MeterExplorer and dashboard widgets (#10965)

* feat: add cancel query support to MeterExplorer and dashboard widgets

* feat: add cancel query support to Logs, Traces, Exceptions and API Monitoring (#10972)

* feat: add cancel query support to Logs, Traces, Errors, and API Monitoring

* refactor: remove deprecated props and enforce strict query cancel interfaces (#10974)

* refactor: remove deprecated props and enforce strict query cancel interfaces

* fix: metrics explorer inspect cancel and run query bugs (#10975)

* fix: metrics explorer inspect cancel and run query bugs

* fix: api monitoring cancel and run query bugs (#10984)

* feat: add cancelled query placeholder UI to alerts, explorers, exceptions, and api monitoring (#10988)

* feat: add cancel query support to MeterExplorer and dashboard widgets

* fix: api monitoring cancel and run query bugs

* feat: add cancelled query placeholder UI to alerts, explorers, exceptions, and api monitoring

* fix: cancelled placeholder for alert v2 and metrics inspect, use css modules

* fix: cancelled placeholder race condition in metrics inspect auto-reset

* fix: prioritize cancelled state over loading in metrics inspect content

* fix: keep query builder rendered and match graph view height in inspect fallback

* feat: add cancelled query placeholder to logs, traces, and dashboard widgets (#11007)

* feat: add cancelled query placeholder to logs, traces, and dashboard widgets

* fix: reset cancel on run and swap only chart body in widget graph

* fix: use constants for max retry count (#11049)

* fix: use semantic tokens
2026-04-23 06:58:50 +00:00
95 changed files with 1583 additions and 3789 deletions

View File

@@ -49,7 +49,6 @@ jobs:
- role
- ttl
- alerts
- alertmanager
- ingestionkeys
- rootuser
- serviceaccount

View File

@@ -277,23 +277,8 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
annotations := make(ruletypes.Labels, 0, len(r.Annotations().Map()))
for name, value := range r.Annotations().Map() {
// no need to expand custom templating annotations — they get expanded in the notifier layer
if ruletypes.IsCustomTemplatingAnnotation(name) {
annotations = append(annotations, ruletypes.Label{Name: name, Value: value})
continue
}
annotations = append(annotations, ruletypes.Label{Name: name, Value: expand(value)})
}
// Add values to be used in notifier layer for notification templates
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationValue, Value: value})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationThresholdValue, Value: threshold})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationCompareOp, Value: smpl.CompareOperator.Literal()})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationMatchType, Value: smpl.MatchType.Literal()})
if smpl.IsRecovering {
lb.Set(ruletypes.LabelIsRecovering, "true")
}
if smpl.IsMissing {
lb.Set(ruletypes.AlertNameLabel, "[No data] "+r.Name())
lb.Set(ruletypes.NoDataLabel, "true")

View File

@@ -6,15 +6,20 @@ import { PayloadProps, Props } from 'types/api/thirdPartyApis/listOverview';
const listOverview = async (
props: Props,
signal?: AbortSignal,
): Promise<SuccessResponseV2<PayloadProps>> => {
const { start, end, show_ip: showIp, filter } = props;
try {
const response = await axios.post(`/third-party-apis/overview/list`, {
start,
end,
show_ip: showIp,
filter,
});
const response = await axios.post(
`/third-party-apis/overview/list`,
{
start,
end,
show_ip: showIp,
filter,
},
{ signal },
);
return {
httpStatusCode: response.status,

View File

@@ -0,0 +1,26 @@
// Full-height flex shell that centers the cancelled-query message
// inside whatever panel/chart area it replaces.
.placeholder {
	display: flex;
	flex-direction: column;
	align-items: center;
	justify-content: center;
	gap: 12px;
	width: 100%;
	height: 100%;
	min-height: 240px;
	padding: 24px;
}

// Fixed-size "eyes" emoji illustration above the message.
.emoji {
	width: 48px;
	height: 48px;
}

// Primary message line, muted so it reads as an empty/placeholder state.
.text {
	color: var(--muted-foreground);
	font-size: 14px;
	text-align: center;
}

// Call-to-action portion of the message, emphasised in the normal
// foreground colour.
.subText {
	color: var(--foreground);
}

View File

@@ -0,0 +1,31 @@
import { Typography } from 'antd';
import eyesEmojiUrl from 'assets/Images/eyesEmoji.svg';
import styles from './QueryCancelledPlaceholder.module.scss';
interface QueryCancelledPlaceholderProps {
subText?: string;
}
/**
 * Centered fallback rendered in place of query results after the user
 * cancels an in-flight query.
 *
 * @param subText - optional call-to-action line; when omitted, a generic
 * "Run Query" prompt is shown instead.
 */
function QueryCancelledPlaceholder({
	subText,
}: QueryCancelledPlaceholderProps): JSX.Element {
	// Fall back to the generic prompt when the caller gives no custom text.
	const callToAction = subText || 'Click "Run Query" to load data.';

	return (
		<div className={styles.placeholder}>
			<img alt="eyes emoji" src={eyesEmojiUrl} className={styles.emoji} />
			<Typography className={styles.text}>
				Query cancelled.
				<span className={styles.subText}> {callToAction}</span>
			</Typography>
		</div>
	);
}

QueryCancelledPlaceholder.defaultProps = {
	subText: undefined,
};

export default QueryCancelledPlaceholder;

View File

@@ -0,0 +1 @@
export { default } from './QueryCancelledPlaceholder';

View File

@@ -0,0 +1,8 @@
/**
 * Upper bound on react-query retries for a failed request.
 *
 * Meant for the default `retry` predicate, e.g.
 * `retry: (failureCount) => failureCount < MAX_QUERY_RETRIES`,
 * which permits 3 retries — 4 attempts in total counting the
 * initial request.
 */
export const MAX_QUERY_RETRIES = 3;

View File

@@ -25,7 +25,8 @@ export const REACT_QUERY_KEY = {
ALERT_RULE_TIMELINE_GRAPH: 'ALERT_RULE_TIMELINE_GRAPH',
GET_CONSUMER_LAG_DETAILS: 'GET_CONSUMER_LAG_DETAILS',
TOGGLE_ALERT_STATE: 'TOGGLE_ALERT_STATE',
GET_ALL_ALLERTS: 'GET_ALL_ALLERTS',
GET_ALL_ALERTS: 'GET_ALL_ALERTS',
ALERT_RULES_CHART_PREVIEW: 'ALERT_RULES_CHART_PREVIEW',
REMOVE_ALERT_RULE: 'REMOVE_ALERT_RULE',
DUPLICATE_ALERT_RULE: 'DUPLICATE_ALERT_RULE',
GET_HOST_LIST: 'GET_HOST_LIST',

View File

@@ -21,6 +21,7 @@ import { FilterConfirmProps } from 'antd/lib/table/interface';
import logEvent from 'api/common/logEvent';
import getAll from 'api/errors/getAll';
import getErrorCounts from 'api/errors/getErrorCounts';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import { ResizeTable } from 'components/ResizeTable';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import ROUTES from 'constants/routes';
@@ -36,6 +37,7 @@ import useUrlQuery from 'hooks/useUrlQuery';
import createQueryParams from 'lib/createQueryParams';
import history from 'lib/history';
import { isUndefined } from 'lodash-es';
import { useAllErrorsQueryState } from 'pages/AllErrors/QueryStateContext';
import { useTimezone } from 'providers/Timezone';
import { AppState } from 'store/reducers';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -121,7 +123,13 @@ function AllErrors(): JSX.Element {
const { queries } = useResourceAttribute();
const compositeData = useGetCompositeQueryParam();
const [{ isLoading, data }, errorCountResponse] = useQueries([
const setIsFetching = useAllErrorsQueryState((s) => s.setIsFetching);
const isCancelled = useAllErrorsQueryState((s) => s.isCancelled);
const [
{ isLoading, isFetching: isErrorsFetching, data },
errorCountResponse,
] = useQueries([
{
queryKey: ['getAllErrors', updatedPath, maxTime, minTime, compositeData],
queryFn: (): Promise<SuccessResponse<PayloadProps> | ErrorResponse> =>
@@ -162,6 +170,12 @@ function AllErrors(): JSX.Element {
enabled: !loading,
},
]);
const isFetching = isErrorsFetching || errorCountResponse.isFetching;
useEffect(() => {
setIsFetching(isFetching);
}, [isFetching, setIsFetching]);
const { notifications } = useNotifications();
useEffect(() => {
@@ -473,6 +487,12 @@ function AllErrors(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [errorCountResponse.data?.payload]);
if (isCancelled && !data?.payload?.length) {
return (
<QueryCancelledPlaceholder subText='Click "Run Query" to load exceptions.' />
);
}
return (
<ResizeTable
columns={columns}

View File

@@ -1,12 +1,16 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux';
import { LoadingOutlined } from '@ant-design/icons';
import { Spin, Table } from 'antd';
import logEvent from 'api/common/logEvent';
import emptyStateUrl from 'assets/Icons/emptyState.svg';
import cx from 'classnames';
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import { initialQueriesMap } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import Toolbar from 'container/Toolbar/Toolbar';
import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam';
@@ -23,8 +27,6 @@ import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import DOCLINKS from 'utils/docLinks';
import emptyStateUrl from '@/assets/Icons/emptyState.svg';
import { ApiMonitoringHardcodedAttributeKeys } from '../../constants';
import { DEFAULT_PARAMS, useApiMonitoringParams } from '../../queryParams';
import { columnsConfig, formatDataForTable } from '../../utils';
@@ -40,6 +42,7 @@ function DomainList(): JSX.Element {
(state) => state.globalTime,
);
const queryClient = useQueryClient();
const { currentQuery, handleRunQuery } = useQueryBuilder();
const query = useMemo(() => currentQuery?.builder?.queryData[0] || null, [
currentQuery,
@@ -53,6 +56,19 @@ function DomainList(): JSX.Element {
const compositeData = useGetCompositeQueryParam();
const [isCancelled, setIsCancelled] = useState(false);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries([REACT_QUERY_KEY.GET_DOMAINS_LIST]);
setIsCancelled(true);
}, [queryClient]);
const handleStageAndRunQuery = useCallback(() => {
setIsCancelled(false);
queryClient.invalidateQueries([REACT_QUERY_KEY.GET_DOMAINS_LIST]);
handleRunQuery();
}, [queryClient, handleRunQuery]);
const { data, isLoading, isFetching } = useListOverview({
start: minTime,
end: maxTime,
@@ -105,6 +121,13 @@ function DomainList(): JSX.Element {
[data],
);
// Auto-reset cancelled state when a new fetch starts
useEffect(() => {
if (isFetching) {
setIsCancelled(false);
}
}, [isFetching]);
// Open drawer if selectedDomain is set in URL
useEffect(() => {
if (selectedDomain && formattedDataForTable?.length > 0) {
@@ -119,7 +142,13 @@ function DomainList(): JSX.Element {
<section className={cx('api-module-right-section')}>
<Toolbar
showAutoRefresh={false}
rightActions={<RightToolbarActions onStageRunQuery={handleRunQuery} />}
rightActions={
<RightToolbarActions
onStageRunQuery={handleStageAndRunQuery}
isLoadingQueries={isFetching}
handleCancelQuery={handleCancelQuery}
/>
}
/>
<div className={cx('api-monitoring-list-header')}>
<QuerySearch
@@ -130,38 +159,44 @@ function DomainList(): JSX.Element {
hardcodedAttributeKeys={ApiMonitoringHardcodedAttributeKeys}
/>
</div>
{!isFetching && !isLoading && formattedDataForTable.length === 0 && (
<div className="no-filtered-domains-message-container">
<div className="no-filtered-domains-message-content">
<img
src={emptyStateUrl}
alt="thinking-emoji"
className="empty-state-svg"
/>
{isCancelled && formattedDataForTable.length === 0 && (
<QueryCancelledPlaceholder subText='Click "Run Query" to load API monitoring data.' />
)}
{!isCancelled &&
!isFetching &&
!isLoading &&
formattedDataForTable.length === 0 && (
<div className="no-filtered-domains-message-container">
<div className="no-filtered-domains-message-content">
<img
src={emptyStateUrl}
alt="thinking-emoji"
className="empty-state-svg"
/>
<div className="no-filtered-domains-message">
<div className="no-domain-title">
No External API calls detected with applied filters.
<div className="no-filtered-domains-message">
<div className="no-domain-title">
No External API calls detected with applied filters.
</div>
<div className="no-domain-subtitle">
Ensure all HTTP client spans are being sent with kind as{' '}
<span className="attribute">Client</span> and url set in{' '}
<span className="attribute">url.full</span> or{' '}
<span className="attribute">http.url</span> attribute.
</div>
<a
href={DOCLINKS.EXTERNAL_API_MONITORING}
target="_blank"
rel="noreferrer"
className="external-api-doc-link"
>
Learn how External API monitoring works in SigNoz{' '}
<MoveUpRight size={14} />
</a>
</div>
<div className="no-domain-subtitle">
Ensure all HTTP client spans are being sent with kind as{' '}
<span className="attribute">Client</span> and url set in{' '}
<span className="attribute">url.full</span> or{' '}
<span className="attribute">http.url</span> attribute.
</div>
<a
href={DOCLINKS.EXTERNAL_API_MONITORING}
target="_blank"
rel="noreferrer"
className="external-api-doc-link"
>
Learn how External API monitoring works in SigNoz{' '}
<MoveUpRight size={14} />
</a>
</div>
</div>
</div>
)}
)}
{(isFetching || isLoading || formattedDataForTable.length > 0) && (
<Table
className="api-monitoring-domain-list-table"

View File

@@ -18,9 +18,16 @@ import { GlobalReducer } from 'types/reducer/globalTime';
export interface ChartPreviewProps {
alertDef: AlertDef;
source?: YAxisSource;
isCancelled?: boolean;
onFetchingStateChange?: (isFetching: boolean) => void;
}
function ChartPreview({ alertDef, source }: ChartPreviewProps): JSX.Element {
function ChartPreview({
alertDef,
source,
isCancelled = false,
onFetchingStateChange,
}: ChartPreviewProps): JSX.Element {
const { currentQuery, panelType, stagedQuery } = useQueryBuilder();
const {
alertType,
@@ -88,6 +95,8 @@ function ChartPreview({ alertDef, source }: ChartPreviewProps): JSX.Element {
graphType={panelType || PANEL_TYPES.TIME_SERIES}
setQueryStatus={setQueryStatus}
additionalThresholds={thresholdState.thresholds}
isCancelled={isCancelled}
onFetchingStateChange={onFetchingStateChange}
/>
);
@@ -102,6 +111,8 @@ function ChartPreview({ alertDef, source }: ChartPreviewProps): JSX.Element {
graphType={panelType || PANEL_TYPES.TIME_SERIES}
setQueryStatus={setQueryStatus}
additionalThresholds={thresholdState.thresholds}
isCancelled={isCancelled}
onFetchingStateChange={onFetchingStateChange}
/>
);

View File

@@ -1,9 +1,11 @@
import { useCallback, useMemo } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
import { Button } from 'antd';
import classNames from 'classnames';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import QuerySectionComponent from 'container/FormAlertRules/QuerySection';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { getMetricNameFromQueryData } from 'hooks/useGetYAxisUnit';
@@ -62,7 +64,24 @@ function QuerySection(): JSX.Element {
return currentQueryKey !== stagedQueryKey;
}, [currentQuery, alertType, thresholdState, stagedQuery]);
const queryClient = useQueryClient();
const [isLoadingQueries, setIsLoadingQueries] = useState(false);
const [isCancelled, setIsCancelled] = useState(false);
useEffect(() => {
if (isLoadingQueries) {
setIsCancelled(false);
}
}, [isLoadingQueries]);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries([REACT_QUERY_KEY.ALERT_RULES_CHART_PREVIEW]);
setIsCancelled(true);
}, [queryClient]);
const runQueryHandler = useCallback(() => {
setIsCancelled(false);
queryClient.invalidateQueries([REACT_QUERY_KEY.ALERT_RULES_CHART_PREVIEW]);
// Reset the source param when the query is changed
// Then manually run the query
if (source === YAxisSource.DASHBOARDS && didQueryChange) {
@@ -76,6 +95,7 @@ function QuerySection(): JSX.Element {
currentQuery,
didQueryChange,
handleRunQuery,
queryClient,
redirectWithQueryBuilderData,
source,
]);
@@ -106,7 +126,12 @@ function QuerySection(): JSX.Element {
return (
<div className="query-section">
<Stepper stepNumber={1} label="Define the query" />
<ChartPreview alertDef={alertDef} source={source} />
<ChartPreview
alertDef={alertDef}
source={source}
isCancelled={isCancelled}
onFetchingStateChange={setIsLoadingQueries}
/>
<div className="query-section-tabs">
<div className="query-section-query-actions">
{tabs.map((tab) => (
@@ -130,6 +155,8 @@ function QuerySection(): JSX.Element {
setQueryCategory={onQueryCategoryChange}
alertType={alertType}
runQuery={runQueryHandler}
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
alertDef={alertDef}
panelType={PANEL_TYPES.TIME_SERIES}
key={currentQuery.queryType}

View File

@@ -4,12 +4,14 @@ import { useTranslation } from 'react-i18next';
import { useDispatch, useSelector } from 'react-redux';
import { useLocation } from 'react-router-dom';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import Spinner from 'components/Spinner';
import WarningPopover from 'components/WarningPopover/WarningPopover';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { FeatureKeys } from 'constants/features';
import { QueryParams } from 'constants/query';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import AnomalyAlertEvaluationView from 'container/AnomalyAlertEvaluationView';
import { INITIAL_CRITICAL_THRESHOLD } from 'container/CreateAlertV2/context/constants';
import { Threshold } from 'container/CreateAlertV2/context/types';
@@ -69,6 +71,8 @@ export interface ChartPreviewProps {
setQueryStatus?: (status: string) => void;
showSideLegend?: boolean;
additionalThresholds?: Threshold[];
isCancelled?: boolean;
onFetchingStateChange?: (isFetching: boolean) => void;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
@@ -86,6 +90,8 @@ function ChartPreview({
setQueryStatus,
showSideLegend = false,
additionalThresholds,
isCancelled = false,
onFetchingStateChange,
}: ChartPreviewProps): JSX.Element | null {
const { t } = useTranslation('alerts');
const dispatch = useDispatch();
@@ -185,7 +191,7 @@ function ChartPreview({
ENTITY_VERSION_V5,
{
queryKey: [
'chartPreview',
REACT_QUERY_KEY.ALERT_RULES_CHART_PREVIEW,
userQueryKey || JSON.stringify(query),
selectedInterval,
minTime,
@@ -193,9 +199,14 @@ function ChartPreview({
alertDef?.ruleType,
],
enabled: canQuery,
keepPreviousData: true,
},
);
useEffect(() => {
onFetchingStateChange?.(queryResponse.isFetching);
}, [queryResponse.isFetching, onFetchingStateChange]);
const graphRef = useRef<HTMLDivElement>(null);
useEffect((): void => {
@@ -334,11 +345,16 @@ function ChartPreview({
const chartData = getUPlotChartData(queryResponse?.data?.payload);
const hasResultData = !!queryResponse?.data?.payload?.data?.result?.length;
const isAnomalyDetectionAlert =
alertDef?.ruleType === AlertDetectionTypes.ANOMALY_DETECTION_ALERT;
const chartDataAvailable =
chartData && !queryResponse.isError && !queryResponse.isLoading;
chartData &&
hasResultData &&
!queryResponse.isLoading &&
(!queryResponse.isError || isCancelled);
const isAnomalyDetectionEnabled =
featureFlags?.find((flag) => flag.name === FeatureKeys.ANOMALY_DETECTION)
@@ -359,10 +375,14 @@ function ChartPreview({
{queryResponse.isLoading && (
<Spinner size="large" tip="Loading..." height="100%" />
)}
{(queryResponse?.isError || queryResponse?.error) && (
{(queryResponse?.isError || queryResponse?.error) && !isCancelled && (
<ErrorInPlace error={queryResponse.error as APIError} />
)}
{isCancelled && !queryResponse.isLoading && !hasResultData && (
<QueryCancelledPlaceholder subText='Click "Run Query" to load the chart preview.' />
)}
{chartDataAvailable && !isAnomalyDetectionAlert && (
<GridPanelSwitch
options={options}
@@ -403,6 +423,8 @@ ChartPreview.defaultProps = {
setQueryStatus: (): void => {},
showSideLegend: false,
additionalThresholds: undefined,
isCancelled: false,
onFetchingStateChange: undefined,
};
export default ChartPreview;

View File

@@ -35,6 +35,8 @@ function QuerySection({
setQueryCategory,
alertType,
runQuery,
isLoadingQueries,
handleCancelQuery,
alertDef,
panelType,
ruleId,
@@ -226,6 +228,8 @@ function QuerySection({
queryType: queryCategory,
});
}}
handleCancelQuery={handleCancelQuery}
isLoadingQueries={isLoadingQueries}
/>
</span>
}
@@ -245,7 +249,11 @@ function QuerySection({
onChange={handleQueryCategoryChange}
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<RunQueryBtn onStageRunQuery={runQuery} />
<RunQueryBtn
onStageRunQuery={runQuery}
handleCancelQuery={handleCancelQuery}
isLoadingQueries={isLoadingQueries}
/>
</span>
}
items={items}
@@ -287,6 +295,8 @@ interface QuerySectionProps {
setQueryCategory: (n: EQueryType) => void;
alertType: AlertTypes;
runQuery: VoidFunction;
isLoadingQueries: boolean;
handleCancelQuery: () => void;
alertDef: AlertDef;
panelType: PANEL_TYPES;
ruleId: string;

View File

@@ -136,6 +136,19 @@ function FormAlertRules({
// use query client
const ruleCache = useQueryClient();
const [isChartQueryCancelled, setIsChartQueryCancelled] = useState(false);
const [isLoadingAlertQuery, setIsLoadingAlertQuery] = useState(false);
useEffect(() => {
if (isLoadingAlertQuery) {
setIsChartQueryCancelled(false);
}
}, [isLoadingAlertQuery]);
const handleCancelAlertQuery = useCallback(() => {
ruleCache.cancelQueries(REACT_QUERY_KEY.ALERT_RULES_CHART_PREVIEW);
setIsChartQueryCancelled(true);
}, [ruleCache]);
const isNewRule = !ruleId || isEmpty(ruleId);
@@ -702,6 +715,8 @@ function FormAlertRules({
yAxisUnit={yAxisUnit || ''}
graphType={panelType || PANEL_TYPES.TIME_SERIES}
setQueryStatus={setQueryStatus}
isCancelled={isChartQueryCancelled}
onFetchingStateChange={setIsLoadingAlertQuery}
/>
);
@@ -720,6 +735,8 @@ function FormAlertRules({
yAxisUnit={yAxisUnit || ''}
graphType={panelType || PANEL_TYPES.TIME_SERIES}
setQueryStatus={setQueryStatus}
isCancelled={isChartQueryCancelled}
onFetchingStateChange={setIsLoadingAlertQuery}
/>
);
@@ -902,7 +919,15 @@ function FormAlertRules({
queryCategory={currentQuery.queryType}
setQueryCategory={onQueryCategoryChange}
alertType={alertType || AlertTypes.METRICS_BASED_ALERT}
runQuery={(): void => handleRunQuery()}
runQuery={(): void => {
setIsChartQueryCancelled(false);
ruleCache.invalidateQueries([
REACT_QUERY_KEY.ALERT_RULES_CHART_PREVIEW,
]);
handleRunQuery();
}}
isLoadingQueries={isLoadingAlertQuery}
handleCancelQuery={handleCancelAlertQuery}
alertDef={alertDef}
panelType={panelType || PANEL_TYPES.TIME_SERIES}
key={currentQuery.queryType}

View File

@@ -6,6 +6,7 @@ import React, {
useRef,
useState,
} from 'react';
import { useQueryClient } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux'; // old code, TODO: fix this correctly
import {
@@ -18,6 +19,7 @@ import cx from 'classnames';
import { ToggleGraphProps } from 'components/Graph/types';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import Spinner from 'components/Spinner';
import TimePreference from 'components/TimePreferenceDropDown';
import WarningPopover from 'components/WarningPopover/WarningPopover';
@@ -86,6 +88,7 @@ function FullView({
const fullViewRef = useRef<HTMLDivElement>(null);
const { handleRunQuery } = useQueryBuilder();
const queryClient = useQueryClient();
useEffect(() => {
setCurrentGraphRef(fullViewRef);
@@ -203,8 +206,8 @@ function FullView({
});
}, [selectedPanelType]);
const response = useGetQueryRange(requestData, ENTITY_VERSION_V5, {
queryKey: [
const queryRangeKey = useMemo(
() => [
widget?.query,
selectedPanelType,
requestData,
@@ -212,10 +215,28 @@ function FullView({
minTime,
maxTime,
],
[widget?.query, selectedPanelType, requestData, version, minTime, maxTime],
);
const response = useGetQueryRange(requestData, ENTITY_VERSION_V5, {
queryKey: queryRangeKey,
enabled: !isDependedDataLoaded,
keepPreviousData: true,
});
const [isCancelled, setIsCancelled] = useState(false);
useEffect(() => {
if (response.isFetching) {
setIsCancelled(false);
}
}, [response.isFetching]);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries(queryRangeKey);
setIsCancelled(true);
}, [queryClient, queryRangeKey]);
const onDragSelect = useCallback((start: number, end: number): void => {
const startTimestamp = Math.trunc(start);
const endTimestamp = Math.trunc(end);
@@ -354,6 +375,8 @@ function FullView({
onStageRunQuery={(): void => {
handleRunQuery();
}}
isLoadingQueries={response.isFetching}
handleCancelQuery={handleCancelQuery}
/>
</>
)}
@@ -386,23 +409,27 @@ function FullView({
}}
/>
)}
<PanelWrapper
panelMode={PanelMode.STANDALONE_VIEW}
queryResponse={response}
widget={widget}
setRequestData={setRequestData}
isFullViewMode
onToggleModelHandler={onToggleModelHandler}
setGraphVisibility={setGraphsVisibilityStates}
graphVisibility={graphsVisibilityStates}
onDragSelect={customOnDragSelect ?? onDragSelect}
tableProcessedDataRef={tableProcessedDataRef}
searchTerm={searchTerm}
onClickHandler={onClickHandler}
enableDrillDown={enableDrillDown}
selectedGraph={selectedPanelType}
onColumnWidthsChange={onColumnWidthsChange}
/>
{isCancelled ? (
<QueryCancelledPlaceholder subText='Click "Run Query" to reload the widget.' />
) : (
<PanelWrapper
panelMode={PanelMode.STANDALONE_VIEW}
queryResponse={response}
widget={widget}
setRequestData={setRequestData}
isFullViewMode
onToggleModelHandler={onToggleModelHandler}
setGraphVisibility={setGraphsVisibilityStates}
graphVisibility={graphsVisibilityStates}
onDragSelect={customOnDragSelect ?? onDragSelect}
tableProcessedDataRef={tableProcessedDataRef}
searchTerm={searchTerm}
onClickHandler={onClickHandler}
enableDrillDown={enableDrillDown}
selectedGraph={selectedPanelType}
onColumnWidthsChange={onColumnWidthsChange}
/>
)}
</GraphContainer>
</div>
</>

View File

@@ -1,4 +1,5 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
import * as Sentry from '@sentry/react';
import { Button, Tooltip } from 'antd';
import logEvent from 'api/common/logEvent';
@@ -7,6 +8,7 @@ import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types';
import { initialQueryMeterWithType, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
@@ -37,6 +39,20 @@ function Explorer(): JSX.Element {
currentQuery,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
const queryClient = useQueryClient();
const [isLoadingQueries, setIsLoadingQueries] = useState(false);
const [isCancelled, setIsCancelled] = useState(false);
useEffect(() => {
if (isLoadingQueries) {
setIsCancelled(false);
}
}, [isLoadingQueries]);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries([REACT_QUERY_KEY.GET_QUERY_RANGE]);
setIsCancelled(true);
}, [queryClient]);
const [showQuickFilters, setShowQuickFilters] = useState(true);
@@ -155,7 +171,11 @@ function Explorer(): JSX.Element {
<div className="explore-header-right-actions">
<DateTimeSelector showAutoRefresh />
<RightToolbarActions onStageRunQuery={(): void => handleRunQuery()} />
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery()}
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
/>
</div>
</div>
<QueryBuilderV2
@@ -171,7 +191,10 @@ function Explorer(): JSX.Element {
/>
<div className="explore-content">
<TimeSeries />
<TimeSeries
onFetchingStateChange={setIsLoadingQueries}
isCancelled={isCancelled}
/>
</div>
</div>
<ExplorerOptionWrapper

View File

@@ -1,43 +0,0 @@
import { Button } from 'antd';
import logEvent from 'api/common/logEvent';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { QueryBuilder } from 'container/QueryBuilder';
import { ButtonWrapper } from 'container/TracesExplorer/QuerySection/styles';
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { DataSource } from 'types/common/queryBuilder';
import { MeterExplorerEventKeys, MeterExplorerEvents } from '../events';
function QuerySection(): JSX.Element {
const { handleRunQuery } = useQueryBuilder();
const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.TIME_SERIES);
return (
<div className="query-section">
<QueryBuilder
panelType={panelTypes}
config={{ initialDataSource: DataSource.METRICS, queryVariant: 'static' }}
version="v4"
actions={
<ButtonWrapper>
<Button
onClick={(): void => {
handleRunQuery();
logEvent(MeterExplorerEvents.QueryBuilderQueryChanged, {
[MeterExplorerEventKeys.Tab]: 'explorer',
});
}}
type="primary"
>
Run Query
</Button>
</ButtonWrapper>
}
/>
</div>
);
}
export default QuerySection;

View File

@@ -1,10 +1,12 @@
import { useMemo } from 'react';
import { useEffect, useMemo } from 'react';
import { useQueries } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux';
import { isAxiosError } from 'axios';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueryMeterWithType, PANEL_TYPES } from 'constants/queryBuilder';
import { MAX_QUERY_RETRIES } from 'constants/reactQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import EmptyMetricsSearch from 'container/MetricsExplorer/Explorer/EmptyMetricsSearch';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter';
@@ -21,7 +23,15 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
function TimeSeries(): JSX.Element {
interface TimeSeriesProps {
onFetchingStateChange?: (isFetching: boolean) => void;
isCancelled?: boolean;
}
function TimeSeries({
onFetchingStateChange,
isCancelled = false,
}: TimeSeriesProps): JSX.Element {
const { stagedQuery, currentQuery } = useQueryBuilder();
const { yAxisUnit, onUnitChange } = useUrlYAxisUnit('');
@@ -67,7 +77,11 @@ function TimeSeries(): JSX.Element {
minTime,
index,
],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
queryFn: ({
signal,
}: {
signal?: AbortSignal;
}): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(
{
query: payload,
@@ -79,9 +93,15 @@ function TimeSeries(): JSX.Element {
},
},
ENTITY_VERSION_V5,
undefined,
signal,
),
enabled: !!payload,
retry: (failureCount: number, error: Error): boolean => {
retry: (failureCount: number, error: unknown): boolean => {
if (isAxiosError(error) && error.code === 'ERR_CANCELED') {
return false;
}
let status: number | undefined;
if (error instanceof APIError) {
@@ -94,7 +114,7 @@ function TimeSeries(): JSX.Element {
return false;
}
return failureCount < 3;
return failureCount < MAX_QUERY_RETRIES;
},
onError: (error: APIError): void => {
showErrorModal(error);
@@ -102,6 +122,11 @@ function TimeSeries(): JSX.Element {
})),
);
const isFetching = queries.some((q) => q.isFetching);
useEffect(() => {
onFetchingStateChange?.(isFetching);
}, [isFetching, onFetchingStateChange]);
const data = useMemo(() => queries.map(({ data }) => data) ?? [], [queries]);
const responseData = useMemo(
@@ -122,7 +147,11 @@ function TimeSeries(): JSX.Element {
<BuilderUnitsFilter onChange={onUnitChange} yAxisUnit={yAxisUnit} />
<div className="time-series-container">
{!hasMetricSelected && <EmptyMetricsSearch />}
{hasMetricSelected &&
{isCancelled && hasMetricSelected && (
<QueryCancelledPlaceholder subText='Click "Run Query" to load metrics.' />
)}
{!isCancelled &&
hasMetricSelected &&
responseData.map((datapoint, index) => (
<div
className="time-series-view-panel"

View File

@@ -1,4 +1,5 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import { Switch, Tooltip } from 'antd';
@@ -6,6 +7,7 @@ import logEvent from 'api/common/logEvent';
import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
import WarningPopover from 'components/WarningPopover/WarningPopover';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
@@ -54,6 +56,21 @@ function Explorer(): JSX.Element {
const { handleExplorerTabChange } = useHandleExplorerTabChange();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(false);
const queryClient = useQueryClient();
const [isLoadingQueries, setIsLoadingQueries] = useState(false);
const [isCancelled, setIsCancelled] = useState(false);
useEffect(() => {
if (isLoadingQueries) {
setIsCancelled(false);
}
}, [isLoadingQueries]);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries([REACT_QUERY_KEY.GET_QUERY_RANGE]);
setIsCancelled(true);
}, [queryClient]);
const metricNames = useMemo(() => {
const currentMetricNames: string[] = [];
stagedQuery?.builder.queryData.forEach((query) => {
@@ -307,7 +324,11 @@ function Explorer(): JSX.Element {
<div className="explore-header-right-actions">
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
<DateTimeSelector showAutoRefresh />
<RightToolbarActions onStageRunQuery={(): void => handleRunQuery()} />
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery()}
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
/>
</div>
</div>
<QueryBuilderV2
@@ -319,6 +340,7 @@ function Explorer(): JSX.Element {
/>
<div className="explore-content">
<TimeSeries
onFetchingStateChange={setIsLoadingQueries}
showOneChartPerQuery={showOneChartPerQuery}
setWarning={setWarning}
areAllMetricUnitsSame={areAllMetricUnitsSame}
@@ -331,6 +353,7 @@ function Explorer(): JSX.Element {
yAxisUnit={yAxisUnit}
setYAxisUnit={setYAxisUnit}
showYAxisUnitSelector={showYAxisUnitSelector}
isCancelled={isCancelled}
/>
</div>
</div>

View File

@@ -1,7 +1,10 @@
import { Button } from 'antd';
import { useCallback } from 'react';
import { useIsFetching, useQueryClient } from 'react-query';
import logEvent from 'api/common/logEvent';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { QueryBuilder } from 'container/QueryBuilder';
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
import { ButtonWrapper } from 'container/TracesExplorer/QuerySection/styles';
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
@@ -11,9 +14,16 @@ import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
function QuerySection(): JSX.Element {
const { handleRunQuery } = useQueryBuilder();
const queryClient = useQueryClient();
const panelTypes = useGetPanelTypesQueryParam(PANEL_TYPES.TIME_SERIES);
const isLoadingQueries = useIsFetching([REACT_QUERY_KEY.GET_QUERY_RANGE]) > 0;
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries([REACT_QUERY_KEY.GET_QUERY_RANGE]);
}, [queryClient]);
return (
<div className="query-section">
<QueryBuilder
@@ -22,17 +32,16 @@ function QuerySection(): JSX.Element {
version="v4"
actions={
<ButtonWrapper>
<Button
onClick={(): void => {
<RunQueryBtn
onStageRunQuery={(): void => {
handleRunQuery();
logEvent(MetricsExplorerEvents.QueryBuilderQueryChanged, {
[MetricsExplorerEventKeys.Tab]: 'explorer',
});
}}
type="primary"
>
Run Query
</Button>
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
/>
</ButtonWrapper>
}
/>

View File

@@ -1,4 +1,4 @@
import { useMemo } from 'react';
import { useEffect, useMemo } from 'react';
import { useQueries, useQueryClient } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux';
@@ -11,10 +11,12 @@ import {
} from 'api/generated/services/metrics';
import { isAxiosError } from 'axios';
import classNames from 'classnames';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { MAX_QUERY_RETRIES } from 'constants/reactQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
@@ -36,6 +38,7 @@ import {
} from './utils';
function TimeSeries({
onFetchingStateChange,
showOneChartPerQuery,
setWarning,
isMetricUnitsLoading,
@@ -46,6 +49,7 @@ function TimeSeries({
setYAxisUnit,
showYAxisUnitSelector,
metrics,
isCancelled = false,
}: TimeSeriesProps): JSX.Element {
const { stagedQuery, currentQuery } = useQueryBuilder();
@@ -98,7 +102,11 @@ function TimeSeries({
minTime,
index,
],
queryFn: (): Promise<SuccessResponse<MetricRangePayloadProps>> =>
queryFn: ({
signal,
}: {
signal?: AbortSignal;
}): Promise<SuccessResponse<MetricRangePayloadProps>> =>
GetMetricQueryRange(
{
query: payload,
@@ -111,9 +119,15 @@ function TimeSeries({
},
// ENTITY_VERSION_V4,
ENTITY_VERSION_V5,
undefined,
signal,
),
enabled: !!payload,
retry: (failureCount: number, error: Error): boolean => {
retry: (failureCount: number, error: unknown): boolean => {
if (isAxiosError(error) && error.code === 'ERR_CANCELED') {
return false;
}
let status: number | undefined;
if (error instanceof APIError) {
@@ -126,11 +140,16 @@ function TimeSeries({
return false;
}
return failureCount < 3;
return failureCount < MAX_QUERY_RETRIES;
},
})),
);
const isFetching = queries.some((q) => q.isFetching);
useEffect(() => {
onFetchingStateChange?.(isFetching);
}, [isFetching, onFetchingStateChange]);
const data = useMemo(() => queries.map(({ data }) => data) ?? [], [queries]);
const responseData = useMemo(
@@ -231,7 +250,11 @@ function TimeSeries({
})}
>
{metricNames.length === 0 && <EmptyMetricsSearch />}
{metricNames.length > 0 &&
{isCancelled && metricNames.length > 0 && (
<QueryCancelledPlaceholder subText='Click "Run Query" to load metrics.' />
)}
{!isCancelled &&
metricNames.length > 0 &&
responseData.map((datapoint, index) => {
const isQueryDataItem = index < metricNames.length;
const metricName = isQueryDataItem ? metricNames[index] : undefined;

View File

@@ -3,6 +3,7 @@ import { MetricsexplorertypesMetricMetadataDTO } from 'api/generated/services/si
import { Warning } from 'types/api';
export interface TimeSeriesProps {
onFetchingStateChange?: (isFetching: boolean) => void;
showOneChartPerQuery: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
areAllMetricUnitsSame: boolean;
@@ -15,4 +16,5 @@ export interface TimeSeriesProps {
yAxisUnit: string | undefined;
setYAxisUnit: (unit: string) => void;
showYAxisUnitSelector: boolean;
isCancelled?: boolean;
}

View File

@@ -4,9 +4,25 @@
// Full-height fallback shown in place of the inspect-metrics chart area
// (loading skeleton, error, empty, or query-cancelled placeholder).
// Column layout: a fixed-height header spacer on top, then a centered body.
.inspect-metrics-fallback {
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
gap: 32px;
height: 100%;
.inspect-metrics-fallback-header-placeholder {
// Reserve the same vertical space the GraphView header occupies
// (antd middle button height) so swapping chart ↔ fallback causes
// no layout shift.
height: 32px;
flex-shrink: 0;
}
.inspect-metrics-fallback-body {
// Takes the remaining height below the header spacer and centers
// its single child (Skeleton / Empty / QueryCancelledPlaceholder).
flex: 1;
display: flex;
align-items: center;
justify-content: center;
// Match the chart's minimum height so the drawer doesn't collapse
// while in a fallback state. -- assumes GraphView renders at ≥520px;
// TODO(review): confirm against GraphView's chart container styles.
min-height: 520px;
}
}
.inspect-metrics-title {

View File

@@ -1,9 +1,12 @@
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQueryClient } from 'react-query';
import * as Sentry from '@sentry/react';
import { Color } from '@signozhq/design-tokens';
import { Button, Drawer, Empty, Skeleton, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetMetricMetadata } from 'api/generated/services/metrics';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -109,6 +112,28 @@ function Inspect({
reset,
} = useInspectMetrics(appliedMetricName);
const [isCancelled, setIsCancelled] = useState(false);
// Auto-reset isCancelled only on the rising edge of a new fetch
// (transition from not-loading → loading). Watching `isLoading` directly
// races with the cancel flow — when the user cancels mid-fetch, loading
// is still true in the render right after setIsCancelled(true), which
// would immediately reset it.
const wasLoadingRef = useRef(false);
useEffect(() => {
const nowLoading = isInspectMetricsLoading || isInspectMetricsRefetching;
if (!wasLoadingRef.current && nowLoading) {
setIsCancelled(false);
}
wasLoadingRef.current = nowLoading;
}, [isInspectMetricsLoading, isInspectMetricsRefetching]);
const queryClient = useQueryClient();
const handleCancelInspectQuery = useCallback(() => {
queryClient.cancelQueries(REACT_QUERY_KEY.GET_INSPECT_METRICS_DETAILS);
setIsCancelled(true);
}, [queryClient]);
const handleDispatchMetricInspectionOptions = useCallback(
(action: MetricInspectionAction): void => {
dispatchMetricInspectionOptions(action);
@@ -167,96 +192,66 @@ function Inspect({
setExpandedViewOptions(null);
}, [inspectionStep]);
const content = useMemo(() => {
if (isInspectMetricsLoading && !isInspectMetricsRefetching) {
return (
<div
data-testid="inspect-metrics-loading"
className="inspect-metrics-fallback"
>
<Skeleton active />
</div>
const chartArea = useMemo(() => {
const renderFallback = (testId: string, body: JSX.Element): JSX.Element => (
<div data-testid={testId} className="inspect-metrics-fallback">
<div className="inspect-metrics-fallback-header-placeholder" />
<div className="inspect-metrics-fallback-body">{body}</div>
</div>
);
// Cancelled state takes precedence over any react-query state — ensures
// the placeholder shows immediately on cancel, regardless of whether
// isLoading/isRefetching has settled yet.
if (isCancelled) {
return renderFallback(
'inspect-metrics-cancelled',
<QueryCancelledPlaceholder subText='Click "Run Query" to see inspect results.' />,
);
}
if (isInspectMetricsError) {
const errorMessage = 'Error loading inspect metrics.';
if (isInspectMetricsLoading && !isInspectMetricsRefetching) {
return renderFallback('inspect-metrics-loading', <Skeleton active />);
}
return (
<div
data-testid="inspect-metrics-error"
className="inspect-metrics-fallback"
>
<Empty description={errorMessage} />
</div>
if (isInspectMetricsError) {
return renderFallback(
'inspect-metrics-error',
<Empty description="Error loading inspect metrics." />,
);
}
if (!inspectMetricsTimeSeries.length) {
return (
<div
data-testid="inspect-metrics-empty"
className="inspect-metrics-fallback"
>
<Empty description="No time series found for this metric to inspect." />
</div>
return renderFallback(
'inspect-metrics-empty',
<Empty description="No time series found for this metric to inspect." />,
);
}
return (
<div className="inspect-metrics-content">
<div className="inspect-metrics-content-first-col">
<GraphView
inspectMetricsTimeSeries={aggregatedTimeSeries}
formattedInspectMetricsTimeSeries={formattedInspectMetricsTimeSeries}
resetInspection={resetInspection}
metricName={appliedMetricName}
metricUnit={selectedMetricUnit}
metricType={selectedMetricType}
spaceAggregationSeriesMap={spaceAggregationSeriesMap}
inspectionStep={inspectionStep}
setPopoverOptions={setPopoverOptions}
setShowExpandedView={setShowExpandedView}
showExpandedView={showExpandedView}
setExpandedViewOptions={setExpandedViewOptions}
popoverOptions={popoverOptions}
metricInspectionAppliedOptions={metricInspectionOptions.appliedOptions}
isInspectMetricsRefetching={isInspectMetricsRefetching}
/>
<QueryBuilder
currentMetricName={currentMetricName}
setCurrentMetricName={setCurrentMetricName}
setAppliedMetricName={setAppliedMetricName}
spaceAggregationLabels={spaceAggregationLabels}
currentMetricInspectionOptions={metricInspectionOptions.currentOptions}
dispatchMetricInspectionOptions={handleDispatchMetricInspectionOptions}
inspectionStep={inspectionStep}
inspectMetricsTimeSeries={inspectMetricsTimeSeries}
currentQuery={currentQueryData}
setCurrentQuery={setCurrentQueryData}
/>
</div>
<div className="inspect-metrics-content-second-col">
<Stepper
inspectionStep={inspectionStep}
resetInspection={resetInspection}
/>
{showExpandedView && (
<ExpandedView
options={expandedViewOptions}
spaceAggregationSeriesMap={spaceAggregationSeriesMap}
step={inspectionStep}
metricInspectionAppliedOptions={metricInspectionOptions.appliedOptions}
timeAggregatedSeriesMap={timeAggregatedSeriesMap}
/>
)}
</div>
</div>
<GraphView
inspectMetricsTimeSeries={aggregatedTimeSeries}
formattedInspectMetricsTimeSeries={formattedInspectMetricsTimeSeries}
resetInspection={resetInspection}
metricName={appliedMetricName}
metricUnit={selectedMetricUnit}
metricType={selectedMetricType}
spaceAggregationSeriesMap={spaceAggregationSeriesMap}
inspectionStep={inspectionStep}
setPopoverOptions={setPopoverOptions}
setShowExpandedView={setShowExpandedView}
showExpandedView={showExpandedView}
setExpandedViewOptions={setExpandedViewOptions}
popoverOptions={popoverOptions}
metricInspectionAppliedOptions={metricInspectionOptions.appliedOptions}
isInspectMetricsRefetching={isInspectMetricsRefetching}
/>
);
}, [
isInspectMetricsLoading,
isInspectMetricsRefetching,
isInspectMetricsError,
isCancelled,
inspectMetricsTimeSeries,
aggregatedTimeSeries,
formattedInspectMetricsTimeSeries,
@@ -312,7 +307,46 @@ function Inspect({
className="inspect-metrics-modal"
destroyOnClose
>
{content}
<div className="inspect-metrics-content">
<div className="inspect-metrics-content-first-col">
{chartArea}
<QueryBuilder
currentMetricName={currentMetricName}
setCurrentMetricName={setCurrentMetricName}
setAppliedMetricName={setAppliedMetricName}
spaceAggregationLabels={spaceAggregationLabels}
currentMetricInspectionOptions={metricInspectionOptions.currentOptions}
dispatchMetricInspectionOptions={handleDispatchMetricInspectionOptions}
inspectionStep={inspectionStep}
inspectMetricsTimeSeries={inspectMetricsTimeSeries}
currentQuery={currentQueryData}
setCurrentQuery={setCurrentQueryData}
isLoadingQueries={isInspectMetricsLoading || isInspectMetricsRefetching}
handleCancelQuery={handleCancelInspectQuery}
onRunQuery={(): void => {
setIsCancelled(false);
queryClient.invalidateQueries([
REACT_QUERY_KEY.GET_INSPECT_METRICS_DETAILS,
]);
}}
/>
</div>
<div className="inspect-metrics-content-second-col">
<Stepper
inspectionStep={inspectionStep}
resetInspection={resetInspection}
/>
{showExpandedView && (
<ExpandedView
options={expandedViewOptions}
spaceAggregationSeriesMap={spaceAggregationSeriesMap}
step={inspectionStep}
metricInspectionAppliedOptions={metricInspectionOptions.appliedOptions}
timeAggregatedSeriesMap={timeAggregatedSeriesMap}
/>
)}
</div>
</div>
</Drawer>
</Sentry.ErrorBoundary>
);

View File

@@ -20,13 +20,22 @@ function QueryBuilder({
inspectMetricsTimeSeries,
currentQuery,
setCurrentQuery,
isLoadingQueries,
handleCancelQuery,
onRunQuery,
}: QueryBuilderProps): JSX.Element {
const applyInspectionOptions = useCallback(() => {
onRunQuery?.();
setAppliedMetricName(currentMetricName ?? '');
dispatchMetricInspectionOptions({
type: 'APPLY_METRIC_INSPECTION_OPTIONS',
});
}, [currentMetricName, setAppliedMetricName, dispatchMetricInspectionOptions]);
}, [
currentMetricName,
setAppliedMetricName,
dispatchMetricInspectionOptions,
onRunQuery,
]);
return (
<div className="inspect-metrics-query-builder">
@@ -39,7 +48,11 @@ function QueryBuilder({
>
Query Builder
</Button>
<RunQueryBtn onStageRunQuery={applyInspectionOptions} />
<RunQueryBtn
onStageRunQuery={applyInspectionOptions}
handleCancelQuery={handleCancelQuery}
isLoadingQueries={isLoadingQueries}
/>
</div>
<Card className="inspect-metrics-query-builder-content">
<MetricNameSearch

View File

@@ -103,6 +103,8 @@ describe('QueryBuilder', () => {
filterExpression: '',
} as any,
setCurrentQuery: jest.fn(),
isLoadingQueries: false,
handleCancelQuery: jest.fn(),
};
beforeEach(() => {

View File

@@ -65,6 +65,9 @@ export interface QueryBuilderProps {
inspectMetricsTimeSeries: InspectMetricsSeries[];
currentQuery: IBuilderQuery;
setCurrentQuery: (query: IBuilderQuery) => void;
isLoadingQueries: boolean;
handleCancelQuery: () => void;
onRunQuery?: () => void;
}
export interface MetricNameSearchProps {

View File

@@ -1,6 +1,9 @@
import { useCallback, useEffect, useMemo, useReducer, useState } from 'react';
import { useQuery } from 'react-query';
import { inspectMetrics } from 'api/generated/services/metrics';
import { isAxiosError } from 'axios';
import { MAX_QUERY_RETRIES } from 'constants/reactQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { themeColors } from 'constants/theme';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
@@ -107,7 +110,7 @@ export function useInspectMetrics(
isRefetching: isInspectMetricsRefetching,
} = useQuery({
queryKey: [
'inspectMetrics',
REACT_QUERY_KEY.GET_INSPECT_METRICS_DETAILS,
metricName,
start,
end,
@@ -127,6 +130,12 @@ export function useInspectMetrics(
),
enabled: !!metricName,
keepPreviousData: true,
retry: (failureCount: number, error: Error): boolean => {
if (isAxiosError(error) && error.code === 'ERR_CANCELED') {
return false;
}
return failureCount < MAX_QUERY_RETRIES;
},
});
const inspectMetricsData = useMemo(

View File

@@ -12,6 +12,8 @@ function MetricsSearch({
currentQueryFilterExpression,
setCurrentQueryFilterExpression,
isLoading,
handleCancelQuery,
onRunQuery,
}: MetricsSearchProps): JSX.Element {
const handleOnChange = useCallback(
(expression: string): void => {
@@ -22,7 +24,8 @@ function MetricsSearch({
const handleStageAndRunQuery = useCallback(() => {
onChange(currentQueryFilterExpression);
}, [currentQueryFilterExpression, onChange]);
onRunQuery?.();
}, [currentQueryFilterExpression, onChange, onRunQuery]);
const handleRunQuery = useCallback(
(expression: string): void => {
@@ -53,6 +56,7 @@ function MetricsSearch({
<RunQueryBtn
onStageRunQuery={handleStageAndRunQuery}
isLoadingQueries={isLoading}
handleCancelQuery={handleCancelQuery}
/>
<div className="metrics-search-options">
<DateTimeSelectionV2

View File

@@ -4,6 +4,7 @@ import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { useSelector } from 'react-redux'; // old code, TODO: fix this correctly
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import { Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
import {
@@ -17,6 +18,7 @@ import {
Querybuildertypesv5OrderByDTO,
Querybuildertypesv5OrderDirectionDTO,
} from 'api/generated/services/sigNoz.schemas';
import eyesEmojiUrl from 'assets/Images/eyesEmoji.svg';
import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';
import { initialQueriesMap } from 'constants/queryBuilder';
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
@@ -104,6 +106,8 @@ function Summary(): JSX.Element {
setCurrentQueryFilterExpression,
] = useState<string>(appliedFilterExpression);
const [isCancelled, setIsCancelled] = useState<boolean>(false);
useEffect(() => {
setCurrentQueryFilterExpression(appliedFilterExpression);
}, [appliedFilterExpression]);
@@ -164,6 +168,7 @@ function Summary(): JSX.Element {
isLoading: isGetMetricsStatsLoading,
isError: isGetMetricsStatsError,
error: metricsStatsError,
reset: resetMetricsStats,
} = useGetMetricsStats();
const {
@@ -172,6 +177,7 @@ function Summary(): JSX.Element {
isLoading: isGetMetricsTreemapLoading,
isError: isGetMetricsTreemapError,
error: metricsTreemapError,
reset: resetMetricsTreemap,
} = useGetMetricsTreemap();
const metricsStatsApiError = useMemo(
@@ -196,6 +202,40 @@ function Summary(): JSX.Element {
});
}, [metricsTreemapQuery, getMetricsTreemap]);
const handleCancelQuery = useCallback(() => {
resetMetricsStats();
resetMetricsTreemap();
setCurrentQueryFilterExpression(appliedFilterExpression);
setIsCancelled(true);
}, [
resetMetricsStats,
resetMetricsTreemap,
setCurrentQueryFilterExpression,
appliedFilterExpression,
]);
const handleRunQuery = useCallback(() => {
setIsCancelled(false);
getMetricsStats({
data: {
...metricsListQuery,
filter: { expression: currentQueryFilterExpression },
},
});
getMetricsTreemap({
data: {
...metricsTreemapQuery,
filter: { expression: currentQueryFilterExpression },
},
});
}, [
getMetricsStats,
getMetricsTreemap,
metricsListQuery,
metricsTreemapQuery,
currentQueryFilterExpression,
]);
const handleFilterChange = useCallback(
(expression: string) => {
const newFilters: TagFilter = {
@@ -330,11 +370,19 @@ function Summary(): JSX.Element {
!isGetMetricsTreemapLoading &&
!isGetMetricsTreemapError;
const isLoadingQueries =
isGetMetricsStatsLoading || isGetMetricsTreemapLoading;
const showFullScreenLoading =
(isGetMetricsStatsLoading || isGetMetricsTreemapLoading) &&
isLoadingQueries &&
formattedMetricsData.length === 0 &&
!treeMapData?.data[heatmapView]?.length;
const showNoMetrics =
isMetricsListDataEmpty &&
isMetricsTreeMapDataEmpty &&
!appliedFilterExpression;
return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-summary-tab">
@@ -343,13 +391,26 @@ function Summary(): JSX.Element {
onChange={handleFilterChange}
currentQueryFilterExpression={currentQueryFilterExpression}
setCurrentQueryFilterExpression={setCurrentQueryFilterExpression}
isLoading={isGetMetricsStatsLoading || isGetMetricsTreemapLoading}
isLoading={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
onRunQuery={handleRunQuery}
/>
{showFullScreenLoading ? (
<MetricsLoading />
) : isMetricsListDataEmpty &&
isMetricsTreeMapDataEmpty &&
!appliedFilterExpression ? (
) : isCancelled ? (
<div className="no-logs-container">
<div className="no-logs-container-content">
<img className="eyes-emoji" src={eyesEmojiUrl} alt="eyes emoji" />
<Typography className="no-logs-text">
Query cancelled.
<span className="sub-text">
{' '}
Click &quot;Run Query&quot; to load metrics.
</span>
</Typography>
</div>
</div>
) : showNoMetrics ? (
<NoLogs dataSource={DataSource.METRICS} />
) : (
<>

View File

@@ -33,6 +33,8 @@ export interface MetricsSearchProps {
currentQueryFilterExpression: string;
setCurrentQueryFilterExpression: (expression: string) => void;
isLoading: boolean;
handleCancelQuery: () => void;
onRunQuery: () => void;
}
export interface MetricsTreemapProps {

View File

@@ -1,5 +1,4 @@
import { useCallback, useEffect, useMemo } from 'react';
import { QueryKey } from 'react-query';
import { Color } from '@signozhq/design-tokens';
import { Button, Tabs, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
@@ -25,8 +24,8 @@ import PromQLQueryContainer from './QueryBuilder/promQL';
import './QuerySection.styles.scss';
function QuerySection({
selectedGraph,
queryRangeKey,
isLoadingQueries,
handleCancelQuery,
selectedWidget,
dashboardVersion,
dashboardId,
@@ -179,7 +178,7 @@ function QuerySection({
label="Stage & Run Query"
onStageRunQuery={handleRunQuery}
isLoadingQueries={isLoadingQueries}
queryRangeKey={queryRangeKey}
handleCancelQuery={handleCancelQuery}
/>
</span>
}
@@ -191,8 +190,8 @@ function QuerySection({
interface QueryProps {
selectedGraph: PANEL_TYPES;
queryRangeKey?: QueryKey;
isLoadingQueries?: boolean;
isLoadingQueries: boolean;
handleCancelQuery: () => void;
selectedWidget: Widgets;
dashboardVersion?: string;
dashboardId?: string;

View File

@@ -1,5 +1,6 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import WarningPopover from 'components/WarningPopover/WarningPopover';
import { Card } from 'container/GridCardLayout/styles';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
@@ -22,6 +23,7 @@ function WidgetGraph({
selectedWidget,
isLoadingPanelData,
enableDrillDown = false,
isCancelled = false,
}: WidgetGraphContainerProps): JSX.Element {
const { currentQuery } = useQueryBuilder();
@@ -46,20 +48,24 @@ function WidgetGraph({
</div>
<DateTimeSelectionV2 showAutoRefresh={false} hideShareModal />
</div>
{queryResponse.error && (
{!isCancelled && queryResponse.error && (
<AlertIconContainer color="red" title={queryResponse.error.message}>
<InfoCircleOutlined />
</AlertIconContainer>
)}
<WidgetGraphComponent
isLoadingPanelData={isLoadingPanelData}
selectedGraph={selectedGraph}
queryResponse={queryResponse}
setRequestData={setRequestData}
selectedWidget={selectedWidget}
enableDrillDown={enableDrillDown}
/>
{isCancelled ? (
<QueryCancelledPlaceholder subText='Click "Run Query" to reload the chart.' />
) : (
<WidgetGraphComponent
isLoadingPanelData={isLoadingPanelData}
selectedGraph={selectedGraph}
queryResponse={queryResponse}
setRequestData={setRequestData}
selectedWidget={selectedWidget}
enableDrillDown={enableDrillDown}
/>
)}
</Container>
);
}

View File

@@ -1,5 +1,5 @@
import { memo, useEffect } from 'react';
import { useMemo } from 'react';
import { memo, useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux';
import { ENTITY_VERSION_V5 } from 'constants/app';
@@ -34,6 +34,7 @@ function LeftContainer({
isNewPanel = false,
}: WidgetGraphProps): JSX.Element {
const { stagedQuery } = useQueryBuilder();
const queryClient = useQueryClient();
const { selectedTime: globalSelectedInterval, minTime, maxTime } = useSelector<
AppState,
@@ -49,12 +50,25 @@ function LeftContainer({
],
[globalSelectedInterval, requestData, minTime, maxTime],
);
const [isCancelled, setIsCancelled] = useState(false);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries(queryRangeKey);
setIsCancelled(true);
}, [queryClient, queryRangeKey]);
const queryResponse = useGetQueryRange(requestData, ENTITY_VERSION_V5, {
enabled: !!stagedQuery,
queryKey: queryRangeKey,
keepPreviousData: true,
});
useEffect(() => {
if (queryResponse.isFetching) {
setIsCancelled(false);
}
}, [queryResponse.isFetching]);
// Update parent component with query response for legend colors
useEffect(() => {
if (setQueryResponse) {
@@ -71,12 +85,13 @@ function LeftContainer({
selectedWidget={selectedWidget}
isLoadingPanelData={isLoadingPanelData}
enableDrillDown={enableDrillDown}
isCancelled={isCancelled}
/>
<QueryContainer className="query-section-left-container">
<QuerySection
selectedGraph={selectedGraph}
queryRangeKey={queryRangeKey}
isLoadingQueries={queryResponse.isFetching}
handleCancelQuery={handleCancelQuery}
selectedWidget={selectedWidget}
dashboardVersion={ENTITY_VERSION_V5}
dashboardId={dashboardData?.id}

View File

@@ -50,4 +50,5 @@ export type WidgetGraphContainerProps = {
selectedWidget: Widgets;
isLoadingPanelData: boolean;
enableDrillDown?: boolean;
isCancelled?: boolean;
};

View File

@@ -1,5 +1,3 @@
import { useCallback } from 'react';
import { QueryKey, useIsFetching, useQueryClient } from 'react-query';
import { Button } from '@signozhq/ui';
import cx from 'classnames';
import {
@@ -12,14 +10,23 @@ import {
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';
import './RunQueryBtn.scss';
interface RunQueryBtnProps {
type RunQueryBtnProps = {
className?: string;
label?: string;
isLoadingQueries?: boolean;
handleCancelQuery?: () => void;
onStageRunQuery?: () => void;
queryRangeKey?: QueryKey;
}
disabled?: boolean;
} & (
| {
onStageRunQuery: () => void;
handleCancelQuery: () => void;
isLoadingQueries: boolean;
}
| {
onStageRunQuery?: never;
handleCancelQuery?: never;
isLoadingQueries?: never;
}
);
function RunQueryBtn({
className,
@@ -27,26 +34,10 @@ function RunQueryBtn({
isLoadingQueries,
handleCancelQuery,
onStageRunQuery,
queryRangeKey,
disabled,
}: RunQueryBtnProps): JSX.Element {
const isMac = getUserOperatingSystem() === UserOperatingSystem.MACOS;
const queryClient = useQueryClient();
const isKeyFetchingCount = useIsFetching(
queryRangeKey as QueryKey | undefined,
);
const isLoading =
typeof isLoadingQueries === 'boolean'
? isLoadingQueries
: isKeyFetchingCount > 0;
const onCancel = useCallback(() => {
if (handleCancelQuery) {
return handleCancelQuery();
}
if (queryRangeKey) {
queryClient.cancelQueries(queryRangeKey);
}
}, [handleCancelQuery, queryClient, queryRangeKey]);
const isLoading = isLoadingQueries ?? false;
return isLoading ? (
<Button
@@ -54,7 +45,7 @@ function RunQueryBtn({
type="button"
prefix={<Loader2 size={14} className="loading-icon animate-spin" />}
className={cx('cancel-query-btn', className)}
onClick={onCancel}
onClick={handleCancelQuery}
>
Cancel
</Button>
@@ -63,7 +54,7 @@ function RunQueryBtn({
color="primary"
type="button"
className={cx('run-query-btn', className)}
disabled={isLoading || !onStageRunQuery}
disabled={disabled}
onClick={onStageRunQuery}
prefix={<Play size={14} />}
>

View File

@@ -1,18 +1,8 @@
// frontend/src/container/QueryBuilder/components/RunQueryBtn/__tests__/RunQueryBtn.test.tsx
import { fireEvent, render, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import RunQueryBtn from '../RunQueryBtn';
jest.mock('react-query', () => {
const actual = jest.requireActual('react-query');
return {
...actual,
useIsFetching: jest.fn(),
useQueryClient: jest.fn(),
};
});
import { useIsFetching, useQueryClient } from 'react-query';
// Mock OS util
jest.mock('utils/getUserOS', () => ({
getUserOperatingSystem: jest.fn(),
@@ -26,79 +16,60 @@ describe('RunQueryBtn', () => {
(getUserOperatingSystem as jest.Mock).mockReturnValue(
UserOperatingSystem.MACOS,
);
(useIsFetching as jest.Mock).mockReturnValue(0);
(useQueryClient as jest.Mock).mockReturnValue({
cancelQueries: jest.fn(),
});
});
test('uses isLoadingQueries prop over useIsFetching', () => {
// Simulate fetching but prop forces not loading
(useIsFetching as jest.Mock).mockReturnValue(1);
test('renders run state and triggers on click', async () => {
const user = userEvent.setup();
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} isLoadingQueries={false} />);
// Should show "Run Query" (not cancel)
const runBtn = screen.getByRole('button', { name: /run query/i });
expect(runBtn).toBeInTheDocument();
expect(runBtn).toBeEnabled();
});
test('fallback cancel: uses handleCancelQuery when no key provided', () => {
(useIsFetching as jest.Mock).mockReturnValue(0);
const cancelQueries = jest.fn();
(useQueryClient as jest.Mock).mockReturnValue({ cancelQueries });
const onCancel = jest.fn();
render(<RunQueryBtn isLoadingQueries handleCancelQuery={onCancel} />);
const cancelBtn = screen.getByRole('button', { name: /cancel/i });
fireEvent.click(cancelBtn);
expect(onCancel).toHaveBeenCalledTimes(1);
expect(cancelQueries).not.toHaveBeenCalled();
});
test('renders run state and triggers on click', () => {
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} />);
render(
<RunQueryBtn
onStageRunQuery={onRun}
handleCancelQuery={onCancel}
isLoadingQueries={false}
/>,
);
const btn = screen.getByRole('button', { name: /run query/i });
expect(btn).toBeEnabled();
fireEvent.click(btn);
await user.click(btn);
expect(onRun).toHaveBeenCalledTimes(1);
});
test('disabled when onStageRunQuery is undefined', () => {
render(<RunQueryBtn />);
expect(screen.getByRole('button', { name: /run query/i })).toBeDisabled();
});
test('shows cancel state and calls handleCancelQuery', () => {
test('shows cancel state and calls handleCancelQuery', async () => {
const user = userEvent.setup();
const onRun = jest.fn();
const onCancel = jest.fn();
render(<RunQueryBtn isLoadingQueries handleCancelQuery={onCancel} />);
render(
<RunQueryBtn
onStageRunQuery={onRun}
handleCancelQuery={onCancel}
isLoadingQueries
/>,
);
const cancel = screen.getByRole('button', { name: /cancel/i });
fireEvent.click(cancel);
await user.click(cancel);
expect(onCancel).toHaveBeenCalledTimes(1);
});
test('derives loading from queryKey via useIsFetching and cancels via queryClient', () => {
(useIsFetching as jest.Mock).mockReturnValue(1);
const cancelQueries = jest.fn();
(useQueryClient as jest.Mock).mockReturnValue({ cancelQueries });
test('disabled when disabled prop is true', () => {
render(<RunQueryBtn disabled />);
expect(screen.getByRole('button', { name: /run query/i })).toBeDisabled();
});
const queryKey = ['GET_QUERY_RANGE', '1h', { some: 'req' }, 1, 2];
render(<RunQueryBtn queryRangeKey={queryKey} />);
// Button switches to cancel state
const cancelBtn = screen.getByRole('button', { name: /cancel/i });
expect(cancelBtn).toBeInTheDocument();
// Clicking cancel calls cancelQueries with the key
fireEvent.click(cancelBtn);
expect(cancelQueries).toHaveBeenCalledWith(queryKey);
test('disabled when no props provided', () => {
render(<RunQueryBtn />);
expect(
screen.getByRole('button', { name: /run query/i }),
).toBeInTheDocument();
});
test('shows Command + CornerDownLeft on mac', () => {
const { container } = render(
<RunQueryBtn onStageRunQuery={(): void => {}} />,
<RunQueryBtn
onStageRunQuery={jest.fn()}
handleCancelQuery={jest.fn()}
isLoadingQueries={false}
/>,
);
expect(container.querySelector('.lucide-command')).toBeInTheDocument();
expect(
@@ -111,7 +82,11 @@ describe('RunQueryBtn', () => {
UserOperatingSystem.WINDOWS,
);
const { container } = render(
<RunQueryBtn onStageRunQuery={(): void => {}} />,
<RunQueryBtn
onStageRunQuery={jest.fn()}
handleCancelQuery={jest.fn()}
isLoadingQueries={false}
/>,
);
expect(container.querySelector('.lucide-chevron-up')).toBeInTheDocument();
expect(container.querySelector('.lucide-command')).not.toBeInTheDocument();
@@ -121,8 +96,14 @@ describe('RunQueryBtn', () => {
});
test('renders custom label when provided', () => {
const onRun = jest.fn();
render(<RunQueryBtn onStageRunQuery={onRun} label="Stage & Run Query" />);
render(
<RunQueryBtn
onStageRunQuery={jest.fn()}
handleCancelQuery={jest.fn()}
isLoadingQueries={false}
label="Stage & Run Query"
/>,
);
expect(
screen.getByRole('button', { name: /stage & run query/i }),
).toBeInTheDocument();

View File

@@ -1,5 +1,4 @@
import { MutableRefObject, useEffect } from 'react';
import { useQueryClient } from 'react-query';
import { useEffect } from 'react';
import { LogsExplorerShortcuts } from 'constants/shortcuts/logsExplorerShortcuts';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
@@ -9,23 +8,19 @@ import './ToolbarActions.styles.scss';
interface RightToolbarActionsProps {
onStageRunQuery: () => void;
isLoadingQueries?: boolean;
listQueryKeyRef?: MutableRefObject<any>;
chartQueryKeyRef?: MutableRefObject<any>;
isLoadingQueries: boolean;
handleCancelQuery: () => void;
showLiveLogs?: boolean;
}
export default function RightToolbarActions({
onStageRunQuery,
isLoadingQueries,
listQueryKeyRef,
chartQueryKeyRef,
handleCancelQuery,
showLiveLogs,
}: RightToolbarActionsProps): JSX.Element {
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
const queryClient = useQueryClient();
useEffect(() => {
if (showLiveLogs) {
return;
@@ -42,20 +37,11 @@ export default function RightToolbarActions({
if (showLiveLogs) {
return (
<div className="right-toolbar-actions-container">
<RunQueryBtn />
<RunQueryBtn disabled />
</div>
);
}
const handleCancelQuery = (): void => {
if (listQueryKeyRef?.current) {
queryClient.cancelQueries(listQueryKeyRef.current);
}
if (chartQueryKeyRef?.current) {
queryClient.cancelQueries(chartQueryKeyRef.current);
}
};
return (
<div className="right-toolbar-actions-container">
<RunQueryBtn
@@ -68,8 +54,5 @@ export default function RightToolbarActions({
}
RightToolbarActions.defaultProps = {
isLoadingQueries: false,
listQueryKeyRef: null,
chartQueryKeyRef: null,
showLiveLogs: false,
};

View File

@@ -92,7 +92,12 @@ describe('ToolbarActions', () => {
const onStageRunQuery = jest.fn();
const { queryByText } = render(
<MockQueryClientProvider>
<RightToolbarActions onStageRunQuery={onStageRunQuery} />,
<RightToolbarActions
onStageRunQuery={onStageRunQuery}
isLoadingQueries={false}
handleCancelQuery={jest.fn()}
/>
,
</MockQueryClientProvider>,
);

View File

@@ -0,0 +1,282 @@
import { renderHook } from '@testing-library/react';
import ROUTES from 'constants/routes';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { getViewQuery } from '../drilldownUtils';
import { AggregateData } from '../useAggregateDrilldown';
import useBaseDrilldownNavigate, {
buildDrilldownUrl,
getRoute,
} from '../useBaseDrilldownNavigate';
// Captures navigations performed by the hook under test so assertions can
// inspect the URL and options passed to safeNavigate.
const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
	useSafeNavigate: (): { safeNavigate: typeof mockSafeNavigate } => ({ safeNavigate: mockSafeNavigate }),
}));

// Mock only getViewQuery; keep every other export of drilldownUtils real so
// the module under test exercises the genuine helpers.
jest.mock('../drilldownUtils', () => ({
	...jest.requireActual('../drilldownUtils'),
	getViewQuery: jest.fn(),
}));

// Typed handle to the mocked getViewQuery for per-test return-value control.
const mockGetViewQuery = getViewQuery as jest.Mock;
// ─── Fixtures ────────────────────────────────────────────────────────────────
// Minimal builder-mode query with a single metrics query named 'A'.
// NOTE(review): the `as any` casts sidestep importing the queryType/dataSource
// enums — confirm these string literals stay in sync with the real enum values.
const MOCK_QUERY: Query = {
	id: 'q1',
	queryType: 'builder' as any,
	builder: {
		queryData: [
			{
				queryName: 'A',
				dataSource: 'metrics' as any,
				groupBy: [],
				expression: '',
				disabled: false,
				functions: [],
				legend: '',
				having: [],
				limit: null,
				stepInterval: undefined,
				orderBy: [],
			},
		],
		queryFormulas: [],
		queryTraceOperator: [],
	},
	promql: [],
	clickhouse_sql: [],
};

// Default return value for the mocked getViewQuery: MOCK_QUERY with an empty
// filters object attached to its single query entry.
const MOCK_VIEW_QUERY: Query = {
	...MOCK_QUERY,
	builder: {
		...MOCK_QUERY.builder,
		queryData: [
			{
				...MOCK_QUERY.builder.queryData[0],
				filters: { items: [], op: 'AND' },
			},
		],
	},
};

// Aggregate drilldown context for query 'A': one filter plus an explicit
// time range (epoch values chosen so they are easy to assert in URLs).
const MOCK_AGGREGATE_DATA: AggregateData = {
	queryName: 'A',
	filters: [{ filterKey: 'service_name', filterValue: 'auth', operator: '=' }],
	timeRange: { startTime: 1000000, endTime: 2000000 },
};
// ─── getRoute ─────────────────────────────────────────────────────────────────
// getRoute maps known drilldown keys to explorer routes and falls back to ''.
describe('getRoute', () => {
	const routeCases: Array<[string, string]> = [
		['view_logs', ROUTES.LOGS_EXPLORER],
		['view_metrics', ROUTES.METRICS_EXPLORER],
		['view_traces', ROUTES.TRACES_EXPLORER],
	];

	routeCases.forEach(([key, expectedRoute]) => {
		it(`maps ${key} to the correct explorer route`, () => {
			expect(getRoute(key)).toBe(expectedRoute);
		});
	});

	it('returns empty string for an unknown key', () => {
		expect(getRoute('view_dashboard')).toBe('');
	});
});
// ─── buildDrilldownUrl ────────────────────────────────────────────────────────
// buildDrilldownUrl is the pure URL builder: these tests pin its null cases,
// the query params it emits, and the arguments it forwards to getViewQuery.
describe('buildDrilldownUrl', () => {
	beforeEach(() => {
		mockGetViewQuery.mockReturnValue(MOCK_VIEW_QUERY);
	});

	afterEach(() => {
		jest.clearAllMocks();
	});

	it('returns null for an unknown drilldown key', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_dashboard');
		expect(url).toBeNull();
	});

	it('returns null when getViewQuery returns null', () => {
		mockGetViewQuery.mockReturnValue(null);
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_logs');
		expect(url).toBeNull();
	});

	it('returns a URL starting with the logs explorer route for view_logs', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_logs');
		expect(url).not.toBeNull();
		expect(url).toContain(ROUTES.LOGS_EXPLORER);
	});

	it('returns a URL starting with the traces explorer route for view_traces', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_traces');
		expect(url).toContain(ROUTES.TRACES_EXPLORER);
	});

	it('includes compositeQuery param in the URL', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_logs');
		expect(url).toContain('compositeQuery=');
	});

	it('includes startTime and endTime when aggregateData has a timeRange', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_logs');
		expect(url).toContain('startTime=1000000');
		expect(url).toContain('endTime=2000000');
	});

	it('omits startTime and endTime when aggregateData has no timeRange', () => {
		// Strip timeRange from the fixture via rest destructuring.
		const { timeRange: _, ...withoutTimeRange } = MOCK_AGGREGATE_DATA;
		const url = buildDrilldownUrl(MOCK_QUERY, withoutTimeRange, 'view_logs');
		expect(url).not.toContain('startTime=');
		expect(url).not.toContain('endTime=');
	});

	it('includes summaryFilters param for view_metrics', () => {
		// Metrics explorer is the only route that gets the summaryFilters param.
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_metrics');
		expect(url).toContain(ROUTES.METRICS_EXPLORER);
		expect(url).toContain('summaryFilters=');
	});

	it('does not include summaryFilters param for non-metrics routes', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_logs');
		expect(url).not.toContain('summaryFilters=');
	});

	it('handles null aggregateData by passing empty filters and empty queryName', () => {
		const url = buildDrilldownUrl(MOCK_QUERY, null, 'view_logs');
		expect(url).not.toBeNull();
		expect(mockGetViewQuery).toHaveBeenCalledWith(MOCK_QUERY, [], 'view_logs', '');
	});

	it('passes aggregateData filters and queryName to getViewQuery', () => {
		buildDrilldownUrl(MOCK_QUERY, MOCK_AGGREGATE_DATA, 'view_logs');
		expect(mockGetViewQuery).toHaveBeenCalledWith(
			MOCK_QUERY,
			MOCK_AGGREGATE_DATA.filters,
			'view_logs',
			MOCK_AGGREGATE_DATA.queryName,
		);
	});
});
// ─── useBaseDrilldownNavigate ─────────────────────────────────────────────────
// The hook wraps the URL builder with safeNavigate. Key contract pinned here:
// the optional callback fires on EVERY invocation (menu must close), whether
// or not navigation actually happened.
describe('useBaseDrilldownNavigate', () => {
	beforeEach(() => {
		mockGetViewQuery.mockReturnValue(MOCK_VIEW_QUERY);
	});

	afterEach(() => {
		jest.clearAllMocks();
	});

	it('calls safeNavigate with the built URL on a valid key', () => {
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: MOCK_AGGREGATE_DATA,
			}),
		);
		result.current('view_logs');
		expect(mockSafeNavigate).toHaveBeenCalledTimes(1);
		const [url] = mockSafeNavigate.mock.calls[0];
		expect(url).toContain(ROUTES.LOGS_EXPLORER);
		expect(url).toContain('compositeQuery=');
	});

	it('opens the explorer in a new tab', () => {
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: MOCK_AGGREGATE_DATA,
			}),
		);
		result.current('view_traces');
		expect(mockSafeNavigate).toHaveBeenCalledWith(
			expect.any(String),
			{ newTab: true },
		);
	});

	it('calls callback after successful navigation', () => {
		const callback = jest.fn();
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: MOCK_AGGREGATE_DATA,
				callback,
			}),
		);
		result.current('view_logs');
		expect(callback).toHaveBeenCalledTimes(1);
	});

	it('does not call safeNavigate for an unknown key', () => {
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: MOCK_AGGREGATE_DATA,
			}),
		);
		result.current('view_dashboard');
		expect(mockSafeNavigate).not.toHaveBeenCalled();
	});

	it('still calls callback when the key is unknown', () => {
		// Callback (context-menu close) must fire even when nothing navigates.
		const callback = jest.fn();
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: MOCK_AGGREGATE_DATA,
				callback,
			}),
		);
		result.current('view_dashboard');
		expect(callback).toHaveBeenCalledTimes(1);
		expect(mockSafeNavigate).not.toHaveBeenCalled();
	});

	it('still calls callback when getViewQuery returns null', () => {
		mockGetViewQuery.mockReturnValue(null);
		const callback = jest.fn();
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: MOCK_AGGREGATE_DATA,
				callback,
			}),
		);
		result.current('view_logs');
		expect(callback).toHaveBeenCalledTimes(1);
		expect(mockSafeNavigate).not.toHaveBeenCalled();
	});

	it('handles null aggregateData without throwing', () => {
		const { result } = renderHook(() =>
			useBaseDrilldownNavigate({
				resolvedQuery: MOCK_QUERY,
				aggregateData: null,
			}),
		);
		expect(() => result.current('view_logs')).not.toThrow();
		expect(mockSafeNavigate).toHaveBeenCalledTimes(1);
	});
});

View File

@@ -166,7 +166,7 @@ export const getAggregateColumnHeader = (
};
};
const getFiltersFromMetric = (metric: any): FilterData[] =>
export const getFiltersFromMetric = (metric: any): FilterData[] =>
Object.keys(metric).map((key) => ({
filterKey: key,
filterValue: metric[key],

View File

@@ -2,14 +2,10 @@ import { useCallback, useEffect, useMemo, useState } from 'react';
import { useLocation } from 'react-router-dom';
import { LinkOutlined, LoadingOutlined } from '@ant-design/icons';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import useUpdatedQuery from 'container/GridCardLayout/useResolveQuery';
import { processContextLinks } from 'container/NewWidget/RightContainer/ContextLinks/utils';
import useContextVariables from 'hooks/dashboard/useContextVariables';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import createQueryParams from 'lib/createQueryParams';
import ContextMenu from 'periscope/components/ContextMenu';
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
import { ContextLinksData } from 'types/api/dashboard/getAll';
@@ -17,9 +13,10 @@ import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { ContextMenuItem } from './contextConfig';
import { getDataLinks } from './dataLinksUtils';
import { getAggregateColumnHeader, getViewQuery } from './drilldownUtils';
import { getAggregateColumnHeader } from './drilldownUtils';
import { getBaseContextConfig } from './menuOptions';
import { AggregateData } from './useAggregateDrilldown';
import useBaseDrilldownNavigate from './useBaseDrilldownNavigate';
interface UseBaseAggregateOptionsProps {
query: Query;
@@ -37,19 +34,6 @@ interface BaseAggregateOptionsConfig {
items?: ContextMenuItem;
}
const getRoute = (key: string): string => {
switch (key) {
case 'view_logs':
return ROUTES.LOGS_EXPLORER;
case 'view_metrics':
return ROUTES.METRICS_EXPLORER;
case 'view_traces':
return ROUTES.TRACES_EXPLORER;
default:
return '';
}
};
const useBaseAggregateOptions = ({
query,
onClose,
@@ -62,10 +46,8 @@ const useBaseAggregateOptions = ({
baseAggregateOptionsConfig: BaseAggregateOptionsConfig;
} => {
const [resolvedQuery, setResolvedQuery] = useState<Query>(query);
const {
getUpdatedQuery,
isLoading: isResolveQueryLoading,
} = useUpdatedQuery();
const { getUpdatedQuery, isLoading: isResolveQueryLoading } =
useUpdatedQuery();
const { dashboardData } = useDashboardStore();
useEffect(() => {
@@ -87,8 +69,6 @@ const useBaseAggregateOptions = ({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [query, aggregateData, panelType]);
const { safeNavigate } = useSafeNavigate();
// Use the new useContextVariables hook
const { processedVariables } = useContextVariables({
maxValues: 2,
@@ -122,50 +102,16 @@ const useBaseAggregateOptions = ({
{label}
</ContextMenu.Item>
));
} catch (error) {
} catch {
return [];
}
}, [contextLinks, processedVariables, onClose, aggregateData, query]);
const handleBaseDrilldown = useCallback(
(key: string): void => {
const route = getRoute(key);
const timeRange = aggregateData?.timeRange;
const filtersToAdd = aggregateData?.filters || [];
const viewQuery = getViewQuery(
resolvedQuery,
filtersToAdd,
key,
aggregateData?.queryName || '',
);
let queryParams = {
[QueryParams.compositeQuery]: encodeURIComponent(JSON.stringify(viewQuery)),
...(timeRange && {
[QueryParams.startTime]: timeRange?.startTime.toString(),
[QueryParams.endTime]: timeRange?.endTime.toString(),
}),
} as Record<string, string>;
if (route === ROUTES.METRICS_EXPLORER) {
queryParams = {
...queryParams,
[QueryParams.summaryFilters]: JSON.stringify(
viewQuery?.builder.queryData[0].filters,
),
};
}
if (route) {
safeNavigate(`${route}?${createQueryParams(queryParams)}`, {
newTab: true,
});
}
onClose();
},
[resolvedQuery, safeNavigate, onClose, aggregateData],
);
const handleBaseDrilldown = useBaseDrilldownNavigate({
resolvedQuery,
aggregateData,
callback: onClose,
});
const { pathname } = useLocation();

View File

@@ -0,0 +1,117 @@
import { useCallback } from 'react';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import createQueryParams from 'lib/createQueryParams';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { getViewQuery } from './drilldownUtils';
import { AggregateData } from './useAggregateDrilldown';
type DrilldownKey = 'view_logs' | 'view_metrics' | 'view_traces';

// Maps a context-menu drilldown key to its explorer route.
const DRILLDOWN_ROUTE_MAP: Record<DrilldownKey, string> = {
	view_logs: ROUTES.LOGS_EXPLORER,
	view_metrics: ROUTES.METRICS_EXPLORER,
	view_traces: ROUTES.TRACES_EXPLORER,
};

// Returns the explorer route for a drilldown key, or '' when unknown.
const getRoute = (key: string): string =>
	DRILLDOWN_ROUTE_MAP[key as DrilldownKey] ?? '';

interface UseBaseDrilldownNavigateProps {
	resolvedQuery: Query;
	aggregateData: AggregateData | null;
	// Invoked after every navigation attempt (e.g. to close the context menu),
	// whether or not navigation actually happened.
	callback?: () => void;
}

/**
 * Builds the explorer URL for a drilldown action.
 *
 * Returns `null` when the key maps to no route or when `getViewQuery` cannot
 * produce a view query. The URL carries the composite query, the optional
 * start/end time range from `aggregateData`, and — for the metrics explorer
 * only — a `summaryFilters` param mirroring the first query's filters.
 */
export function buildDrilldownUrl(
	resolvedQuery: Query,
	aggregateData: AggregateData | null,
	key: string,
): string | null {
	const route = getRoute(key);
	const viewQuery = getViewQuery(
		resolvedQuery,
		aggregateData?.filters ?? [],
		key,
		aggregateData?.queryName ?? '',
	);

	if (!viewQuery || !route) {
		return null;
	}

	const timeRange = aggregateData?.timeRange;
	let queryParams: Record<string, string> = {
		[QueryParams.compositeQuery]: encodeURIComponent(JSON.stringify(viewQuery)),
		...(timeRange && {
			[QueryParams.startTime]: timeRange.startTime.toString(),
			[QueryParams.endTime]: timeRange.endTime.toString(),
		}),
	};

	if (route === ROUTES.METRICS_EXPLORER) {
		queryParams = {
			...queryParams,
			[QueryParams.summaryFilters]: JSON.stringify(
				viewQuery.builder.queryData[0].filters,
			),
		};
	}

	return `${route}?${createQueryParams(queryParams)}`;
}

/**
 * Returns a stable `(key) => void` that navigates to the explorer matching a
 * drilldown key in a new tab.
 *
 * Delegates URL construction to {@link buildDrilldownUrl} so the hook and the
 * exported builder cannot drift apart (previously the same ~40 lines of
 * param-assembly logic were duplicated in both). The `callback` always fires,
 * even when the key is unknown or no view query could be built.
 */
const useBaseDrilldownNavigate = ({
	resolvedQuery,
	aggregateData,
	callback,
}: UseBaseDrilldownNavigateProps): ((key: string) => void) => {
	const { safeNavigate } = useSafeNavigate();

	return useCallback(
		(key: string): void => {
			const url = buildDrilldownUrl(resolvedQuery, aggregateData, key);
			if (url) {
				safeNavigate(url, {
					newTab: true,
				});
			}
			// Fire unconditionally so the caller (context menu) always closes.
			callback?.();
		},
		[resolvedQuery, safeNavigate, callback, aggregateData],
	);
};

export { getRoute };
export default useBaseDrilldownNavigate;

View File

@@ -2,6 +2,7 @@ import { useMemo } from 'react';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import { isAxiosError } from 'axios';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { MAX_QUERY_RETRIES } from 'constants/reactQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { updateBarStepInterval } from 'container/GridCardLayout/utils';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
@@ -132,6 +133,10 @@ export const useGetQueryRange: UseGetQueryRange = (
return options.retry;
}
return (failureCount: number, error: Error): boolean => {
if (isAxiosError(error) && error.code === 'ERR_CANCELED') {
return false;
}
let status: number | undefined;
if (error instanceof APIError) {
@@ -144,7 +149,7 @@ export const useGetQueryRange: UseGetQueryRange = (
return false;
}
return failureCount < 3;
return failureCount < MAX_QUERY_RETRIES;
};
}, [options?.retry]);

View File

@@ -1,5 +1,7 @@
import { useQuery, UseQueryResult } from 'react-query';
import listOverview from 'api/thirdPartyApis/listOverview';
import { isAxiosError } from 'axios';
import { MAX_QUERY_RETRIES } from 'constants/reactQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
@@ -20,12 +22,21 @@ export const useListOverview = (
showIp,
filter.expression,
],
queryFn: () =>
listOverview({
start,
end,
show_ip: showIp,
filter,
}),
queryFn: ({ signal }) =>
listOverview(
{
start,
end,
show_ip: showIp,
filter,
},
signal,
),
retry: (failureCount, error): boolean => {
if (isAxiosError(error) && error.code === 'ERR_CANCELED') {
return false;
}
return failureCount < MAX_QUERY_RETRIES;
},
});
};

View File

@@ -0,0 +1,23 @@
import { create } from 'zustand';
// Shared fetch/cancel state for the All Errors page so the toolbar and the
// data views can coordinate without prop drilling.
interface AllErrorsQueryState {
	isFetching: boolean;
	isCancelled: boolean;
	setIsFetching: (isFetching: boolean) => void;
	setIsCancelled: (isCancelled: boolean) => void;
}

export const useAllErrorsQueryState = create<AllErrorsQueryState>((set) => ({
	isFetching: false,
	isCancelled: false,
	// Starting a new fetch clears a previous "cancelled" flag; finishing a
	// fetch leaves the flag untouched.
	setIsFetching: (isFetching): void =>
		set(({ isCancelled }) => ({
			isFetching,
			isCancelled: isFetching ? false : isCancelled,
		})),
	setIsCancelled: (isCancelled): void => set({ isCancelled }),
}));

View File

@@ -1,4 +1,5 @@
import { useState } from 'react';
import { useCallback, useState } from 'react';
import { useQueryClient } from 'react-query';
import { useLocation } from 'react-router-dom';
import { FilterOutlined } from '@ant-design/icons';
import { Button, Tooltip } from 'antd';
@@ -19,12 +20,22 @@ import history from 'lib/history';
import { isNull } from 'lodash-es';
import { routes } from './config';
import { useAllErrorsQueryState } from './QueryStateContext';
import './AllErrors.styles.scss';
function AllErrors(): JSX.Element {
const { pathname } = useLocation();
const { handleRunQuery } = useQueryBuilder();
const queryClient = useQueryClient();
const isLoadingQueries = useAllErrorsQueryState((s) => s.isFetching);
const setIsCancelled = useAllErrorsQueryState((s) => s.setIsCancelled);
const handleCancelQuery = useCallback(() => {
queryClient.cancelQueries(['getAllErrors']);
queryClient.cancelQueries(['getErrorCounts']);
setIsCancelled(true);
}, [queryClient, setIsCancelled]);
const [showFilters, setShowFilters] = useState<boolean>(() => {
const localStorageValue = getLocalStorageKey(
@@ -77,7 +88,11 @@ function AllErrors(): JSX.Element {
}
rightActions={
<div className="right-toolbar-actions-container">
<RightToolbarActions onStageRunQuery={handleRunQuery} />
<RightToolbarActions
onStageRunQuery={handleRunQuery}
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
/>
<HeaderRightSection
enableAnnouncements={false}
enableShare

View File

@@ -1,10 +1,12 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQueryClient } from 'react-query';
import * as Sentry from '@sentry/react';
import getLocalStorageKey from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import { TelemetryFieldKey } from 'api/v5/v5';
import cx from 'classnames';
import ExplorerCard from 'components/ExplorerCard/ExplorerCard';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types';
import WarningPopover from 'components/WarningPopover/WarningPopover';
@@ -74,6 +76,27 @@ function LogsExplorer(): JSX.Element {
const chartQueryKeyRef = useRef<any>();
const [isLoadingQueries, setIsLoadingQueries] = useState<boolean>(false);
const [isCancelled, setIsCancelled] = useState(false);
useEffect(() => {
if (isLoadingQueries) {
setIsCancelled(false);
}
}, [isLoadingQueries]);
const queryClient = useQueryClient();
const handleCancelQuery = useCallback(() => {
if (listQueryKeyRef.current) {
queryClient.cancelQueries(listQueryKeyRef.current);
}
if (chartQueryKeyRef.current) {
queryClient.cancelQueries(chartQueryKeyRef.current);
}
setIsCancelled(true);
// Reset loading state — the views container unmounts when cancelled, so
// no child will call setIsLoadingQueries(false) otherwise.
setIsLoadingQueries(false);
}, [queryClient]);
const [warning, setWarning] = useState<Warning | undefined>(undefined);
@@ -296,10 +319,12 @@ function LogsExplorer(): JSX.Element {
}
rightActions={
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery()}
listQueryKeyRef={listQueryKeyRef}
chartQueryKeyRef={chartQueryKeyRef}
onStageRunQuery={(): void => {
setIsCancelled(false);
handleRunQuery();
}}
isLoadingQueries={isLoadingQueries}
handleCancelQuery={handleCancelQuery}
showLiveLogs={showLiveLogs}
/>
}
@@ -315,14 +340,18 @@ function LogsExplorer(): JSX.Element {
</ExplorerCard>
</div>
<div className="logs-explorer-views">
<LogsExplorerViewsContainer
listQueryKeyRef={listQueryKeyRef}
chartQueryKeyRef={chartQueryKeyRef}
setIsLoadingQueries={setIsLoadingQueries}
setWarning={setWarning}
showLiveLogs={showLiveLogs}
handleChangeSelectedView={handleChangeSelectedView}
/>
{isCancelled ? (
<QueryCancelledPlaceholder subText='Click "Run Query" to load logs.' />
) : (
<LogsExplorerViewsContainer
listQueryKeyRef={listQueryKeyRef}
chartQueryKeyRef={chartQueryKeyRef}
setIsLoadingQueries={setIsLoadingQueries}
setWarning={setWarning}
showLiveLogs={showLiveLogs}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
</div>
</div>
</section>

View File

@@ -1,10 +1,12 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useQueryClient } from 'react-query';
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import { Card } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import ExplorerCard from 'components/ExplorerCard/ExplorerCard';
import QueryCancelledPlaceholder from 'components/QueryCancelledPlaceholder';
import QuickFilters from 'components/QuickFilters/QuickFilters';
import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types';
import WarningPopover from 'components/WarningPopover/WarningPopover';
@@ -71,11 +73,29 @@ function TracesExplorer(): JSX.Element {
});
const [searchParams] = useSearchParams();
const queryClient = useQueryClient();
const listQueryKeyRef = useRef<any>();
// Get panel type from URL
const panelTypesFromUrl = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);
const [isLoadingQueries, setIsLoadingQueries] = useState<boolean>(false);
const [isCancelled, setIsCancelled] = useState(false);
useEffect(() => {
if (isLoadingQueries) {
setIsCancelled(false);
}
}, [isLoadingQueries]);
const handleCancelQuery = useCallback(() => {
if (listQueryKeyRef.current) {
queryClient.cancelQueries(listQueryKeyRef.current);
}
setIsCancelled(true);
// Reset loading state — the active view unmounts when cancelled, so no
// child will call setIsLoadingQueries(false) otherwise.
setIsLoadingQueries(false);
}, [queryClient]);
const [selectedView, setSelectedView] = useState<ExplorerViews>(() =>
getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
@@ -210,9 +230,12 @@ function TracesExplorer(): JSX.Element {
}
rightActions={
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery()}
onStageRunQuery={(): void => {
setIsCancelled(false);
handleRunQuery();
}}
isLoadingQueries={isLoadingQueries}
listQueryKeyRef={listQueryKeyRef}
handleCancelQuery={handleCancelQuery}
/>
}
/>
@@ -224,7 +247,11 @@ function TracesExplorer(): JSX.Element {
</ExplorerCard>
<div className="traces-explorer-views">
{selectedView === ExplorerViews.LIST && (
{isCancelled && (
<QueryCancelledPlaceholder subText='Click "Run Query" to load traces.' />
)}
{!isCancelled && selectedView === ExplorerViews.LIST && (
<div className="trace-explorer-list-view">
<ListView
isFilterApplied={isFilterApplied}
@@ -235,7 +262,7 @@ function TracesExplorer(): JSX.Element {
</div>
)}
{selectedView === ExplorerViews.TRACE && (
{!isCancelled && selectedView === ExplorerViews.TRACE && (
<div className="trace-explorer-traces-view">
<TracesView
isFilterApplied={isFilterApplied}
@@ -246,7 +273,7 @@ function TracesExplorer(): JSX.Element {
</div>
)}
{selectedView === ExplorerViews.TIMESERIES && (
{!isCancelled && selectedView === ExplorerViews.TIMESERIES && (
<div className="trace-explorer-time-series-view">
<TimeSeriesView
dataSource={DataSource.TRACES}
@@ -258,7 +285,7 @@ function TracesExplorer(): JSX.Element {
</div>
)}
{selectedView === ExplorerViews.TABLE && (
{!isCancelled && selectedView === ExplorerViews.TABLE && (
<div className="trace-explorer-table-view">
<TableView
setWarning={setWarning}

View File

@@ -23,14 +23,7 @@ import (
"sync"
"time"
htmltemplate "html/template"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/templating/markdownrenderer"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/alertmanager/config"
@@ -45,29 +38,16 @@ const (
// Email implements a Notifier for email notifications.
type Email struct {
conf *config.EmailConfig
tmpl *template.Template
logger *slog.Logger
hostname string
templater alertmanagertypes.Templater
templateStore emailtypes.TemplateStore
}
// layoutData is the value passed to the alert_email_notification layout
// template. It embeds NotificationTemplateData so templates can reference
// `.Status`, `.Alerts`, `.ExternalURL`, etc. directly alongside the rendered
// Title and per-alert Bodies.
type layoutData struct {
alertmanagertypes.NotificationTemplateData
Title string
Bodies []htmltemplate.HTML
conf *config.EmailConfig
tmpl *template.Template
logger *slog.Logger
hostname string
}
var errNoAuthUsernameConfigured = errors.NewInternalf(errors.CodeInternal, "no auth username configured")
// New returns a new Email notifier. templateStore may be nil; in that case
// custom-body alerts fall back to plain <div>-wrapped HTML.
func New(c *config.EmailConfig, t *template.Template, l *slog.Logger, templater alertmanagertypes.Templater, templateStore emailtypes.TemplateStore) *Email {
// New returns a new Email notifier.
func New(c *config.EmailConfig, t *template.Template, l *slog.Logger) *Email {
if _, ok := c.Headers["Subject"]; !ok {
c.Headers["Subject"] = config.DefaultEmailSubject
}
@@ -83,7 +63,7 @@ func New(c *config.EmailConfig, t *template.Template, l *slog.Logger, templater
if err != nil {
h = "localhost.localdomain"
}
return &Email{conf: c, tmpl: t, logger: l, hostname: h, templater: templater, templateStore: templateStore}
return &Email{conf: c, tmpl: t, logger: l, hostname: h}
}
// auth resolves a string of authentication mechanisms.
@@ -265,16 +245,6 @@ func (n *Email) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
}
}
// Prepare the content for the email. customSubject, when non-empty, overrides
// the configured Subject header for this notification only. We deliberately
// do not mutate n.conf.Headers here: the config map is shared across
// concurrent notifications to the same receiver.
customSubject, htmlBody, err := n.prepareContent(ctx, as, data)
if err != nil {
n.logger.ErrorContext(ctx, "failed to prepare notification content", errors.Attr(err))
return false, err
}
// Send the email headers and body.
message, err := c.Data()
if err != nil {
@@ -292,10 +262,6 @@ func (n *Email) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
buffer := &bytes.Buffer{}
for header, t := range n.conf.Headers {
if header == "Subject" && customSubject != "" {
fmt.Fprintf(buffer, "%s: %s\r\n", header, mime.QEncoding.Encode("utf-8", customSubject))
continue
}
value, err := n.tmpl.ExecuteTextString(t, data)
if err != nil {
return false, errors.WrapInternalf(err, errors.CodeInternal, "execute %q header template", header)
@@ -370,7 +336,7 @@ func (n *Email) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
}
}
if htmlBody != "" {
if len(n.conf.HTML) > 0 {
// Html template
// Preferred alternative placed last per section 5.1.4 of RFC 2046
// https://www.ietf.org/rfc/rfc2046.txt
@@ -381,8 +347,12 @@ func (n *Email) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
if err != nil {
return false, errors.WrapInternalf(err, errors.CodeInternal, "create part for html template")
}
body, err := n.tmpl.ExecuteHTMLString(n.conf.HTML, data)
if err != nil {
return false, errors.WrapInternalf(err, errors.CodeInternal, "execute html template")
}
qw := quotedprintable.NewWriter(w)
_, err = qw.Write([]byte(htmlBody))
_, err = qw.Write([]byte(body))
if err != nil {
return true, errors.WrapInternalf(err, errors.CodeInternal, "write HTML part")
}
@@ -411,146 +381,6 @@ func (n *Email) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
return false, nil
}
// prepareContent returns a subject override (empty when the default config
// Subject should be used) and the HTML body for the email. Callers must treat
// the subject as local state and never write it back to n.conf.Headers: the
// headers map is shared across concurrent notifications to the same receiver.
//
// Returns (subject, htmlBody, err). htmlBody may be "" when neither a custom
// body template nor a configured HTML template is available.
func (n *Email) prepareContent(ctx context.Context, alerts []*types.Alert, data *template.Data) (string, string, error) {
	customTitle, customBody := alertmanagertemplate.ExtractTemplatesFromAnnotations(alerts)
	result, err := n.templater.Expand(ctx, alertmanagertypes.ExpandRequest{
		TitleTemplate:        customTitle,
		BodyTemplate:         customBody,
		DefaultTitleTemplate: n.conf.Headers["Subject"],
		// The email body's default path uses the configured HTML template
		// verbatim (n.conf.HTML below). Leave DefaultBodyTemplate empty so
		// the templater produces a single empty default body we ignore.
		DefaultBodyTemplate: "",
	}, alerts)
	if err != nil {
		return "", "", err
	}
	// subject == "" signals "use the configured Subject header"; Notify then
	// runs it through the normal header template pipeline.
	subject := ""
	if customTitle != "" {
		subject = result.Title
	}
	if !result.IsDefaultBody {
		// Custom-body path: render each expanded markdown body to HTML, then
		// wrap the whole thing in the alert_email_notification layout (or fall
		// back to plain <div> wrapping when the template store is absent).
		for i, body := range result.Body {
			if body == "" {
				continue
			}
			rendered, err := markdownrenderer.RenderHTML(body)
			if err != nil {
				return "", "", err
			}
			result.Body[i] = rendered
		}
		// Attach per-alert "View Related Logs/Traces" buttons before layout.
		appendRelatedLinkButtons(alerts, result.Body)
		html, err := n.renderLayout(ctx, result)
		if err != nil {
			// Layout rendering is best-effort: degrade to bare <div> wrapping
			// rather than failing the whole notification.
			n.logger.WarnContext(ctx, "custom email template rendering failed, falling back to plain <div> wrap", errors.Attr(err))
			return subject, wrapBodiesAsDivs(result.Body), nil
		}
		return subject, html, nil
	}
	// Default-body path: use the HTML config template if available.
	if len(n.conf.HTML) > 0 {
		body, err := n.tmpl.ExecuteHTMLString(n.conf.HTML, data)
		if err != nil {
			return "", "", errors.WrapInternalf(err, errors.CodeInternal, "execute html template")
		}
		return subject, body, nil
	}
	return subject, "", nil
}
// renderLayout wraps result in the alert_email_notification HTML layout.
// Returns an error when the store has no such template (e.g. in tests using
// an empty store) so prepareContent can fall back to plain <div> wrapping.
func (n *Email) renderLayout(ctx context.Context, result *alertmanagertypes.ExpandResult) (string, error) {
	if n.templateStore == nil {
		return "", errors.NewInternalf(errors.CodeInternal, "no email template store configured")
	}
	layout, err := n.templateStore.Get(ctx, emailtypes.TemplateNameAlertEmailNotification)
	if err != nil {
		return "", err
	}
	// The bodies are already rendered HTML; the htmltemplate.HTML conversion
	// marks them as safe so the layout template does not escape them again.
	bodies := make([]htmltemplate.HTML, 0, len(result.Body))
	for _, b := range result.Body {
		bodies = append(bodies, htmltemplate.HTML(b))
	}
	data := layoutData{Title: result.Title, Bodies: bodies}
	if result.NotificationData != nil {
		// Embed the notification data so the layout can reference fields such
		// as .Status or .ExternalURL alongside Title and Bodies.
		data.NotificationTemplateData = *result.NotificationData
	}
	var buf bytes.Buffer
	if err := layout.Execute(&buf, data); err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "failed to render email layout")
	}
	return buf.String(), nil
}
// appendRelatedLinkButtons appends "View Related Logs/Traces" buttons to each
// per-alert body when the corresponding annotation carries a link. bodies is
// positionally aligned with alerts; empty bodies are skipped so a button is
// never attached to an alert that produced no visible content. The bodies
// slice is mutated in place.
func appendRelatedLinkButtons(alerts []*types.Alert, bodies []string) {
	limit := len(bodies)
	if len(alerts) < limit {
		limit = len(alerts)
	}
	for i := 0; i < limit; i++ {
		body := bodies[i]
		if body == "" {
			continue
		}
		annotations := alerts[i].Annotations
		if logsLink := string(annotations[ruletypes.AnnotationRelatedLogs]); logsLink != "" {
			body += htmlButton("View Related Logs", logsLink)
		}
		if tracesLink := string(annotations[ruletypes.AnnotationRelatedTraces]); tracesLink != "" {
			body += htmlButton("View Related Traces", tracesLink)
		}
		bodies[i] = body
	}
}
// wrapBodiesAsDivs concatenates every non-empty body, each wrapped in its own
// <div> element. Empty entries are dropped, so the result is "" when no body
// has content.
func wrapBodiesAsDivs(bodies []string) string {
	wrapped := make([]string, 0, len(bodies))
	for _, body := range bodies {
		if body == "" {
			continue
		}
		wrapped = append(wrapped, "<div>"+body+"</div>")
	}
	return strings.Join(wrapped, "")
}
func htmlButton(text, url string) string {
return fmt.Sprintf(`
<a href="%s" target="_blank" style="text-decoration: none;">
<button style="
padding: 6px 16px;
/* Default System Font */
font-family: sans-serif;
font-size: 14px;
font-weight: 500;
line-height: 1.5;
/* Light Theme & Dynamic Background (Solid) */
color: #111827;
background-color: #f9fafb;
/* Static Outline */
border: 1px solid #d1d5db;
border-radius: 4px;
cursor: pointer;
">
%s
</button>
</a>`, url, text)
}
// loginAuth holds the credential pair used during an SMTP authentication
// exchange.
// NOTE(review): presumably implements the LOGIN mechanism of smtp.Auth — its
// methods are defined outside this view; confirm before relying on that.
type loginAuth struct {
	username, password string
}

View File

@@ -8,7 +8,6 @@ import (
"context"
"fmt"
"io"
"log/slog"
"net"
"net/http"
"net/url"
@@ -18,12 +17,7 @@ import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/emailing/templatestore/filetemplatestore"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/emersion/go-smtp"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
@@ -48,18 +42,6 @@ const (
emailFrom = "alertmanager@example.com"
)
// testTemplater returns a Templater bound to tmpl with a discard logger, so
// template expansion in tests produces no log output.
func testTemplater(tmpl *template.Template) alertmanagertypes.Templater {
	return alertmanagertemplate.New(tmpl, slog.New(slog.DiscardHandler))
}
// testEmptyStore returns an email template store with no templates. When the
// email notifier tries to render the alert_email_notification layout against
// this, the Get call fails and prepareContent falls back to plain <div> wrap,
// which is the behavior the tests below assert on.
func testEmptyStore() emailtypes.TemplateStore {
	return filetemplatestore.NewEmptyStore()
}
// email represents an email returned by the MailDev REST API.
// See https://github.com/djfarrelly/MailDev/blob/master/docs/rest.md.
type email struct {
@@ -180,7 +162,7 @@ func notifyEmailWithContext(ctx context.Context, t *testing.T, cfg *config.Email
return nil, false, err
}
email := New(cfg, tmpl, promslog.NewNopLogger(), testTemplater(tmpl), testEmptyStore())
email := New(cfg, tmpl, promslog.NewNopLogger())
retry, err := email.Notify(ctx, firingAlert)
if err != nil {
@@ -724,7 +706,7 @@ func TestEmailRejected(t *testing.T) {
tmpl, firingAlert, err := prepare(cfg)
require.NoError(t, err)
e := New(cfg, tmpl, promslog.NewNopLogger(), testTemplater(tmpl), testEmptyStore())
e := New(cfg, tmpl, promslog.NewNopLogger())
// Send the alert to mock SMTP server.
retry, err := e.Notify(context.Background(), firingAlert)
@@ -1048,103 +1030,6 @@ func TestEmailImplicitTLS(t *testing.T) {
}
}
// TestPrepareContent covers the three body-selection paths of
// (*Email).prepareContent: custom body template, default path with a
// configured HTML template plus a custom title, and default path with no
// HTML template at all.
func TestPrepareContent(t *testing.T) {
	// Custom body annotation on every alert → per-alert markdown is rendered
	// to HTML and, with the empty template store, wrapped in plain <div>s.
	t.Run("custom template", func(t *testing.T) {
		tmpl, err := template.FromGlobs([]string{})
		require.NoError(t, err)
		tmpl.ExternalURL, _ = url.Parse("http://am")
		bodyTpl := "line $labels.instance"
		a1 := &types.Alert{
			Alert: model.Alert{
				Labels: model.LabelSet{
					model.LabelName("instance"): model.LabelValue("one"),
				},
				Annotations: model.LabelSet{
					model.LabelName(ruletypes.AnnotationBodyTemplate): model.LabelValue(bodyTpl),
				},
			},
		}
		a2 := &types.Alert{
			Alert: model.Alert{
				Labels: model.LabelSet{
					model.LabelName("instance"): model.LabelValue("two"),
				},
				Annotations: model.LabelSet{
					model.LabelName(ruletypes.AnnotationBodyTemplate): model.LabelValue(bodyTpl),
				},
			},
		}
		alerts := []*types.Alert{a1, a2}
		cfg := &config.EmailConfig{Headers: map[string]string{"Subject": "subj"}}
		n := New(cfg, tmpl, promslog.NewNopLogger(), testTemplater(tmpl), testEmptyStore())
		ctx := context.Background()
		data := notify.GetTemplateData(ctx, tmpl, alerts, n.logger)
		_, htmlBody, err := n.prepareContent(ctx, alerts, data)
		require.NoError(t, err)
		require.Equal(t, "<div><p>line one</p>\n</div><div><p>line two</p>\n</div>", htmlBody)
	})
	// No custom body → configured HTML template is used; a custom title
	// annotation still yields a subject override.
	t.Run("default template with HTML and custom title template", func(t *testing.T) {
		tmpl, err := template.FromGlobs([]string{})
		require.NoError(t, err)
		tmpl.ExternalURL, _ = url.Parse("http://am")
		firingAlert := &types.Alert{
			Alert: model.Alert{
				Labels: model.LabelSet{},
				Annotations: model.LabelSet{
					model.LabelName(ruletypes.AnnotationTitleTemplate): model.LabelValue("fixed from $alert.status"),
				},
				StartsAt: time.Now(),
				EndsAt:   time.Now().Add(time.Hour),
			},
		}
		alerts := []*types.Alert{firingAlert}
		cfg := &config.EmailConfig{
			Headers: map[string]string{},
			HTML:    "Status: {{ .Status }}",
		}
		n := New(cfg, tmpl, promslog.NewNopLogger(), testTemplater(tmpl), testEmptyStore())
		ctx := context.Background()
		data := notify.GetTemplateData(ctx, tmpl, alerts, n.logger)
		subject, htmlBody, err := n.prepareContent(ctx, alerts, data)
		require.NoError(t, err)
		require.Equal(t, "Status: firing", htmlBody)
		// custom title supplied → prepareContent returns a subject override.
		require.Equal(t, "fixed from firing", subject)
	})
	// Neither a custom body nor an HTML template → both results are empty.
	t.Run("default template without HTML", func(t *testing.T) {
		cfg := &config.EmailConfig{Headers: map[string]string{"Subject": "the email subject"}}
		tmpl, err := template.FromGlobs([]string{})
		require.NoError(t, err)
		tmpl.ExternalURL, _ = url.Parse("http://am")
		n := New(cfg, tmpl, promslog.NewNopLogger(), testTemplater(tmpl), testEmptyStore())
		firingAlert := &types.Alert{
			Alert: model.Alert{
				Labels:   model.LabelSet{},
				StartsAt: time.Now(),
				EndsAt:   time.Now().Add(time.Hour),
			},
		}
		alerts := []*types.Alert{firingAlert}
		ctx := context.Background()
		data := notify.GetTemplateData(ctx, tmpl, alerts, n.logger)
		subject, htmlBody, err := n.prepareContent(ctx, alerts, data)
		require.NoError(t, err)
		require.Equal(t, "", htmlBody)
		// No custom title annotation → empty subject signals "use the configured
		// Subject header", which Notify then runs through the normal header
		// template pipeline.
		require.Equal(t, "", subject)
	})
}
// ptrTo returns a pointer to a fresh copy of b, useful for populating
// optional *bool config fields in tests.
func ptrTo(b bool) *bool {
	value := b
	return &value
}

View File

@@ -15,9 +15,7 @@ import (
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
@@ -46,7 +44,6 @@ type Notifier struct {
retrier *notify.Retrier
webhookURL *config.SecretURL
postJSONFunc func(ctx context.Context, client *http.Client, url string, body io.Reader) (*http.Response, error)
templater alertmanagertypes.Templater
}
// https://learn.microsoft.com/en-us/connectors/teams/?tabs=text1#adaptivecarditemschema
@@ -97,7 +94,7 @@ type teamsMessage struct {
}
// New returns a new notifier that uses the Microsoft Teams Power Platform connector.
func New(c *config.MSTeamsV2Config, t *template.Template, titleLink string, l *slog.Logger, templater alertmanagertypes.Templater, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
func New(c *config.MSTeamsV2Config, t *template.Template, titleLink string, l *slog.Logger, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
client, err := notify.NewClientWithTracing(*c.HTTPConfig, Integration, httpOpts...)
if err != nil {
return nil, err
@@ -112,7 +109,6 @@ func New(c *config.MSTeamsV2Config, t *template.Template, titleLink string, l *s
retrier: &notify.Retrier{},
webhookURL: c.WebhookURL,
postJSONFunc: notify.PostJSON,
templater: templater,
}
return n, nil
@@ -132,11 +128,25 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
return false, err
}
title := tmpl(n.conf.Title)
if err != nil {
return false, err
}
titleLink := tmpl(n.titleLink)
if err != nil {
return false, err
}
alerts := types.Alerts(as...)
color := colorGrey
switch alerts.Status() {
case model.AlertFiring:
color = colorRed
case model.AlertResolved:
color = colorGreen
}
var url string
if n.conf.WebhookURL != nil {
url = n.conf.WebhookURL.String()
@@ -148,12 +158,6 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
url = strings.TrimSpace(string(content))
}
bodyBlocks, err := n.prepareContent(ctx, as)
if err != nil {
n.logger.ErrorContext(ctx, "failed to prepare notification content", errors.Attr(err))
return false, err
}
// A message as referenced in https://learn.microsoft.com/en-us/connectors/teams/?tabs=text1%2Cdotnet#request-body-schema
t := teamsMessage{
Type: "message",
@@ -165,7 +169,17 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
Schema: "http://adaptivecards.io/schemas/adaptive-card.json",
Type: "AdaptiveCard",
Version: "1.2",
Body: bodyBlocks,
Body: []Body{
{
Type: "TextBlock",
Text: title,
Weight: "Bolder",
Size: "Medium",
Wrap: true,
Style: "heading",
Color: color,
},
},
Actions: []Action{
{
Type: "Action.OpenUrl",
@@ -181,6 +195,20 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
},
}
// add labels and annotations to the body of all alerts
for _, alert := range as {
t.Attachments[0].Content.Body = append(t.Attachments[0].Content.Body, Body{
Type: "TextBlock",
Text: "Alerts",
Weight: "Bolder",
Size: "Medium",
Wrap: true,
Color: color,
})
t.Attachments[0].Content.Body = append(t.Attachments[0].Content.Body, n.createLabelsAndAnnotationsBody(alert)...)
}
var payload bytes.Buffer
if err = json.NewEncoder(&payload).Encode(t); err != nil {
return false, err
@@ -200,77 +228,6 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
return shouldRetry, err
}
// prepareContent builds the Adaptive Card body blocks for the notification.
// The first block is always the title; the remainder depends on whether the
// alerts carried a custom body template.
func (n *Notifier) prepareContent(ctx context.Context, alerts []*types.Alert) ([]Body, error) {
	customTitle, customBody := alertmanagertemplate.ExtractTemplatesFromAnnotations(alerts)
	result, err := n.templater.Expand(ctx, alertmanagertypes.ExpandRequest{
		TitleTemplate:        customTitle,
		BodyTemplate:         customBody,
		DefaultTitleTemplate: n.conf.Title,
		// Default body is not a template — it's built per-alert below from
		// labels/annotations as Adaptive Card FactSets, so leave it empty.
		DefaultBodyTemplate: "",
	}, alerts)
	if err != nil {
		return nil, err
	}
	// Overall group color: red when any alert fires, green when all resolved,
	// grey otherwise. Used for the title (and section headers on the default
	// path); the custom path colors each body per alert instead.
	color := colorGrey
	switch types.Alerts(alerts...).Status() {
	case model.AlertFiring:
		color = colorRed
	case model.AlertResolved:
		color = colorGreen
	}
	blocks := []Body{{
		Type:   "TextBlock",
		Text:   result.Title,
		Weight: "Bolder",
		Size:   "Medium",
		Wrap:   true,
		Style:  "heading",
		Color:  color,
	}}
	if result.IsDefaultBody {
		// Default path: an "Alerts" header followed by each alert's labels
		// and annotations rendered as fact sets.
		for _, alert := range alerts {
			blocks = append(blocks, Body{
				Type:   "TextBlock",
				Text:   "Alerts",
				Weight: "Bolder",
				Size:   "Medium",
				Wrap:   true,
				Color:  color,
			})
			blocks = append(blocks, n.createLabelsAndAnnotationsBody(alert)...)
		}
		return blocks, nil
	}
	// Custom body path: result.Body is positionally aligned with alerts;
	// entries for alerts whose template rendered empty are kept as "" so we
	// can skip them here without shifting the per-alert color index.
	for i, body := range result.Body {
		if body == "" || i >= len(alerts) {
			continue
		}
		perAlertColor := colorRed
		if alerts[i].Resolved() {
			perAlertColor = colorGreen
		}
		blocks = append(blocks, Body{
			Type:  "TextBlock",
			Text:  body,
			Wrap:  true,
			Color: perAlertColor,
		})
	}
	return blocks, nil
}
func (*Notifier) createLabelsAndAnnotationsBody(alert *types.Alert) []Body {
bodies := []Body{}
bodies = append(bodies, Body{
@@ -301,11 +258,7 @@ func (*Notifier) createLabelsAndAnnotationsBody(alert *types.Alert) []Body {
annotationsFacts := []Fact{}
for k, v := range alert.Annotations {
// Skip private (`_`-prefixed) annotations — templating inputs,
// threshold metadata, related-link URLs — and "summary", which default
// channels surface as the attachment pretext rather than a fact row.
if slices.Contains([]string{"summary", "related_logs", "related_traces"}, string(k)) ||
alertmanagertypes.IsPrivateAnnotation(string(k)) {
if slices.Contains([]string{"summary", "related_logs", "related_traces"}, string(k)) {
continue
}
annotationsFacts = append(annotationsFacts, Fact{Title: string(k), Value: string(v)})

View File

@@ -8,7 +8,6 @@ import (
"context"
"encoding/json"
"io"
"log/slog"
"net/http"
"net/http/httptest"
"net/url"
@@ -16,9 +15,6 @@ import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/common/promslog"
@@ -27,28 +23,21 @@ import (
test "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagernotify/alertmanagernotifytest"
"github.com/prometheus/alertmanager/config"
"github.com/prometheus/alertmanager/notify"
"github.com/prometheus/alertmanager/template"
"github.com/prometheus/alertmanager/types"
)
// newTestTemplater builds a Templater over tmpl with a discard logger so
// notifier tests run without log noise.
func newTestTemplater(tmpl *template.Template) alertmanagertypes.Templater {
	return alertmanagertemplate.New(tmpl, slog.New(slog.DiscardHandler))
}
// This is a test URL that has been modified to not be valid.
var testWebhookURL, _ = url.Parse("https://example.westeurope.logic.azure.com:443/workflows/xxx/triggers/manual/paths/invoke?api-version=2016-06-01&sp=%2Ftriggers%2Fmanual%2Frun&sv=1.0&sig=xxx")
func TestMSTeamsV2Retry(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.MSTeamsV2Config{
WebhookURL: &config.SecretURL{URL: testWebhookURL},
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
`{{ template "msteamsv2.default.titleLink" . }}`,
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -75,16 +64,14 @@ func TestNotifier_Notify_WithReason(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.MSTeamsV2Config{
WebhookURL: &config.SecretURL{URL: testWebhookURL},
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
`{{ template "msteamsv2.default.titleLink" . }}`,
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -166,8 +153,7 @@ func TestMSTeamsV2Templating(t *testing.T) {
t.Run(tc.title, func(t *testing.T) {
tc.cfg.WebhookURL = &config.SecretURL{URL: u}
tc.cfg.HTTPConfig = &commoncfg.HTTPClientConfig{}
tmpl := test.CreateTmpl(t)
pd, err := New(tc.cfg, tmpl, tc.titleLink, promslog.NewNopLogger(), newTestTemplater(tmpl))
pd, err := New(tc.cfg, test.CreateTmpl(t), tc.titleLink, promslog.NewNopLogger())
require.NoError(t, err)
ctx := context.Background()
@@ -200,124 +186,20 @@ func TestMSTeamsV2RedactedURL(t *testing.T) {
defer fn()
secret := "secret"
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.MSTeamsV2Config{
WebhookURL: &config.SecretURL{URL: u},
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
`{{ template "msteamsv2.default.titleLink" . }}`,
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
test.AssertNotifyLeaksNoSecret(ctx, t, notifier, secret)
}
// TestPrepareContent checks the Adaptive Card blocks produced by
// (*Notifier).prepareContent: the configured title template with a custom
// body annotation, and per-alert coloring when a custom template is used.
func TestPrepareContent(t *testing.T) {
	t.Run("default template - firing alerts", func(t *testing.T) {
		tmpl := test.CreateTmpl(t)
		notifier, err := New(
			&config.MSTeamsV2Config{
				WebhookURL: &config.SecretURL{URL: testWebhookURL},
				HTTPConfig: &commoncfg.HTTPClientConfig{},
				Title:      "Alertname: {{ .CommonLabels.alertname }}",
			},
			tmpl,
			`{{ template "msteamsv2.default.titleLink" . }}`,
			promslog.NewNopLogger(),
			newTestTemplater(tmpl),
		)
		require.NoError(t, err)
		ctx := context.Background()
		ctx = notify.WithGroupKey(ctx, "1")
		alerts := []*types.Alert{
			{
				Alert: model.Alert{
					Labels: model.LabelSet{"alertname": "test"},
					// Custom body template
					Annotations: model.LabelSet{
						ruletypes.AnnotationBodyTemplate: "Firing alert: $alertname",
					},
					StartsAt: time.Now(),
					EndsAt:   time.Now().Add(time.Hour),
				},
			},
		}
		blocks, err := notifier.prepareContent(ctx, alerts)
		require.NoError(t, err)
		require.NotEmpty(t, blocks)
		// First block should be the title with color (firing = red)
		require.Equal(t, "Bolder", blocks[0].Weight)
		require.Equal(t, colorRed, blocks[0].Color)
		// verify title text
		require.Equal(t, "Alertname: test", blocks[0].Text)
		// verify body text
		require.Equal(t, "Firing alert: test", blocks[1].Text)
	})
	// One firing and one resolved alert share a custom body template; the
	// title takes the overall (firing = red) color while each body block is
	// colored by its own alert's state.
	t.Run("custom template - per-alert color", func(t *testing.T) {
		tmpl := test.CreateTmpl(t)
		notifier, err := New(
			&config.MSTeamsV2Config{
				WebhookURL: &config.SecretURL{URL: testWebhookURL},
				HTTPConfig: &commoncfg.HTTPClientConfig{},
			},
			tmpl,
			`{{ template "msteamsv2.default.titleLink" . }}`,
			promslog.NewNopLogger(),
			newTestTemplater(tmpl),
		)
		require.NoError(t, err)
		ctx := context.Background()
		ctx = notify.WithGroupKey(ctx, "1")
		alerts := []*types.Alert{
			{
				Alert: model.Alert{
					Labels: model.LabelSet{"alertname": "test1"},
					Annotations: model.LabelSet{
						"summary": "test",
						ruletypes.AnnotationTitleTemplate: "Custom Title",
						ruletypes.AnnotationBodyTemplate:  "custom body $alertname",
					},
					StartsAt: time.Now(),
					EndsAt:   time.Now().Add(time.Hour),
				},
			},
			{
				Alert: model.Alert{
					Labels: model.LabelSet{"alertname": "test2"},
					Annotations: model.LabelSet{
						"summary": "test",
						ruletypes.AnnotationTitleTemplate: "Custom Title",
						ruletypes.AnnotationBodyTemplate:  "custom body $alertname",
					},
					StartsAt: time.Now().Add(-time.Hour),
					EndsAt:   time.Now().Add(-time.Minute),
				},
			},
		}
		blocks, err := notifier.prepareContent(ctx, alerts)
		require.NoError(t, err)
		require.NotEmpty(t, blocks)
		// total 3 blocks: title and 2 body blocks
		require.True(t, len(blocks) == 3)
		// First block: title color is overall color of the alerts
		require.Equal(t, colorRed, blocks[0].Color)
		// verify title text
		require.Equal(t, "Custom Title", blocks[0].Text)
		// Body blocks should have per-alert color
		require.Equal(t, colorRed, blocks[1].Color)   // firing
		require.Equal(t, colorGreen, blocks[2].Color) // resolved
	})
}
func TestMSTeamsV2ReadingURLFromFile(t *testing.T) {
ctx, u, fn := test.GetContextWithCancelingURL()
defer fn()
@@ -327,16 +209,14 @@ func TestMSTeamsV2ReadingURLFromFile(t *testing.T) {
_, err = f.WriteString(u.String() + "\n")
require.NoError(t, err, "writing to temp file failed")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.MSTeamsV2Config{
WebhookURLFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
`{{ template "msteamsv2.default.titleLink" . }}`,
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)

View File

@@ -15,10 +15,7 @@ import (
"os"
"strings"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/templating/markdownrenderer"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
@@ -37,27 +34,25 @@ const maxMessageLenRunes = 130
// Notifier implements a Notifier for OpsGenie notifications.
type Notifier struct {
conf *config.OpsGenieConfig
tmpl *template.Template
logger *slog.Logger
client *http.Client
retrier *notify.Retrier
templater alertmanagertypes.Templater
conf *config.OpsGenieConfig
tmpl *template.Template
logger *slog.Logger
client *http.Client
retrier *notify.Retrier
}
// New returns a new OpsGenie notifier.
func New(c *config.OpsGenieConfig, t *template.Template, l *slog.Logger, templater alertmanagertypes.Templater, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
func New(c *config.OpsGenieConfig, t *template.Template, l *slog.Logger, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
client, err := notify.NewClientWithTracing(*c.HTTPConfig, Integration, httpOpts...)
if err != nil {
return nil, err
}
return &Notifier{
conf: c,
tmpl: t,
logger: l,
client: client,
retrier: &notify.Retrier{RetryCodes: []int{http.StatusTooManyRequests}},
templater: templater,
conf: c,
tmpl: t,
logger: l,
client: client,
retrier: &notify.Retrier{RetryCodes: []int{http.StatusTooManyRequests}},
}, nil
}
@@ -128,55 +123,6 @@ func safeSplit(s, sep string) []string {
return b
}
// prepareContent expands alert templates and returns the OpsGenie-ready title
// (truncated to the 130-rune limit) and HTML description. Custom bodies are
// rendered to HTML and stitched together with <hr> dividers; default bodies
// are joined with newlines (OpsGenie's legacy plain-text description).
func (n *Notifier) prepareContent(ctx context.Context, alerts []*types.Alert) (string, string, error) {
	customTitle, customBody := alertmanagertemplate.ExtractTemplatesFromAnnotations(alerts)
	result, err := n.templater.Expand(ctx, alertmanagertypes.ExpandRequest{
		TitleTemplate:        customTitle,
		BodyTemplate:         customBody,
		DefaultTitleTemplate: n.conf.Message,
		DefaultBodyTemplate:  n.conf.Description,
	}, alerts)
	if err != nil {
		return "", "", err
	}
	var description string
	if result.IsDefaultBody {
		description = strings.Join(result.Body, "\n")
	} else {
		// Custom-body path: wrap each rendered alert body in a <div>, with
		// <hr> separators only between non-empty parts.
		var b strings.Builder
		first := true
		for _, part := range result.Body {
			if part == "" {
				continue
			}
			rendered, renderErr := markdownrenderer.RenderHTML(part)
			if renderErr != nil {
				return "", "", renderErr
			}
			if !first {
				b.WriteString("<hr>")
			}
			b.WriteString("<div>")
			b.WriteString(rendered)
			b.WriteString("</div>")
			first = false
		}
		description = b.String()
	}
	// OpsGenie rejects messages over maxMessageLenRunes; truncate and warn.
	title, truncated := notify.TruncateInRunes(result.Title, maxMessageLenRunes)
	if truncated {
		n.logger.WarnContext(ctx, "Truncated message", slog.Int("max_runes", maxMessageLenRunes))
	}
	return title, description, nil
}
// Create requests for a list of alerts.
func (n *Notifier) createRequests(ctx context.Context, as ...*types.Alert) ([]*http.Request, bool, error) {
key, err := notify.ExtractGroupKey(ctx)
@@ -222,10 +168,9 @@ func (n *Notifier) createRequests(ctx context.Context, as ...*types.Alert) ([]*h
}
requests = append(requests, req.WithContext(ctx))
default:
message, description, err := n.prepareContent(ctx, as)
if err != nil {
n.logger.ErrorContext(ctx, "failed to prepare notification content", errors.Attr(err))
return nil, false, err
message, truncated := notify.TruncateInRunes(tmpl(n.conf.Message), maxMessageLenRunes)
if truncated {
logger.WarnContext(ctx, "Truncated message", slog.Any("alert", key), slog.Int("max_runes", maxMessageLenRunes))
}
createEndpointURL := n.conf.APIURL.Copy()
@@ -264,7 +209,7 @@ func (n *Notifier) createRequests(ctx context.Context, as ...*types.Alert) ([]*h
msg := &opsGenieCreateMessage{
Alias: alias,
Message: message,
Description: description,
Description: tmpl(n.conf.Description),
Details: details,
Source: tmpl(n.conf.Source),
Responders: responders,

View File

@@ -8,16 +8,12 @@ import (
"context"
"fmt"
"io"
"log/slog"
"net/http"
"net/url"
"os"
"testing"
"time"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/common/promslog"
@@ -26,23 +22,16 @@ import (
"github.com/prometheus/alertmanager/config"
"github.com/prometheus/alertmanager/notify"
"github.com/prometheus/alertmanager/notify/test"
"github.com/prometheus/alertmanager/template"
"github.com/prometheus/alertmanager/types"
)
// newTestTemplater builds a Templater over tmpl with a discard logger so
// notifier tests run without log noise.
func newTestTemplater(tmpl *template.Template) alertmanagertypes.Templater {
	return alertmanagertemplate.New(tmpl, slog.New(slog.DiscardHandler))
}
func TestOpsGenieRetry(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.OpsGenieConfig{
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -58,16 +47,14 @@ func TestOpsGenieRedactedURL(t *testing.T) {
defer fn()
key := "key"
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.OpsGenieConfig{
APIURL: &config.URL{URL: u},
APIKey: config.Secret(key),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -85,16 +72,14 @@ func TestGettingOpsGegineApikeyFromFile(t *testing.T) {
_, err = f.WriteString(key)
require.NoError(t, err, "writing to temp file failed")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.OpsGenieConfig{
APIURL: &config.URL{URL: u},
APIKeyFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -217,7 +202,7 @@ func TestOpsGenie(t *testing.T) {
},
} {
t.Run(tc.title, func(t *testing.T) {
notifier, err := New(tc.cfg, tmpl, logger, newTestTemplater(tmpl))
notifier, err := New(tc.cfg, tmpl, logger)
require.NoError(t, err)
ctx := context.Background()
@@ -293,7 +278,7 @@ func TestOpsGenieWithUpdate(t *testing.T) {
APIURL: &config.URL{URL: u},
HTTPConfig: &commoncfg.HTTPClientConfig{},
}
notifierWithUpdate, err := New(&opsGenieConfigWithUpdate, tmpl, promslog.NewNopLogger(), newTestTemplater(tmpl))
notifierWithUpdate, err := New(&opsGenieConfigWithUpdate, tmpl, promslog.NewNopLogger())
alert := &types.Alert{
Alert: model.Alert{
StartsAt: time.Now(),
@@ -336,7 +321,7 @@ func TestOpsGenieApiKeyFile(t *testing.T) {
APIURL: &config.URL{URL: u},
HTTPConfig: &commoncfg.HTTPClientConfig{},
}
notifierWithUpdate, err := New(&opsGenieConfigWithUpdate, tmpl, promslog.NewNopLogger(), newTestTemplater(tmpl))
notifierWithUpdate, err := New(&opsGenieConfigWithUpdate, tmpl, promslog.NewNopLogger())
require.NoError(t, err)
requests, _, err := notifierWithUpdate.createRequests(ctx)
@@ -344,99 +329,6 @@ func TestOpsGenieApiKeyFile(t *testing.T) {
require.Equal(t, "GenieKey my_secret_api_key", requests[0].Header.Get("Authorization"))
}
// TestPrepareContent exercises Notifier.prepareContent on the OpsGenie
// notifier for two paths: the default path, which renders the configured
// Message/Description Go templates against CommonLabels, and the custom
// path, which expands per-alert $variable annotation templates and joins
// the rendered bodies with <hr> separators.
func TestPrepareContent(t *testing.T) {
t.Run("default template", func(t *testing.T) {
tmpl := test.CreateTmpl(t)
logger := promslog.NewNopLogger()
// Construct the Notifier directly (not via New) so the unexported
// tmpl/logger/templater fields can be populated for the test.
notifier := &Notifier{
conf: &config.OpsGenieConfig{
Message: `{{ .CommonLabels.Message }}`,
Description: `{{ .CommonLabels.Description }}`,
},
tmpl: tmpl,
logger: logger,
templater: newTestTemplater(tmpl),
}
ctx := context.Background()
// prepareContent requires a group key in the context.
ctx = notify.WithGroupKey(ctx, "1")
alert := &types.Alert{
Alert: model.Alert{
Labels: model.LabelSet{
"Message": "Firing alert: test",
"Description": "Check runbook for more details",
},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
}
alerts := []*types.Alert{alert}
title, desc, prepErr := notifier.prepareContent(ctx, alerts)
require.NoError(t, prepErr)
// With no annotation templates present, title/description come
// straight from the labels referenced by the configured templates.
require.Equal(t, "Firing alert: test", title)
require.Equal(t, "Check runbook for more details", desc)
})
t.Run("custom template", func(t *testing.T) {
tmpl := test.CreateTmpl(t)
logger := promslog.NewNopLogger()
notifier := &Notifier{
conf: &config.OpsGenieConfig{
Message: `{{ .CommonLabels.Message }}`,
Description: `{{ .CommonLabels.Description }}`,
},
tmpl: tmpl,
logger: logger,
templater: newTestTemplater(tmpl),
}
ctx := context.Background()
ctx = notify.WithGroupKey(ctx, "1")
// Two alerts sharing the same title template but differing in the
// namespace label, so each produces a distinct rendered body.
alert1 := &types.Alert{
Alert: model.Alert{
Labels: model.LabelSet{
"service": "payment",
"namespace": "potter-the-harry",
},
Annotations: model.LabelSet{
ruletypes.AnnotationTitleTemplate: "High request throughput for $service",
ruletypes.AnnotationBodyTemplate: "Alert firing in NS: $labels.namespace",
},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
}
alert2 := &types.Alert{
Alert: model.Alert{
Labels: model.LabelSet{
"service": "payment",
"namespace": "smart-the-rat",
},
Annotations: model.LabelSet{
ruletypes.AnnotationTitleTemplate: "High request throughput for $service",
ruletypes.AnnotationBodyTemplate: "Alert firing in NS: $labels.namespace",
},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
}
alerts := []*types.Alert{alert1, alert2}
title, desc, err := notifier.prepareContent(ctx, alerts)
require.NoError(t, err)
// The shared title template is expanded once for the group.
require.Equal(t, "High request throughput for payment", title)
// Each alert body wrapped in <div>, separated by <hr>
require.Equal(t, "<div><p>Alert firing in NS: potter-the-harry</p>\n</div><hr><div><p>Alert firing in NS: smart-the-rat</p>\n</div>", desc)
})
}
func readBody(t *testing.T, r *http.Request) string {
t.Helper()
body, err := io.ReadAll(r.Body)

View File

@@ -15,9 +15,7 @@ import (
"os"
"strings"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/alecthomas/units"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
@@ -42,22 +40,21 @@ const (
// Notifier implements a Notifier for PagerDuty notifications.
type Notifier struct {
conf *config.PagerdutyConfig
tmpl *template.Template
logger *slog.Logger
apiV1 string // for tests.
client *http.Client
retrier *notify.Retrier
templater alertmanagertypes.Templater
conf *config.PagerdutyConfig
tmpl *template.Template
logger *slog.Logger
apiV1 string // for tests.
client *http.Client
retrier *notify.Retrier
}
// New returns a new PagerDuty notifier.
func New(c *config.PagerdutyConfig, t *template.Template, l *slog.Logger, templater alertmanagertypes.Templater, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
func New(c *config.PagerdutyConfig, t *template.Template, l *slog.Logger, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
client, err := notify.NewClientWithTracing(*c.HTTPConfig, Integration, httpOpts...)
if err != nil {
return nil, err
}
n := &Notifier{conf: c, tmpl: t, logger: l, client: client, templater: templater}
n := &Notifier{conf: c, tmpl: t, logger: l, client: client}
if c.ServiceKey != "" || c.ServiceKeyFile != "" {
n.apiV1 = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
// Retrying can solve the issue on 403 (rate limiting) and 5xx response codes.
@@ -146,12 +143,11 @@ func (n *Notifier) notifyV1(
key notify.Key,
data *template.Data,
details map[string]any,
title string,
) (bool, error) {
var tmplErr error
tmpl := notify.TmplText(n.tmpl, data, &tmplErr)
description, truncated := notify.TruncateInRunes(title, maxV1DescriptionLenRunes)
description, truncated := notify.TruncateInRunes(tmpl(n.conf.Description), maxV1DescriptionLenRunes)
if truncated {
n.logger.WarnContext(ctx, "Truncated description", slog.Any("key", key), slog.Int("max_runes", maxV1DescriptionLenRunes))
}
@@ -207,7 +203,6 @@ func (n *Notifier) notifyV2(
key notify.Key,
data *template.Data,
details map[string]any,
title string,
) (bool, error) {
var tmplErr error
tmpl := notify.TmplText(n.tmpl, data, &tmplErr)
@@ -216,7 +211,7 @@ func (n *Notifier) notifyV2(
n.conf.Severity = "error"
}
summary, truncated := notify.TruncateInRunes(title, maxV2SummaryLenRunes)
summary, truncated := notify.TruncateInRunes(tmpl(n.conf.Description), maxV2SummaryLenRunes)
if truncated {
n.logger.WarnContext(ctx, "Truncated summary", slog.Any("key", key), slog.Int("max_runes", maxV2SummaryLenRunes))
}
@@ -299,22 +294,6 @@ func (n *Notifier) notifyV2(
return retry, err
}
// prepareTitle expands the notification title. PagerDuty has no body surface
// we care about — the description/summary field is what users see as the
// incident headline, so we feed the configured Description as the default
// title template and ignore any custom body_template entirely.
//
// Returns the expanded title, or an error when template expansion fails.
func (n *Notifier) prepareTitle(ctx context.Context, alerts []*types.Alert) (string, error) {
// A per-alert title annotation template, if present, takes precedence
// over the configured Description; the body template (second return
// value) is intentionally discarded — see the comment above.
customTitle, _ := alertmanagertemplate.ExtractTemplatesFromAnnotations(alerts)
result, err := n.templater.Expand(ctx, alertmanagertypes.ExpandRequest{
TitleTemplate: customTitle,
DefaultTitleTemplate: n.conf.Description,
}, alerts)
if err != nil {
return "", err
}
return result.Title, nil
}
// Notify implements the Notifier interface.
func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
key, err := notify.ExtractGroupKey(ctx)
@@ -323,12 +302,6 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
}
logger := n.logger.With(slog.Any("group_key", key))
title, err := n.prepareTitle(ctx, as)
if err != nil {
n.logger.ErrorContext(ctx, "failed to prepare notification content", errors.Attr(err))
return false, err
}
var (
alerts = types.Alerts(as...)
data = notify.GetTemplateData(ctx, n.tmpl, as, logger)
@@ -356,7 +329,7 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
if n.apiV1 != "" {
nf = n.notifyV1
}
retry, err := nf(ctx, eventType, key, data, details, title)
retry, err := nf(ctx, eventType, key, data, details)
if err != nil {
if ctx.Err() != nil {
err = errors.WrapInternalf(err, errors.CodeInternal, "failed to notify PagerDuty: %v", context.Cause(ctx))

View File

@@ -9,7 +9,6 @@ import (
"context"
"encoding/json"
"io"
"log/slog"
"net/http"
"net/http/httptest"
"net/url"
@@ -18,10 +17,7 @@ import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/common/promslog"
@@ -34,20 +30,14 @@ import (
"github.com/prometheus/alertmanager/types"
)
// newTestTemplater builds a Templater over the given template with logging
// discarded; test-only helper mirroring the one in the other notifier tests.
func newTestTemplater(tmpl *template.Template) alertmanagertypes.Templater {
return alertmanagertemplate.New(tmpl, slog.New(slog.DiscardHandler))
}
func TestPagerDutyRetryV1(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
ServiceKey: config.Secret("01234567890123456789012345678901"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -59,15 +49,13 @@ func TestPagerDutyRetryV1(t *testing.T) {
}
func TestPagerDutyRetryV2(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
RoutingKey: config.Secret("01234567890123456789012345678901"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -83,15 +71,13 @@ func TestPagerDutyRedactedURLV1(t *testing.T) {
defer fn()
key := "01234567890123456789012345678901"
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
ServiceKey: config.Secret(key),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
notifier.apiV1 = u.String()
@@ -104,16 +90,14 @@ func TestPagerDutyRedactedURLV2(t *testing.T) {
defer fn()
key := "01234567890123456789012345678901"
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
URL: &config.URL{URL: u},
RoutingKey: config.Secret(key),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -130,15 +114,13 @@ func TestPagerDutyV1ServiceKeyFromFile(t *testing.T) {
ctx, u, fn := test.GetContextWithCancelingURL()
defer fn()
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
ServiceKeyFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
notifier.apiV1 = u.String()
@@ -156,16 +138,14 @@ func TestPagerDutyV2RoutingKeyFromFile(t *testing.T) {
ctx, u, fn := test.GetContextWithCancelingURL()
defer fn()
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
URL: &config.URL{URL: u},
RoutingKeyFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -322,8 +302,7 @@ func TestPagerDutyTemplating(t *testing.T) {
t.Run(tc.title, func(t *testing.T) {
tc.cfg.URL = &config.URL{URL: u}
tc.cfg.HTTPConfig = &commoncfg.HTTPClientConfig{}
tmpl := test.CreateTmpl(t)
pd, err := New(tc.cfg, tmpl, promslog.NewNopLogger(), newTestTemplater(tmpl))
pd, err := New(tc.cfg, test.CreateTmpl(t), promslog.NewNopLogger())
require.NoError(t, err)
if pd.apiV1 != "" {
pd.apiV1 = u.String()
@@ -413,15 +392,13 @@ func TestEventSizeEnforcement(t *testing.T) {
Details: bigDetailsV1,
}
tmpl := test.CreateTmpl(t)
notifierV1, err := New(
&config.PagerdutyConfig{
ServiceKey: config.Secret("01234567890123456789012345678901"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -443,9 +420,8 @@ func TestEventSizeEnforcement(t *testing.T) {
RoutingKey: config.Secret("01234567890123456789012345678901"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -560,8 +536,7 @@ func TestPagerDutyEmptySrcHref(t *testing.T) {
Links: links,
}
pdTmpl := test.CreateTmpl(t)
pagerDuty, err := New(&pagerDutyConfig, pdTmpl, promslog.NewNopLogger(), newTestTemplater(pdTmpl))
pagerDuty, err := New(&pagerDutyConfig, test.CreateTmpl(t), promslog.NewNopLogger())
require.NoError(t, err)
ctx := context.Background()
@@ -628,8 +603,7 @@ func TestPagerDutyTimeout(t *testing.T) {
Timeout: tt.timeout,
}
tmpl := test.CreateTmpl(t)
pd, err := New(&cfg, tmpl, promslog.NewNopLogger(), newTestTemplater(tmpl))
pd, err := New(&cfg, test.CreateTmpl(t), promslog.NewNopLogger())
require.NoError(t, err)
ctx := context.Background()
@@ -907,79 +881,3 @@ func TestRenderDetails(t *testing.T) {
})
}
}
// TestPrepareContent exercises Notifier.prepareTitle on the PagerDuty
// notifier: the default path renders the configured Description as a Go
// text template, while the custom path expands a $variable title template
// carried on the alert's annotations.
func TestPrepareContent(t *testing.T) {
// prepareContext supplies the group key, receiver name, and group
// labels that the notify pipeline expects on the context.
prepareContext := func() context.Context {
ctx := context.Background()
ctx = notify.WithGroupKey(ctx, "1")
ctx = notify.WithReceiverName(ctx, "test-receiver")
ctx = notify.WithGroupLabels(ctx, model.LabelSet{"alertname": "HighCPU for Payment service"})
return ctx
}
t.Run("default template uses go text template config for title", func(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
RoutingKey: config.Secret("01234567890123456789012345678901"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
Description: `{{ .CommonLabels.alertname }} ({{ .Status | toUpper }})`,
},
tmpl,
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
ctx := prepareContext()
// Alert is firing (EndsAt in the future), so Status renders FIRING.
alerts := []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "HighCPU for Payment service"},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
},
}
title, err := notifier.prepareTitle(ctx, alerts)
require.NoError(t, err)
require.Equal(t, "HighCPU for Payment service (FIRING)", title)
})
t.Run("custom template uses $variable annotation for title", func(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.PagerdutyConfig{
RoutingKey: config.Secret("01234567890123456789012345678901"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
ctx := prepareContext()
// Alert window lies entirely in the past, so $alert.status expands
// to "resolved".
alerts := []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{
"alertname": "HighCPU",
"service": "api-server",
},
Annotations: model.LabelSet{
ruletypes.AnnotationTitleTemplate: "$rule.name on $service is in $alert.status state",
},
StartsAt: time.Now().Add(-time.Hour),
EndsAt: time.Now(),
},
},
}
title, err := notifier.prepareTitle(ctx, alerts)
require.NoError(t, err)
require.Equal(t, "HighCPU on api-server is in resolved state", title)
})
}

View File

@@ -26,15 +26,12 @@ var customNotifierIntegrations = []string{
msteamsv2.Integration,
}
func NewReceiverIntegrations(nc alertmanagertypes.Receiver, tmpl *template.Template, logger *slog.Logger, deps alertmanagertypes.NotificationDeps) ([]notify.Integration, error) {
func NewReceiverIntegrations(nc alertmanagertypes.Receiver, tmpl *template.Template, logger *slog.Logger) ([]notify.Integration, error) {
upstreamIntegrations, err := receiver.BuildReceiverIntegrations(nc, tmpl, logger)
if err != nil {
return nil, err
}
templater := deps.Templater
emailStore := deps.EmailTemplateStore
var (
errs types.MultiError
integrations []notify.Integration
@@ -56,25 +53,23 @@ func NewReceiverIntegrations(nc alertmanagertypes.Receiver, tmpl *template.Templ
}
for i, c := range nc.WebhookConfigs {
add(webhook.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return webhook.New(c, tmpl, l, templater) })
add(webhook.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return webhook.New(c, tmpl, l) })
}
for i, c := range nc.EmailConfigs {
add(email.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) {
return email.New(c, tmpl, l, templater, emailStore), nil
})
add(email.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return email.New(c, tmpl, l), nil })
}
for i, c := range nc.PagerdutyConfigs {
add(pagerduty.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return pagerduty.New(c, tmpl, l, templater) })
add(pagerduty.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return pagerduty.New(c, tmpl, l) })
}
for i, c := range nc.OpsGenieConfigs {
add(opsgenie.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return opsgenie.New(c, tmpl, l, templater) })
add(opsgenie.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return opsgenie.New(c, tmpl, l) })
}
for i, c := range nc.SlackConfigs {
add(slack.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return slack.New(c, tmpl, l, templater) })
add(slack.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) { return slack.New(c, tmpl, l) })
}
for i, c := range nc.MSTeamsV2Configs {
add(msteamsv2.Integration, i, c, func(l *slog.Logger) (notify.Notifier, error) {
return msteamsv2.New(c, tmpl, `{{ template "msteamsv2.default.titleLink" . }}`, l, templater)
return msteamsv2.New(c, tmpl, `{{ template "msteamsv2.default.titleLink" . }}`, l)
})
}

View File

@@ -14,11 +14,7 @@ import (
"os"
"strings"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/templating/markdownrenderer"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/alertmanager/config"
@@ -29,8 +25,6 @@ import (
const (
Integration = "slack"
colorRed = "#FF0000"
colorGreen = "#00FF00"
)
// https://api.slack.com/reference/messaging/attachments#legacy_fields - 1024, no units given, assuming runes or characters.
@@ -38,18 +32,17 @@ const maxTitleLenRunes = 1024
// Notifier implements a Notifier for Slack notifications.
type Notifier struct {
conf *config.SlackConfig
tmpl *template.Template
logger *slog.Logger
client *http.Client
retrier *notify.Retrier
templater alertmanagertypes.Templater
conf *config.SlackConfig
tmpl *template.Template
logger *slog.Logger
client *http.Client
retrier *notify.Retrier
postJSONFunc func(ctx context.Context, client *http.Client, url string, body io.Reader) (*http.Response, error)
}
// New returns a new Slack notification handler.
func New(c *config.SlackConfig, t *template.Template, l *slog.Logger, templater alertmanagertypes.Templater, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
func New(c *config.SlackConfig, t *template.Template, l *slog.Logger, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
client, err := notify.NewClientWithTracing(*c.HTTPConfig, Integration, httpOpts...)
if err != nil {
return nil, err
@@ -61,7 +54,6 @@ func New(c *config.SlackConfig, t *template.Template, l *slog.Logger, templater
logger: l,
client: client,
retrier: &notify.Retrier{},
templater: templater,
postJSONFunc: notify.PostJSON,
}, nil
}
@@ -89,10 +81,9 @@ type attachment struct {
Actions []config.SlackAction `json:"actions,omitempty"`
ImageURL string `json:"image_url,omitempty"`
ThumbURL string `json:"thumb_url,omitempty"`
Footer string `json:"footer,omitempty"`
Footer string `json:"footer"`
Color string `json:"color,omitempty"`
MrkdwnIn []string `json:"mrkdwn_in,omitempty"`
Blocks []any `json:"blocks,omitempty"`
}
// Notify implements the Notifier interface.
@@ -109,15 +100,79 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
data = notify.GetTemplateData(ctx, n.tmpl, as, logger)
tmplText = notify.TmplText(n.tmpl, data, &err)
)
var markdownIn []string
attachments, err := n.prepareContent(ctx, as, tmplText)
if err != nil {
n.logger.ErrorContext(ctx, "failed to prepare notification content", errors.Attr(err))
return false, err
if len(n.conf.MrkdwnIn) == 0 {
markdownIn = []string{"fallback", "pretext", "text"}
} else {
markdownIn = n.conf.MrkdwnIn
}
if len(attachments) > 0 {
n.addFieldsAndActions(&attachments[0], tmplText)
title, truncated := notify.TruncateInRunes(tmplText(n.conf.Title), maxTitleLenRunes)
if truncated {
logger.WarnContext(ctx, "Truncated title", slog.Int("max_runes", maxTitleLenRunes))
}
att := &attachment{
Title: title,
TitleLink: tmplText(n.conf.TitleLink),
Pretext: tmplText(n.conf.Pretext),
Text: tmplText(n.conf.Text),
Fallback: tmplText(n.conf.Fallback),
CallbackID: tmplText(n.conf.CallbackID),
ImageURL: tmplText(n.conf.ImageURL),
ThumbURL: tmplText(n.conf.ThumbURL),
Footer: tmplText(n.conf.Footer),
Color: tmplText(n.conf.Color),
MrkdwnIn: markdownIn,
}
numFields := len(n.conf.Fields)
if numFields > 0 {
fields := make([]config.SlackField, numFields)
for index, field := range n.conf.Fields {
// Check if short was defined for the field otherwise fallback to the global setting
var short bool
if field.Short != nil {
short = *field.Short
} else {
short = n.conf.ShortFields
}
// Rebuild the field by executing any templates and setting the new value for short
fields[index] = config.SlackField{
Title: tmplText(field.Title),
Value: tmplText(field.Value),
Short: &short,
}
}
att.Fields = fields
}
numActions := len(n.conf.Actions)
if numActions > 0 {
actions := make([]config.SlackAction, numActions)
for index, action := range n.conf.Actions {
slackAction := config.SlackAction{
Type: tmplText(action.Type),
Text: tmplText(action.Text),
URL: tmplText(action.URL),
Style: tmplText(action.Style),
Name: tmplText(action.Name),
Value: tmplText(action.Value),
}
if action.ConfirmField != nil {
slackAction.ConfirmField = &config.SlackConfirmationField{
Title: tmplText(action.ConfirmField.Title),
Text: tmplText(action.ConfirmField.Text),
OkText: tmplText(action.ConfirmField.OkText),
DismissText: tmplText(action.ConfirmField.DismissText),
}
}
actions[index] = slackAction
}
att.Actions = actions
}
req := &request{
@@ -127,7 +182,7 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
IconURL: tmplText(n.conf.IconURL),
LinkNames: n.conf.LinkNames,
Text: tmplText(n.conf.MessageText),
Attachments: attachments,
Attachments: []attachment{*att},
}
if err != nil {
return false, err
@@ -183,153 +238,6 @@ func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error)
return retry, nil
}
// prepareContent expands alert templates and returns the Slack attachment(s)
// ready to send. When alerts carry a custom body template, one title-only
// attachment plus one body attachment per alert is returned so that each alert
// can get its own firing/resolved color and per-alert action buttons.
//
// tmplText renders the configured Go text templates (title link, pretext,
// footer, …); template errors surface through the caller's shared error
// pointer, not through this function's return value.
func (n *Notifier) prepareContent(ctx context.Context, alerts []*types.Alert, tmplText func(string) string) ([]attachment, error) {
customTitle, customBody := alertmanagertemplate.ExtractTemplatesFromAnnotations(alerts)
result, err := n.templater.Expand(ctx, alertmanagertypes.ExpandRequest{
TitleTemplate: customTitle,
BodyTemplate: customBody,
DefaultTitleTemplate: n.conf.Title,
// The default path renders the attachment body using the configured
// Text template (and Slack's own mrkdwn flavour) below, so the default
// body template here is left empty.
DefaultBodyTemplate: "",
}, alerts)
if err != nil {
return nil, err
}
// Slack caps legacy attachment titles; truncate and warn rather than fail.
title, truncated := notify.TruncateInRunes(result.Title, maxTitleLenRunes)
if truncated {
n.logger.WarnContext(ctx, "Truncated title", slog.Int("max_runes", maxTitleLenRunes))
}
if result.IsDefaultBody {
// Default path: a single attachment built entirely from the Slack
// config's own templates.
var markdownIn []string
if len(n.conf.MrkdwnIn) == 0 {
markdownIn = []string{"fallback", "pretext", "text"}
} else {
markdownIn = n.conf.MrkdwnIn
}
return []attachment{
{
Title: title,
TitleLink: tmplText(n.conf.TitleLink),
Pretext: tmplText(n.conf.Pretext),
Text: tmplText(n.conf.Text),
Fallback: tmplText(n.conf.Fallback),
CallbackID: tmplText(n.conf.CallbackID),
ImageURL: tmplText(n.conf.ImageURL),
ThumbURL: tmplText(n.conf.ThumbURL),
Footer: tmplText(n.conf.Footer),
Color: tmplText(n.conf.Color),
MrkdwnIn: markdownIn,
},
}, nil
}
// Custom template path: one title attachment + one attachment per
// non-empty alert body. result.Body is positionally aligned with alerts,
// so we index alerts[i] directly and skip empty entries.
attachments := make([]attachment, 0, 1+len(result.Body))
attachments = append(attachments, attachment{
Title: title,
TitleLink: tmplText(n.conf.TitleLink),
})
for i, body := range result.Body {
if body == "" || i >= len(alerts) {
continue
}
// Custom bodies are authored in markdown; render each non-empty body to
// Slack's mrkdwn flavour. Default bodies skip this because the Text
// template is already channel-ready.
rendered, renderErr := markdownrenderer.RenderSlackMrkdwn(body)
if renderErr != nil {
return nil, renderErr
}
// Per-alert color: red while firing, green once resolved.
color := colorRed
if alerts[i].Resolved() {
color = colorGreen
}
attachments = append(attachments, attachment{
Text: rendered,
Color: color,
MrkdwnIn: []string{"text"},
Actions: buildRelatedLinkActions(alerts[i]),
})
}
return attachments, nil
}
// buildRelatedLinkActions returns the "View Related Logs/Traces" action
// buttons for an alert, or nil when no related-link annotations are present.
// Each annotation that is set yields one button-type Slack action pointing
// at the annotated URL.
func buildRelatedLinkActions(alert *types.Alert) []config.SlackAction {
var actions []config.SlackAction
if link := alert.Annotations[ruletypes.AnnotationRelatedLogs]; link != "" {
actions = append(actions, config.SlackAction{Type: "button", Text: "View Related Logs", URL: string(link)})
}
if link := alert.Annotations[ruletypes.AnnotationRelatedTraces]; link != "" {
actions = append(actions, config.SlackAction{Type: "button", Text: "View Related Traces", URL: string(link)})
}
return actions
}
// addFieldsAndActions populates fields and actions on the attachment from the Slack config.
// Every user-supplied string (field titles/values, action text, URLs,
// confirmation prompts) is run through tmplText so Go templates in the
// config are expanded before the attachment is sent.
func (n *Notifier) addFieldsAndActions(att *attachment, tmplText func(string) string) {
numFields := len(n.conf.Fields)
if numFields > 0 {
fields := make([]config.SlackField, numFields)
for index, field := range n.conf.Fields {
// Use the field's own "short" setting if present, otherwise fall
// back to the config-wide ShortFields default.
var short bool
if field.Short != nil {
short = *field.Short
} else {
short = n.conf.ShortFields
}
// Rebuild the field with templates executed and the resolved
// short flag.
fields[index] = config.SlackField{
Title: tmplText(field.Title),
Value: tmplText(field.Value),
Short: &short,
}
}
att.Fields = fields
}
numActions := len(n.conf.Actions)
if numActions > 0 {
actions := make([]config.SlackAction, numActions)
for index, action := range n.conf.Actions {
slackAction := config.SlackAction{
Type: tmplText(action.Type),
Text: tmplText(action.Text),
URL: tmplText(action.URL),
Style: tmplText(action.Style),
Name: tmplText(action.Name),
Value: tmplText(action.Value),
}
// Confirmation dialogs are optional; only template them when set.
if action.ConfirmField != nil {
slackAction.ConfirmField = &config.SlackConfirmationField{
Title: tmplText(action.ConfirmField.Title),
Text: tmplText(action.ConfirmField.Text),
OkText: tmplText(action.ConfirmField.OkText),
DismissText: tmplText(action.ConfirmField.DismissText),
}
}
actions[index] = slackAction
}
att.Actions = actions
}
}
// checkResponseError parses out the error message from Slack API response.
func checkResponseError(resp *http.Response) (bool, error) {
body, err := io.ReadAll(resp.Body)

View File

@@ -17,9 +17,6 @@ import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/common/promslog"
@@ -32,19 +29,13 @@ import (
"github.com/prometheus/alertmanager/types"
)
// newTestTemplater builds a Templater over the given template with logging
// discarded; test-only helper mirroring the one in the other notifier tests.
func newTestTemplater(tmpl *template.Template) alertmanagertypes.Templater {
return alertmanagertemplate.New(tmpl, slog.New(slog.DiscardHandler))
}
func TestSlackRetry(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.SlackConfig{
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -58,15 +49,13 @@ func TestSlackRedactedURL(t *testing.T) {
ctx, u, fn := test.GetContextWithCancelingURL()
defer fn()
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.SlackConfig{
APIURL: &config.SecretURL{URL: u},
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -82,15 +71,13 @@ func TestGettingSlackURLFromFile(t *testing.T) {
_, err = f.WriteString(u.String())
require.NoError(t, err, "writing to temp file failed")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.SlackConfig{
APIURLFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -106,15 +93,13 @@ func TestTrimmingSlackURLFromFile(t *testing.T) {
_, err = f.WriteString(u.String() + "\n\n")
require.NoError(t, err, "writing to temp file failed")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.SlackConfig{
APIURLFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -199,7 +184,6 @@ func TestNotifier_Notify_WithReason(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
apiurl, _ := url.Parse("https://slack.com/post.Message")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.SlackConfig{
NotifierConfig: config.NotifierConfig{},
@@ -207,9 +191,8 @@ func TestNotifier_Notify_WithReason(t *testing.T) {
APIURL: &config.SecretURL{URL: apiurl},
Channel: "channelname",
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
@@ -259,7 +242,6 @@ func TestSlackTimeout(t *testing.T) {
for name, tt := range tests {
t.Run(name, func(t *testing.T) {
u, _ := url.Parse("https://slack.com/post.Message")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.SlackConfig{
NotifierConfig: config.NotifierConfig{},
@@ -268,9 +250,8 @@ func TestSlackTimeout(t *testing.T) {
Channel: "channelname",
Timeout: tt.timeout,
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
newTestTemplater(tmpl),
)
require.NoError(t, err)
notifier.postJSONFunc = func(ctx context.Context, client *http.Client, url string, body io.Reader) (*http.Response, error) {
@@ -301,225 +282,6 @@ func TestSlackTimeout(t *testing.T) {
}
}
// setupTestContext creates a context with group key, receiver name, and group labels
// required by the notification processor.
// Uses fixed test values ("test-group"/"slack") so assertions in the
// Slack notifier tests are deterministic.
func setupTestContext() context.Context {
ctx := context.Background()
ctx = notify.WithGroupKey(ctx, "test-group")
ctx = notify.WithReceiverName(ctx, "slack")
ctx = notify.WithGroupLabels(ctx, model.LabelSet{
"alertname": "TestAlert",
"severity": "critical",
})
return ctx
}
// TestPrepareContent exercises Notifier.prepareContent across its two
// templating paths: (1) the default path where SlackConfig Title/Text/Color
// go text templates are rendered through tmplText, and (2) the custom path
// where per-alert annotation templates (title_template / body_template)
// produce one title attachment plus one body attachment per alert. It also
// verifies addFieldsAndActions, which Notify runs after prepareContent.
func TestPrepareContent(t *testing.T) {
	t.Run("default template uses go text template config for title and body", func(t *testing.T) {
		// When alerts have no custom annotation templates (title_template / body_template),
		// prepareContent falls back to rendering the SlackConfig templates.
		tmpl := test.CreateTmpl(t)
		templater := newTestTemplater(tmpl)
		notifier := &Notifier{
			conf: &config.SlackConfig{
				Title:     `{{ .CommonLabels.alertname }} ({{ .Status | toUpper }})`,
				Text:      `{{ range .Alerts }}Alert: {{ .Labels.alertname }} - severity {{ .Labels.severity }}{{ end }}`,
				Color:     `{{ if eq .Status "firing" }}danger{{ else }}good{{ end }}`,
				TitleLink: "https://alertmanager.signoz.com",
			},
			tmpl:      tmpl,
			logger:    slog.New(slog.DiscardHandler),
			templater: templater,
		}
		ctx := setupTestContext()
		// Single firing alert (EndsAt in the future), so .Status is "firing".
		alerts := []*types.Alert{
			{Alert: model.Alert{
				Labels:   model.LabelSet{ruletypes.LabelAlertName: "HighCPU", ruletypes.LabelSeverityName: "critical"},
				StartsAt: time.Now(),
				EndsAt:   time.Now().Add(time.Hour),
			}},
		}
		// Build tmplText the same way Notify does: template errors are
		// accumulated into err rather than returned per call.
		var err error
		data := notify.GetTemplateData(ctx, tmpl, alerts, slog.New(slog.DiscardHandler))
		tmplText := notify.TmplText(tmpl, data, &err)
		atts, attErr := notifier.prepareContent(ctx, alerts, tmplText)
		require.NoError(t, attErr)
		require.NoError(t, err)
		require.Len(t, atts, 1)
		require.Equal(t, "HighCPU (FIRING)", atts[0].Title)
		require.Equal(t, "Alert: HighCPU - severity critical", atts[0].Text)
		// Color is templated — firing alert should be "danger"
		require.Equal(t, "danger", atts[0].Color)
		// No BlockKit blocks for default template
		require.Nil(t, atts[0].Blocks)
		// Default markdownIn when config has none
		require.Equal(t, []string{"fallback", "pretext", "text"}, atts[0].MrkdwnIn)
	})
	t.Run("custom template produces 1+N attachments with per-alert color", func(t *testing.T) {
		// When alerts carry custom $variable annotation templates (title_template / body_template)
		// the config Title/Text act only as fallbacks and per-alert bodies are rendered.
		tmpl := test.CreateTmpl(t)
		templater := newTestTemplater(tmpl)
		notifier := &Notifier{
			conf: &config.SlackConfig{
				Title:     "default title fallback",
				Text:      "default text fallback",
				TitleLink: "https://alertmanager.signoz.com",
			},
			tmpl:      tmpl,
			logger:    slog.New(slog.DiscardHandler),
			templater: templater,
		}
		// Identity renderer: custom $variable templates are expanded by the
		// templater, not by the alertmanager go-template pipeline.
		tmplText := func(s string) string { return s }
		// Markdown body: headings, bold/italic, a rule, and a table — the
		// expected outputs below assert the slack-mrkdwn conversion of each.
		bodyTemplate := `## $rule.name
**Service:** *$labels.service*
**Instance:** *$labels.instance*
**Region:** *$labels.region*
**Method:** *$labels.http_method*
---
| Metric | Value |
|--------|-------|
| **Current** | *$value* |
| **Threshold** | *$threshold.value* |
**Status:** $alert.status | **Severity:** $labels.severity`
		titleTemplate := "[$alert.status] $rule.name — $labels.service"
		ctx := setupTestContext()
		// Firing alert: EndsAt in the future; value annotation "100".
		firingAlert := &types.Alert{
			Alert: model.Alert{
				Labels:   model.LabelSet{ruletypes.LabelAlertName: "HighCPU", ruletypes.LabelSeverityName: "critical", "service": "api-server", "instance": "i-0abc123", "region": "us-east-1", "http_method": "GET"},
				StartsAt: time.Now(),
				EndsAt:   time.Now().Add(time.Hour),
				Annotations: model.LabelSet{
					ruletypes.AnnotationTitleTemplate: model.LabelValue(titleTemplate),
					ruletypes.AnnotationBodyTemplate:  model.LabelValue(bodyTemplate),
					"value":                           "100",
					"threshold.value":                 "200",
				},
			},
		}
		// Resolved alert: EndsAt in the past; value annotation "50".
		resolvedAlert := &types.Alert{
			Alert: model.Alert{
				Labels:   model.LabelSet{ruletypes.LabelAlertName: "HighCPU", ruletypes.LabelSeverityName: "critical", "service": "api-server", "instance": "i-0abc123", "region": "us-east-1", "http_method": "GET"},
				StartsAt: time.Now().Add(-2 * time.Hour),
				EndsAt:   time.Now().Add(-time.Hour),
				Annotations: model.LabelSet{
					ruletypes.AnnotationTitleTemplate: model.LabelValue(titleTemplate),
					ruletypes.AnnotationBodyTemplate:  model.LabelValue(bodyTemplate),
					"value":                           "50",
					"threshold.value":                 "200",
				},
			},
		}
		atts, err := notifier.prepareContent(ctx, []*types.Alert{firingAlert, resolvedAlert}, tmplText)
		require.NoError(t, err)
		// 1 title attachment + 2 body attachments (one per alert)
		require.Len(t, atts, 3)
		// First attachment: title-only, no color, no blocks
		require.Equal(t, "[firing] HighCPU — api-server", atts[0].Title)
		require.Empty(t, atts[0].Color)
		require.Nil(t, atts[0].Blocks)
		require.Equal(t, "https://alertmanager.signoz.com", atts[0].TitleLink)
		// Expected slack-mrkdwn renderings: "##" heading → bold, "*x*" → _x_,
		// the markdown table → a fenced code block, "$value" substituted per alert.
		expectedFiringBody := "*HighCPU*\n\n" +
			"*Service:* _api-server_\n*Instance:* _i-0abc123_\n*Region:* _us-east-1_\n*Method:* _GET_\n\n" +
			"---\n\n" +
			"```\nMetric | Value\n----------|------\nCurrent | 100 \nThreshold | 200 \n```\n\n" +
			"*Status:* firing | *Severity:* critical\n\n"
		expectedResolvedBody := "*HighCPU*\n\n" +
			"*Service:* _api-server_\n*Instance:* _i-0abc123_\n*Region:* _us-east-1_\n*Method:* _GET_\n\n" +
			"---\n\n" +
			"```\nMetric | Value\n----------|------\nCurrent | 50 \nThreshold | 200 \n```\n\n" +
			"*Status:* resolved | *Severity:* critical\n\n"
		// Second attachment: firing alert body rendered as slack mrkdwn text, red color
		require.Nil(t, atts[1].Blocks)
		require.Equal(t, "#FF0000", atts[1].Color)
		require.Equal(t, []string{"text"}, atts[1].MrkdwnIn)
		require.Equal(t, expectedFiringBody, atts[1].Text)
		// Third attachment: resolved alert body rendered as slack mrkdwn text, green color
		require.Nil(t, atts[2].Blocks)
		require.Equal(t, "#00FF00", atts[2].Color)
		require.Equal(t, []string{"text"}, atts[2].MrkdwnIn)
		require.Equal(t, expectedResolvedBody, atts[2].Text)
	})
	t.Run("default template with fields and actions", func(t *testing.T) {
		// Verifies that addFieldsAndActions (called from Notify after prepareContent)
		// correctly populates fields and actions on the attachment from config.
		tmpl := test.CreateTmpl(t)
		templater := newTestTemplater(tmpl)
		short := true
		notifier := &Notifier{
			conf: &config.SlackConfig{
				Title: `{{ .CommonLabels.alertname }}`,
				Text:  "alert text",
				Color: "warning",
				Fields: []*config.SlackField{
					{Title: "Severity", Value: "critical", Short: &short},
					{Title: "Service", Value: "api-server", Short: &short},
				},
				Actions: []*config.SlackAction{
					{Type: "button", Text: "View Alert", URL: "https://alertmanager.signoz.com"},
				},
				TitleLink: "https://alertmanager.signoz.com",
			},
			tmpl:      tmpl,
			logger:    slog.New(slog.DiscardHandler),
			templater: templater,
		}
		// Identity renderer is sufficient here — fields/actions hold plain strings.
		tmplText := func(s string) string { return s }
		ctx := setupTestContext()
		alerts := []*types.Alert{
			{Alert: model.Alert{
				Labels:   model.LabelSet{ruletypes.LabelAlertName: "TestAlert"},
				StartsAt: time.Now(),
				EndsAt:   time.Now().Add(time.Hour),
			}},
		}
		atts, err := notifier.prepareContent(ctx, alerts, tmplText)
		require.NoError(t, err)
		require.Len(t, atts, 1)
		// prepareContent does not populate fields/actions — that's done by
		// addFieldsAndActions which is called from Notify.
		require.Nil(t, atts[0].Fields)
		require.Nil(t, atts[0].Actions)
		// Simulate what Notify does after prepareContent
		notifier.addFieldsAndActions(&atts[0], tmplText)
		// Verify fields
		require.Len(t, atts[0].Fields, 2)
		require.Equal(t, "Severity", atts[0].Fields[0].Title)
		require.Equal(t, "critical", atts[0].Fields[0].Value)
		require.True(t, *atts[0].Fields[0].Short)
		require.Equal(t, "Service", atts[0].Fields[1].Title)
		require.Equal(t, "api-server", atts[0].Fields[1].Value)
		// Verify actions
		require.Len(t, atts[0].Actions, 1)
		require.Equal(t, "button", atts[0].Actions[0].Type)
		require.Equal(t, "View Alert", atts[0].Actions[0].Text)
		require.Equal(t, "https://alertmanager.signoz.com", atts[0].Actions[0].URL)
	})
}
func TestSlackMessageField(t *testing.T) {
// 1. Setup a fake Slack server
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -567,7 +329,7 @@ func TestSlackMessageField(t *testing.T) {
tmpl.ExternalURL = u
logger := slog.New(slog.DiscardHandler)
notifier, err := New(conf, tmpl, logger, newTestTemplater(tmpl))
notifier, err := New(conf, tmpl, logger)
if err != nil {
t.Fatal(err)
}

View File

@@ -13,12 +13,8 @@ import (
"os"
"strings"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
commoncfg "github.com/prometheus/common/config"
"github.com/prometheus/common/model"
"github.com/prometheus/alertmanager/config"
"github.com/prometheus/alertmanager/notify"
@@ -27,23 +23,20 @@ import (
)
const (
Integration = "webhook"
templatedTitle = "templated_title"
templatedBody = "templated_body"
Integration = "webhook"
)
// Notifier implements a Notifier for generic webhooks.
type Notifier struct {
conf *config.WebhookConfig
tmpl *template.Template
logger *slog.Logger
client *http.Client
retrier *notify.Retrier
templater alertmanagertemplate.Templater
conf *config.WebhookConfig
tmpl *template.Template
logger *slog.Logger
client *http.Client
retrier *notify.Retrier
}
// New returns a new Webhook.
func New(conf *config.WebhookConfig, t *template.Template, l *slog.Logger, templater alertmanagertemplate.Templater, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
func New(conf *config.WebhookConfig, t *template.Template, l *slog.Logger, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) {
client, err := notify.NewClientWithTracing(*conf.HTTPConfig, Integration, httpOpts...)
if err != nil {
return nil, err
@@ -55,8 +48,7 @@ func New(conf *config.WebhookConfig, t *template.Template, l *slog.Logger, templ
client: client,
// Webhooks are assumed to respond with 2xx response codes on a successful
// request and 5xx response codes are assumed to be recoverable.
retrier: &notify.Retrier{},
templater: templater,
retrier: &notify.Retrier{},
}, nil
}
@@ -78,48 +70,9 @@ func truncateAlerts(maxAlerts uint64, alerts []*types.Alert) ([]*types.Alert, ui
return alerts, 0
}
// templateTitleBody extracts custom templates from alert annotations, expands them and
// replaces the private annotations with the rendered title and body.
func (n *Notifier) templateTitleBody(ctx context.Context, alerts []*types.Alert) error {
customTitle, customBody := alertmanagertemplate.ExtractTemplatesFromAnnotations(alerts)
result, err := n.templater.Expand(ctx, alertmanagertypes.ExpandRequest{
TitleTemplate: customTitle,
BodyTemplate: customBody,
}, alerts)
if err != nil {
return err
}
for i, alert := range alerts {
if alert.Annotations == nil {
continue
}
// Update templated_title annotation with rendered title, only if key exists and result is non-blank
if _, ok := alert.Annotations[ruletypes.AnnotationTitleTemplate]; ok {
delete(alert.Annotations, ruletypes.AnnotationTitleTemplate)
if result.Title != "" {
alert.Annotations[templatedTitle] = model.LabelValue(result.Title)
}
}
// Update templated_body annotation with rendered body, only if key exists and result is non-blank
if _, ok := alert.Annotations[ruletypes.AnnotationBodyTemplate]; ok {
delete(alert.Annotations, ruletypes.AnnotationBodyTemplate)
if i < len(result.Body) && result.Body[i] != "" {
alert.Annotations[templatedBody] = model.LabelValue(result.Body[i])
}
}
}
return nil
}
// Notify implements the Notifier interface.
func (n *Notifier) Notify(ctx context.Context, alerts ...*types.Alert) (bool, error) {
alerts, numTruncated := truncateAlerts(n.conf.MaxAlerts, alerts)
// expand custom templating annotations
if err := n.templateTitleBody(ctx, alerts); err != nil {
n.logger.ErrorContext(ctx, "failed to prepare notification content", errors.Attr(err))
}
data := notify.GetTemplateData(ctx, n.tmpl, alerts, n.logger)
groupKey, err := notify.ExtractGroupKey(ctx)

View File

@@ -9,7 +9,6 @@ import (
"context"
"fmt"
"io"
"log/slog"
"net/http"
"net/http/httptest"
"os"
@@ -21,8 +20,6 @@ import (
"github.com/prometheus/common/promslog"
"github.com/stretchr/testify/require"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/prometheus/alertmanager/config"
"github.com/prometheus/alertmanager/notify"
"github.com/prometheus/alertmanager/notify/test"
@@ -30,15 +27,13 @@ import (
)
func TestWebhookRetry(t *testing.T) {
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.WebhookConfig{
URL: config.SecretTemplateURL("http://example.com"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
alertmanagertemplate.New(tmpl, slog.Default()),
)
if err != nil {
require.NoError(t, err)
@@ -101,16 +96,13 @@ func TestWebhookRedactedURL(t *testing.T) {
defer fn()
secret := "secret"
tmpl := test.CreateTmpl(t)
templater := alertmanagertemplate.New(tmpl, slog.Default())
notifier, err := New(
&config.WebhookConfig{
URL: config.SecretTemplateURL(u.String()),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
templater,
)
require.NoError(t, err)
@@ -126,15 +118,13 @@ func TestWebhookReadingURLFromFile(t *testing.T) {
_, err = f.WriteString(u.String() + "\n")
require.NoError(t, err, "writing to temp file failed")
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.WebhookConfig{
URLFile: f.Name(),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
alertmanagertemplate.New(tmpl, slog.Default()),
)
require.NoError(t, err)
@@ -188,15 +178,13 @@ func TestWebhookURLTemplating(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
calledURL = "" // Reset for each test
tmpl := test.CreateTmpl(t)
notifier, err := New(
&config.WebhookConfig{
URL: config.SecretTemplateURL(tc.url),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
test.CreateTmpl(t),
promslog.NewNopLogger(),
alertmanagertemplate.New(tmpl, slog.Default()),
)
require.NoError(t, err)
@@ -228,103 +216,3 @@ func TestWebhookURLTemplating(t *testing.T) {
})
}
}
func TestTemplateTitleBody(t *testing.T) {
tmpl := test.CreateTmpl(t)
templater := alertmanagertemplate.New(tmpl, slog.Default())
notifier, err := New(
&config.WebhookConfig{
URL: config.SecretTemplateURL("http://example.com"),
HTTPConfig: &commoncfg.HTTPClientConfig{},
},
tmpl,
slog.Default(),
templater,
)
require.NoError(t, err)
t.Run("annotations are updated with custom title and body templates", func(t *testing.T) {
alerts := []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{
"alertname": "TestAlert",
"severity": "critical",
},
Annotations: model.LabelSet{
ruletypes.AnnotationTitleTemplate: "Alert: $labels.alertname",
ruletypes.AnnotationBodyTemplate: "Severity is $labels.severity",
},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
},
{
Alert: model.Alert{
Labels: model.LabelSet{
"alertname": "TestAlert",
"severity": "warning",
},
Annotations: model.LabelSet{
ruletypes.AnnotationTitleTemplate: "Alert: $labels.alertname",
ruletypes.AnnotationBodyTemplate: "Severity is $labels.severity",
},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
},
}
ctx := context.Background()
err := notifier.templateTitleBody(ctx, alerts)
require.NoError(t, err)
for _, a := range alerts {
_, hasTitleTmpl := a.Annotations[ruletypes.AnnotationTitleTemplate]
_, hasBodyTmpl := a.Annotations[ruletypes.AnnotationBodyTemplate]
require.False(t, hasTitleTmpl, "private title template key should be stripped")
require.False(t, hasBodyTmpl, "private body template key should be stripped")
}
require.Equal(t, model.LabelValue("Alert: TestAlert"), alerts[0].Annotations[templatedTitle])
require.Equal(t, model.LabelValue("Alert: TestAlert"), alerts[1].Annotations[templatedTitle])
require.Equal(t, model.LabelValue("Severity is critical"), alerts[0].Annotations[templatedBody])
require.Equal(t, model.LabelValue("Severity is warning"), alerts[1].Annotations[templatedBody])
})
t.Run("annotations not updated when template keys are absent", func(t *testing.T) {
alerts := []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{
"alertname": "NoTemplateAlert",
},
Annotations: model.LabelSet{
"summary": "keep this",
"description": "keep this too",
},
StartsAt: time.Now(),
EndsAt: time.Now().Add(time.Hour),
},
},
}
ctx := context.Background()
err := notifier.templateTitleBody(ctx, alerts)
require.NoError(t, err)
_, hasTitleTemplate := alerts[0].Annotations[ruletypes.AnnotationTitleTemplate]
_, hasBodyTemplate := alerts[0].Annotations[ruletypes.AnnotationBodyTemplate]
require.False(t, hasTitleTemplate, "title_template should not be added when absent")
require.False(t, hasBodyTemplate, "body_template should not be added when absent")
_, hasTemplatedTitle := alerts[0].Annotations[templatedTitle]
_, hasTemplatedBody := alerts[0].Annotations[templatedBody]
require.False(t, hasTemplatedTitle, "templated_title should not be added when no custom templates")
require.False(t, hasTemplatedBody, "templated_body should not be added when no custom templates")
require.Equal(t, model.LabelValue("keep this"), alerts[0].Annotations["summary"])
require.Equal(t, model.LabelValue("keep this too"), alerts[0].Annotations["description"])
})
}

View File

@@ -28,9 +28,6 @@ type Config struct {
// Configuration for the notification log.
NFLog NFLogConfig `mapstructure:"nflog"`
// EmailTemplatesDirectory is the directory containing email layout templates (.gotmpl files).
EmailTemplatesDirectory string `mapstructure:"email_templates_directory"`
}
type AlertsConfig struct {
@@ -103,6 +100,5 @@ func NewConfig() Config {
MaintenanceInterval: 15 * time.Minute,
Retention: 120 * time.Hour,
},
EmailTemplatesDirectory: "/root/templates",
}
}

View File

@@ -10,7 +10,6 @@ import (
"github.com/prometheus/alertmanager/types"
"golang.org/x/sync/errgroup"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/prometheus/alertmanager/dispatch"
"github.com/prometheus/alertmanager/featurecontrol"
"github.com/prometheus/alertmanager/inhibit"
@@ -24,11 +23,9 @@ import (
"github.com/prometheus/common/model"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagernotify"
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertemplate"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager"
"github.com/SigNoz/signoz/pkg/emailing/templatestore/filetemplatestore"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
)
var (
@@ -69,8 +66,6 @@ type Server struct {
pipelineBuilder *notify.PipelineBuilder
marker *alertmanagertypes.MemMarker
tmpl *template.Template
notificationDeps alertmanagertypes.NotificationDeps
emailTemplateStore emailtypes.TemplateStore
wg sync.WaitGroup
stopc chan struct{}
notificationManager nfmanager.NotificationManager
@@ -203,12 +198,6 @@ func New(ctx context.Context, logger *slog.Logger, registry prometheus.Registere
server.pipelineBuilder = notify.NewPipelineBuilder(signozRegisterer, featurecontrol.NoopFlags{})
server.dispatcherMetrics = NewDispatcherMetrics(false, signozRegisterer)
emailTemplateStore, storeErr := filetemplatestore.NewStore(ctx, srvConfig.EmailTemplatesDirectory, emailtypes.Templates, server.logger)
if storeErr != nil {
server.logger.ErrorContext(ctx, "failed to create alert email template store, using empty store", errors.Attr(storeErr))
emailTemplateStore = filetemplatestore.NewEmptyStore()
}
server.emailTemplateStore = emailTemplateStore
return server, nil
}
@@ -245,11 +234,6 @@ func (server *Server) SetConfig(ctx context.Context, alertmanagerConfig *alertma
server.tmpl.ExternalURL = server.srvConfig.ExternalURL
server.notificationDeps = alertmanagertypes.NotificationDeps{
Templater: alertmanagertemplate.New(server.tmpl, server.logger),
EmailTemplateStore: server.emailTemplateStore,
}
// Build the routing tree and record which receivers are used.
routes := dispatch.NewRoute(config.Route, nil)
activeReceivers := make(map[string]struct{})
@@ -266,7 +250,7 @@ func (server *Server) SetConfig(ctx context.Context, alertmanagerConfig *alertma
server.logger.InfoContext(ctx, "skipping creation of receiver not referenced by any route", slog.String("receiver", rcv.Name))
continue
}
integrations, err := alertmanagernotify.NewReceiverIntegrations(rcv, server.tmpl, server.logger, server.notificationDeps)
integrations, err := alertmanagernotify.NewReceiverIntegrations(rcv, server.tmpl, server.logger)
if err != nil {
return err
}
@@ -342,7 +326,7 @@ func (server *Server) SetConfig(ctx context.Context, alertmanagerConfig *alertma
func (server *Server) TestReceiver(ctx context.Context, receiver alertmanagertypes.Receiver) error {
testAlert := alertmanagertypes.NewTestAlert(receiver, time.Now(), time.Now())
return alertmanagertypes.TestReceiver(ctx, receiver, alertmanagernotify.NewReceiverIntegrations, server.alertmanagerConfig, server.tmpl, server.logger, server.notificationDeps, testAlert.Labels, testAlert)
return alertmanagertypes.TestReceiver(ctx, receiver, alertmanagernotify.NewReceiverIntegrations, server.alertmanagerConfig, server.tmpl, server.logger, testAlert.Labels, testAlert)
}
func (server *Server) TestAlert(ctx context.Context, receiversMap map[*alertmanagertypes.PostableAlert][]string, config *alertmanagertypes.NotificationConfig) error {
@@ -425,7 +409,6 @@ func (server *Server) TestAlert(ctx context.Context, receiversMap map[*alertmana
server.alertmanagerConfig,
server.tmpl,
server.logger,
server.notificationDeps,
group.groupLabels,
group.alerts...,
)

View File

@@ -137,9 +137,6 @@ func (at *templater) expandTitle(
}
// expandBody expands the body template for each individual alert. Returns nil if the template is empty.
// Non-nil results are positionally aligned with ntd.Alerts: sb[i] corresponds to alerts[i], and
// entries for alerts whose template expands to empty are kept as "" so callers can index per-alert
// metadata (related links, firing/resolved color) by the same index.
func (at *templater) expandBody(
bodyTemplate string,
ntd *alertmanagertypes.NotificationTemplateData,
@@ -147,7 +144,7 @@ func (at *templater) expandBody(
if bodyTemplate == "" {
return nil, nil, nil
}
sb := make([]string, len(ntd.Alerts))
var sb []string
missingVars := make(map[string]bool)
for i := range ntd.Alerts {
processRes, err := preProcessTemplateAndData(bodyTemplate, &ntd.Alerts[i])
@@ -158,10 +155,13 @@ func (at *templater) expandBody(
if err != nil {
return nil, nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to execute custom body template: %s", err.Error())
}
// add unknown variables and templated text to the result
for k := range processRes.UnknownVars {
missingVars[k] = true
}
sb[i] = strings.TrimSpace(part)
if strings.TrimSpace(part) != "" {
sb = append(sb, strings.TrimSpace(part))
}
}
return sb, missingVars, nil
}
@@ -189,20 +189,17 @@ func (at *templater) buildNotificationTemplateData(
externalURL = at.tmpl.ExternalURL.String()
}
// Raw (including private `_*`) kv first so buildRuleInfo can read the
// private rule-metadata annotations (threshold, op, match_type). The
// filtered copies are what ends up on the template-visible surfaces.
rawCommonAnnotations := extractCommonKV(alerts, func(a *types.Alert) model.LabelSet { return a.Annotations })
commonAnnotations := extractCommonKV(alerts, func(a *types.Alert) model.LabelSet { return a.Annotations })
commonLabels := extractCommonKV(alerts, func(a *types.Alert) model.LabelSet { return a.Labels })
// aggregate labels and annotations from all alerts
labels := aggregateKV(alerts, func(a *types.Alert) model.LabelSet { return a.Labels })
annotations := aggregateKV(alerts, func(a *types.Alert) model.LabelSet { return a.Annotations })
// Strip private annotations from template-visible surfaces; the structured
// fields on AlertInfo/RuleInfo already hold anything a template needs from
// them.
commonAnnotations := alertmanagertypes.FilterPublicAnnotations(rawCommonAnnotations)
// Strip private annotations from surfaces visible to templates or
// notifications; the structured fields on AlertInfo/RuleInfo already hold
// anything a template needs from them.
commonAnnotations = alertmanagertypes.FilterPublicAnnotations(commonAnnotations)
annotations = alertmanagertypes.FilterPublicAnnotations(annotations)
// build the alert data slice
@@ -236,7 +233,7 @@ func (at *templater) buildNotificationTemplateData(
TotalFiring: firing,
TotalResolved: resolved,
},
Rule: buildRuleInfo(commonLabels, rawCommonAnnotations),
Rule: buildRuleInfo(commonLabels, commonAnnotations),
GroupLabels: gl,
CommonLabels: commonLabels,
CommonAnnotations: commonAnnotations,

View File

@@ -211,23 +211,8 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
annotations := make(ruletypes.Labels, 0, len(r.annotations.Map()))
for name, value := range r.annotations.Map() {
// no need to expand custom templating annotations — they get expanded in the notifier layer
if ruletypes.IsCustomTemplatingAnnotation(name) {
annotations = append(annotations, ruletypes.Label{Name: name, Value: value})
continue
}
annotations = append(annotations, ruletypes.Label{Name: name, Value: expand(value)})
}
// Add values to be used in notifier layer for notification templates
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationValue, Value: valueFormatter.Format(result.V, r.Unit())})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationThresholdValue, Value: threshold})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationCompareOp, Value: result.CompareOperator.Literal()})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationMatchType, Value: result.MatchType.Literal()})
if result.IsRecovering {
lb.Set(ruletypes.LabelIsRecovering, "true")
}
if result.IsMissing {
lb.Set(ruletypes.AlertNameLabel, "[No data] "+r.Name())
lb.Set(ruletypes.NoDataLabel, "true")

View File

@@ -337,23 +337,8 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
annotations := make(ruletypes.Labels, 0, len(r.annotations.Map()))
for name, value := range r.annotations.Map() {
// no need to expand custom templating annotations — they get expanded in the notifier layer
if ruletypes.IsCustomTemplatingAnnotation(name) {
annotations = append(annotations, ruletypes.Label{Name: name, Value: value})
continue
}
annotations = append(annotations, ruletypes.Label{Name: name, Value: expand(value)})
}
// Add values to be used in notifier layer for notification templates
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationValue, Value: value})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationThresholdValue, Value: threshold})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationCompareOp, Value: smpl.CompareOperator.Literal()})
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationMatchType, Value: smpl.MatchType.Literal()})
if smpl.IsRecovering {
lb.Set(ruletypes.LabelIsRecovering, "true")
}
if smpl.IsMissing {
lb.Set(ruletypes.AlertNameLabel, "[No data] "+r.Name())
lb.Set(ruletypes.NoDataLabel, "true")
@@ -367,13 +352,13 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
link := r.prepareLinksToTraces(ctx, ts, smpl.Metric)
if link != "" && r.hostFromSource() != "" {
r.logger.InfoContext(ctx, "adding traces link to annotations", slog.String("annotation.link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)))
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationRelatedTraces, Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
annotations = append(annotations, ruletypes.Label{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
}
case ruletypes.AlertTypeLogs:
link := r.prepareLinksToLogs(ctx, ts, smpl.Metric)
if link != "" && r.hostFromSource() != "" {
r.logger.InfoContext(ctx, "adding logs link to annotations", slog.String("annotation.link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)))
annotations = append(annotations, ruletypes.Label{Name: ruletypes.AnnotationRelatedLogs, Value: fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)})
annotations = append(annotations, ruletypes.Label{Name: "related_logs", Value: fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)})
}
}

View File

@@ -869,7 +869,7 @@ func TestThresholdRuleTracesLink(t *testing.T) {
assert.Equal(t, c.expectAlerts, alertsFound, "case %d", idx)
for _, item := range rule.Active {
for name, value := range item.Annotations.Map() {
if name == ruletypes.AnnotationRelatedTraces {
if name == "related_traces" {
assert.NotEmpty(t, value, "case %d", idx)
assert.Contains(t, value, "GET")
}
@@ -986,7 +986,7 @@ func TestThresholdRuleLogsLink(t *testing.T) {
assert.Equal(t, c.expectAlerts, alertsFound, "case %d", idx)
for _, item := range rule.Active {
for name, value := range item.Annotations.Map() {
if name == ruletypes.AnnotationRelatedLogs {
if name == "related_logs" {
assert.NotEmpty(t, value, "case %d", idx)
assert.Contains(t, value, "testcontainer")
}

View File

@@ -1,29 +0,0 @@
package alertmanagertypes
import (
"context"
"log/slog"
"github.com/SigNoz/signoz/pkg/types/emailtypes"
"github.com/prometheus/alertmanager/notify"
"github.com/prometheus/alertmanager/template"
"github.com/prometheus/alertmanager/types"
)
// Templater expands user-authored title and body templates against a group
// of alerts. Implemented by pkg/alertmanager/alertmanagertemplate.
type Templater interface {
Expand(ctx context.Context, req ExpandRequest, alerts []*types.Alert) (*ExpandResult, error)
}
// NotificationDeps carries the shared helpers every notifier needs to turn
// custom templates into channel-ready content. EmailTemplateStore is only
// consumed by the email notifier; other channels ignore it.
type NotificationDeps struct {
Templater Templater
EmailTemplateStore emailtypes.TemplateStore
}
// ReceiverIntegrationsFunc constructs the notify.Integration list for a
// configured receiver.
type ReceiverIntegrationsFunc = func(nc Receiver, tmpl *template.Template, logger *slog.Logger, deps NotificationDeps) ([]notify.Integration, error)

View File

@@ -4,11 +4,10 @@ import (
"context"
"encoding/json"
"fmt"
"github.com/prometheus/common/model"
"log/slog"
"time"
"github.com/prometheus/common/model"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/prometheus/alertmanager/notify"
"github.com/prometheus/alertmanager/template"
@@ -19,7 +18,8 @@ import (
type (
// Receiver is the type for the receiver configuration.
Receiver = config.Receiver
Receiver = config.Receiver
ReceiverIntegrationsFunc = func(nc Receiver, tmpl *template.Template, logger *slog.Logger) ([]notify.Integration, error)
)
// Creates a new receiver from a string. The input is initialized with the default values from the upstream alertmanager.
@@ -50,7 +50,7 @@ func NewReceiver(input string) (Receiver, error) {
return receiverWithDefaults, nil
}
func TestReceiver(ctx context.Context, receiver Receiver, receiverIntegrationsFunc ReceiverIntegrationsFunc, config *Config, tmpl *template.Template, logger *slog.Logger, deps NotificationDeps, lSet model.LabelSet, alert ...*Alert) error {
func TestReceiver(ctx context.Context, receiver Receiver, receiverIntegrationsFunc ReceiverIntegrationsFunc, config *Config, tmpl *template.Template, logger *slog.Logger, lSet model.LabelSet, alert ...*Alert) error {
ctx = notify.WithGroupKey(ctx, fmt.Sprintf("%s-%s-%d", receiver.Name, lSet.Fingerprint(), time.Now().Unix()))
ctx = notify.WithGroupLabels(ctx, lSet)
ctx = notify.WithReceiverName(ctx, receiver.Name)
@@ -72,7 +72,7 @@ func TestReceiver(ctx context.Context, receiver Receiver, receiverIntegrationsFu
return err
}
integrations, err := receiverIntegrationsFunc(receiver, tmpl, logger, deps)
integrations, err := receiverIntegrationsFunc(receiver, tmpl, logger)
if err != nil {
return err
}

View File

@@ -12,14 +12,13 @@ import (
var (
// Templates is a list of all the templates that are supported by the emailing service.
// This list should be updated whenever a new template is added.
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameResetPassword, TemplateNameAlertEmailNotification}
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameResetPassword}
)
var (
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password")}
TemplateNameAPIKeyEvent = TemplateName{valuer.NewString("api_key_event")}
TemplateNameAlertEmailNotification = TemplateName{valuer.NewString("alert_email_notification")}
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password")}
TemplateNameAPIKeyEvent = TemplateName{valuer.NewString("api_key_event")}
)
type TemplateName struct{ valuer.String }
@@ -32,8 +31,6 @@ func NewTemplateName(name string) (TemplateName, error) {
return TemplateNameResetPassword, nil
case TemplateNameAPIKeyEvent.StringValue():
return TemplateNameAPIKeyEvent, nil
case TemplateNameAlertEmailNotification.StringValue():
return TemplateNameAlertEmailNotification, nil
default:
return TemplateName{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid template name: %s", name)
}

View File

@@ -77,28 +77,6 @@ func (c CompareOperator) Normalize() CompareOperator {
}
}
// Literal returns the canonical literal (string) form of the operator.
func (c CompareOperator) Literal() string {
switch c.Normalize() {
case ValueIsAbove:
return ValueIsAboveLiteral.StringValue()
case ValueIsBelow:
return ValueIsBelowLiteral.StringValue()
case ValueIsEq:
return ValueIsEqLiteral.StringValue()
case ValueIsNotEq:
return ValueIsNotEqLiteral.StringValue()
case ValueAboveOrEq:
return ValueAboveOrEqLiteral.StringValue()
case ValueBelowOrEq:
return ValueBelowOrEqLiteral.StringValue()
case ValueOutsideBounds:
return ValueOutsideBoundsLiteral.StringValue()
default:
return c.StringValue()
}
}
func (c CompareOperator) Validate() error {
switch c {
case ValueIsAbove,

View File

@@ -56,24 +56,6 @@ func (m MatchType) Normalize() MatchType {
}
}
// Literal returns the canonical literal (string) form of the match type.
// The match type is normalized first; unknown values fall back to their
// raw string value.
func (m MatchType) Literal() string {
	canonical := map[MatchType]string{
		AtleastOnce: AtleastOnceLiteral.StringValue(),
		AllTheTimes: AllTheTimesLiteral.StringValue(),
		OnAverage:   OnAverageLiteral.StringValue(),
		InTotal:     InTotalLiteral.StringValue(),
		Last:        LastLiteral.StringValue(),
	}
	if literal, ok := canonical[m.Normalize()]; ok {
		return literal
	}
	return m.StringValue()
}
func (m MatchType) Validate() error {
switch m {
case

View File

@@ -24,10 +24,6 @@ type Sample struct {
RecoveryTarget *float64
TargetUnit string
// CompareOperator and MatchType carry the threshold evaluation context
CompareOperator CompareOperator
MatchType MatchType
}
func (s Sample) String() string {

View File

@@ -1,17 +0,0 @@
package ruletypes
// CustomTemplatingAnnotations lists the annotations that carry user-defined
// templates and therefore must not be expanded by the rule manager layer.
var CustomTemplatingAnnotations = []string{
	AnnotationTitleTemplate,
	AnnotationBodyTemplate,
}

// IsCustomTemplatingAnnotation reports whether name is one of the custom
// templating annotations, so callers can avoid expanding it in the rule
// manager layer.
func IsCustomTemplatingAnnotation(name string) bool {
	for i := range CustomTemplatingAnnotations {
		if name == CustomTemplatingAnnotations[i] {
			return true
		}
	}
	return false
}

View File

@@ -167,8 +167,6 @@ func (r BasicRuleThresholds) Eval(s *qbtypes.TimeSeries, unit string, evalData E
smpl.RecoveryTarget = threshold.RecoveryTarget
}
smpl.TargetUnit = threshold.TargetUnit
smpl.CompareOperator = threshold.CompareOperator
smpl.MatchType = threshold.MatchType
resultVector = append(resultVector, smpl)
continue
} else if evalData.SendUnmatched {
@@ -178,12 +176,10 @@ func (r BasicRuleThresholds) Eval(s *qbtypes.TimeSeries, unit string, evalData E
}
// prepare the sample with the first point of the series
smpl := Sample{
Point: Point{T: series.Values[0].Timestamp, V: series.Values[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
CompareOperator: threshold.CompareOperator,
MatchType: threshold.MatchType,
Point: Point{T: series.Values[0].Timestamp, V: series.Values[0].Value},
Metric: PrepareSampleLabelsForRule(series.Labels, threshold.Name),
Target: *threshold.TargetValue,
TargetUnit: threshold.TargetUnit,
}
if threshold.RecoveryTarget != nil {
smpl.RecoveryTarget = threshold.RecoveryTarget
@@ -205,8 +201,6 @@ func (r BasicRuleThresholds) Eval(s *qbtypes.TimeSeries, unit string, evalData E
smpl.Target = *threshold.TargetValue
smpl.RecoveryTarget = threshold.RecoveryTarget
smpl.TargetUnit = threshold.TargetUnit
smpl.CompareOperator = threshold.CompareOperator
smpl.MatchType = threshold.MatchType
// IsRecovering to notify that metrics is in recovery stage
smpl.IsRecovering = true
resultVector = append(resultVector, smpl)

View File

@@ -1,120 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>{{.Title}}</title>
<style>
code {
background: #f0f0f0;
padding: 2px 6px;
border-radius: 3px;
font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, monospace;
font-size: 13px;
}
pre {
background: #f0f0f0;
padding: 12px 16px;
border-radius: 6px;
font-size: 13px;
overflow-x: auto;
white-space: pre;
}
pre code {
background: none;
padding: 0;
border-radius: 0;
font-size: inherit;
}
table:not([role="presentation"]) {
width: 100%;
border-collapse: collapse;
font-size: 14px;
}
table:not([role="presentation"]) th {
font-weight: 600;
text-align: left;
padding: 8px 12px;
border-bottom: 2px solid #d0d0d0;
}
table:not([role="presentation"]) td {
padding: 8px 12px;
border-bottom: 1px solid #e8e8e8;
}
table:not([role="presentation"]) tr:last-child td {
border-bottom: none;
}
</style>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%;border:1px solid #e2e2e2;border-radius:12px;overflow:hidden">
<tr>
<td align="center" style="padding:20px 20px 12px">
<h2 style="margin:0 0 8px;font-size:20px;color:#333">{{.Title}}</h2>
<p style="margin:0;font-size:14px;color:#666">
Status: <strong>{{.Status}}</strong>
{{if .TotalFiring}} | Firing: <strong style="color:#e53e3e">{{.TotalFiring}}</strong>{{end}}
{{if .TotalResolved}} | Resolved: <strong style="color:#38a169">{{.TotalResolved}}</strong>{{end}}
</p>
</td>
</tr>
<tr>
<td style="padding:0 20px">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0">
<tr><td style="border-top:1px solid #e2e2e2;font-size:0;line-height:0" height="1">&nbsp;</td></tr>
</table>
</td>
</tr>
{{range .Bodies}}
<tr>
<td style="padding:8px 20px">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td style="padding:16px;background:#fafafa;border:1px solid #e8e8e8;border-radius:6px">
{{.}}
</td>
</tr>
</table>
</td>
</tr>
{{end}}
{{if .ExternalURL}}
<tr>
<td style="padding:16px 20px">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td align="center">
<a href="{{.ExternalURL}}" target="_blank" style="display:inline-block;padding:12px 32px;font-size:14px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
View in SigNoz
</a>
</td>
</tr>
</table>
</td>
</tr>
{{end}}
<tr>
<td align="center" style="padding:8px 16px 16px">
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
Sent by SigNoz AlertManager
</p>
</td>
</tr>
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@@ -23,7 +23,6 @@ pytest_plugins = [
"fixtures.notification_channel",
"fixtures.alerts",
"fixtures.cloudintegrations",
"fixtures.maildev",
]

View File

@@ -4,40 +4,15 @@ import time
from datetime import datetime, timedelta
from http import HTTPStatus
from typing import List
from urllib.parse import urlparse
import requests
from fixtures import types
from fixtures.logger import setup_logger
from fixtures.maildev import verify_email_received
logger = setup_logger(__name__)
def _is_json_subset(subset, superset) -> bool:
"""Check if subset is contained within superset recursively.
- For dicts: all keys in subset must exist in superset with matching values
- For lists: all items in subset must be present in superset
- For scalars: exact equality
"""
if isinstance(subset, dict):
if not isinstance(superset, dict):
return False
return all(
key in superset and _is_json_subset(value, superset[key])
for key, value in subset.items()
)
if isinstance(subset, list):
if not isinstance(superset, list):
return False
return all(
any(_is_json_subset(sub_item, sup_item) for sup_item in superset)
for sub_item in subset
)
return subset == superset
def collect_webhook_firing_alerts(
webhook_test_container: types.TestContainerDocker, notification_channel_name: str
) -> List[types.FiringAlert]:
@@ -99,93 +74,6 @@ def _verify_alerts_labels(
return (fired_count, missing_alerts)
def verify_webhook_notification_expectation(
    notification_channel: types.TestContainerDocker,
    validation_data: dict,
) -> bool:
    """Check if wiremock received a request at the given path
    whose JSON body is a superset of the expected json_body.

    ``validation_data`` must contain:
      - ``path``: the request path the notifier is expected to POST to
      - ``json_body``: the payload expected as a recursive subset of a recorded body

    Returns True if any recorded request matches; raises AssertionError if the
    wiremock admin API itself cannot be queried.
    """
    path = validation_data["path"]
    json_body = validation_data["json_body"]
    # Ask wiremock's admin API for all POST requests recorded at `path`.
    url = notification_channel.host_configs["8080"].get("__admin/requests/find")
    res = requests.post(url, json={"method": "POST", "url": path}, timeout=5)
    assert res.status_code == HTTPStatus.OK, (
        f"Failed to find requests for path {path}, "
        f"status code: {res.status_code}, response: {res.text}"
    )
    # Wiremock returns request bodies base64-encoded; decode each and check
    # whether the expected payload is contained in it.
    for req in res.json()["requests"]:
        body = json.loads(base64.b64decode(req["bodyAsBase64"]).decode("utf-8"))
        # logger.info("Webhook request body: %s", json.dumps(body, indent=2))
        if _is_json_subset(json_body, body):
            return True
    return False
def _check_notification_validation(
    validation: types.NotificationValidation,
    notification_channel: types.TestContainerDocker,
    maildev: types.TestContainerDocker,
) -> bool:
    """Dispatch a single validation check to the appropriate verifier."""
    destination = validation.destination_type
    if destination == "webhook":
        # slack/msteams/pagerduty/opsgenie/webhook channels are all verified
        # through the wiremock webhook container
        return verify_webhook_notification_expectation(
            notification_channel, validation.validation_data
        )
    if destination == "email":
        return verify_email_received(maildev, validation.validation_data)
    raise ValueError(f"Invalid destination type: {validation.destination_type}")
def verify_notification_expectation(
    notification_channel: types.TestContainerDocker,
    maildev: types.TestContainerDocker,
    expected_notification: types.AMNotificationExpectation,
) -> bool:
    """Poll for expected notifications across webhook and email channels.

    Polls once per second until ``wait_time_seconds`` elapses. If
    ``should_notify`` is True, returns early as soon as every validation
    passes. If ``should_notify`` is False, the full wait always elapses
    before asserting that no notification arrived.

    Raises AssertionError when the expectation is not met; returns True otherwise.
    """
    time_to_wait = datetime.now() + timedelta(
        seconds=expected_notification.wait_time_seconds
    )
    while datetime.now() < time_to_wait:
        # every validation (webhook and/or email) must pass in the same poll
        all_found = all(
            _check_notification_validation(v, notification_channel, maildev)
            for v in expected_notification.notification_validations
        )
        if expected_notification.should_notify and all_found:
            logger.info("All expected notifications found")
            return True
        time.sleep(1)
    # Timeout reached
    if not expected_notification.should_notify:
        # Verify no notifications were received
        for validation in expected_notification.notification_validations:
            found = _check_notification_validation(
                validation, notification_channel, maildev
            )
            assert not found, (
                f"Expected no notification but found one for "
                f"{validation.destination_type} with data {validation.validation_data}"
            )
        logger.info("No notifications found, as expected")
        return True
    # Expected notifications but didn't get them all — report missing
    missing = [
        v
        for v in expected_notification.notification_validations
        if not _check_notification_validation(v, notification_channel, maildev)
    ]
    assert (
        len(missing) == 0
    ), f"Expected all notifications to be found but missing: {missing}"
    return True
def verify_webhook_alert_expectation(
test_alert_container: types.TestContainerDocker,
notification_channel_name: str,
@@ -247,40 +135,6 @@ def verify_webhook_alert_expectation(
return True # should not reach here
def update_raw_channel_config(
    channel_config: dict,
    channel_name: str,
    notification_channel: types.TestContainerDocker,
) -> dict:
    """
    Updates the channel config to point to the given wiremock
    notification_channel container to receive notifications.

    Returns a new config dict; the input ``channel_config`` is left unmodified.
    """
    import copy

    # Use deepcopy: the previous shallow .copy() shared the nested *_configs
    # entry dicts with the caller, so rewriting entry[url_field] below mutated
    # the module-level default configs and leaked state across test cases.
    config = copy.deepcopy(channel_config)
    config["name"] = channel_name
    # Map each notifier section to the field that holds its endpoint URL.
    url_field_map = {
        "slack_configs": "api_url",
        "msteamsv2_configs": "webhook_url",
        "webhook_configs": "url",
        "pagerduty_configs": "url",
        "opsgenie_configs": "api_url",
    }
    for config_key, url_field in url_field_map.items():
        if config_key in config:
            for entry in config[config_key]:
                if url_field in entry:
                    # Keep the original URL path, but rebase it onto the
                    # wiremock container's address so it records the request.
                    original_url = entry[url_field]
                    path = urlparse(original_url).path
                    entry[url_field] = notification_channel.container_configs[
                        "8080"
                    ].get(path)
    return config
def update_rule_channel_name(rule_data: dict, channel_name: str):
"""
updates the channel name in the thresholds

View File

@@ -132,9 +132,7 @@ def gateway(
@pytest.fixture(name="make_http_mocks", scope="function")
def make_http_mocks(
request: pytest.FixtureRequest,
) -> Callable[[types.TestContainerDocker, List[Mapping]], None]:
def make_http_mocks() -> Callable[[types.TestContainerDocker, List[Mapping]], None]:
def _make_http_mocks(
container: types.TestContainerDocker, mappings: List[Mapping]
) -> None:
@@ -143,10 +141,7 @@ def make_http_mocks(
for mapping in mappings:
Mappings.create_mapping(mapping=mapping)
def cleanup():
Mappings.delete_all_mappings()
Requests.reset_request_journal()
yield _make_http_mocks
request.addfinalizer(cleanup)
return _make_http_mocks
Mappings.delete_all_mappings()
Requests.reset_request_journal()

View File

@@ -1,134 +0,0 @@
import json
from http import HTTPStatus
from typing import List
import docker
import docker.errors
import pytest
import requests
from testcontainers.core.container import DockerContainer, Network
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="maildev", scope="package")
def maildev(
    network: Network, request: pytest.FixtureRequest, pytestconfig: pytest.Config
) -> types.TestContainerDocker:
    """
    Package-scoped fixture for MailDev container.
    Provides SMTP (port 1025) and HTTP API (port 1080) for email testing.
    """

    # Starts a fresh MailDev container attached to the shared test network and
    # records both host-side (mapped port) and in-network (container name)
    # endpoints for each service port.
    def create() -> types.TestContainerDocker:
        container = DockerContainer(image="maildev/maildev:2.2.1")
        container.with_exposed_ports(1025, 1080)
        container.with_network(network=network)
        container.start()
        return types.TestContainerDocker(
            id=container.get_wrapped_container().id,
            # endpoints reachable from the test host (mapped ports)
            host_configs={
                "1025": types.TestContainerUrlConfig(
                    scheme="smtp",
                    address=container.get_container_host_ip(),
                    port=container.get_exposed_port(1025),
                ),
                "1080": types.TestContainerUrlConfig(
                    scheme="http",
                    address=container.get_container_host_ip(),
                    port=container.get_exposed_port(1080),
                ),
            },
            # endpoints reachable from other containers on the network
            container_configs={
                "1025": types.TestContainerUrlConfig(
                    scheme="smtp",
                    address=container.get_wrapped_container().name,
                    port=1025,
                ),
                "1080": types.TestContainerUrlConfig(
                    scheme="http",
                    address=container.get_wrapped_container().name,
                    port=1080,
                ),
            },
        )

    # Stops and removes the container; tolerates the container having been
    # removed out-of-band (e.g. manually during debugging).
    def delete(container: types.TestContainerDocker):
        client = docker.from_env()
        try:
            client.containers.get(container_id=container.id).stop()
            client.containers.get(container_id=container.id).remove(v=True)
        except docker.errors.NotFound:
            logger.info(
                "Skipping removal of MailDev, MailDev(%s) not found. Maybe it was manually removed?",
                {"id": container.id},
            )

    # Rebuilds the container handle from a dev-mode cache entry.
    def restore(cache: dict) -> types.TestContainerDocker:
        return types.TestContainerDocker.from_cache(cache)

    # dev.wrap handles create/reuse/teardown depending on dev-mode settings.
    return dev.wrap(
        request,
        pytestconfig,
        "maildev",
        lambda: types.TestContainerDocker(id="", host_configs={}, container_configs={}),
        create,
        delete,
        restore,
    )
def get_all_mails(_maildev: types.TestContainerDocker) -> List[dict]:
    """
    Fetches all emails from the MailDev HTTP API.
    Returns list of dicts with keys: subject, html, text.
    """
    url = _maildev.host_configs["1080"].get("/email")
    response = requests.get(url, timeout=5)
    assert response.status_code == HTTPStatus.OK, (
        f"Failed to fetch emails from MailDev, "
        f"status code: {response.status_code}, response: {response.text}"
    )
    # logger.info("Emails: %s", json.dumps(response.json(), indent=2))
    mails: List[dict] = []
    for email in response.json():
        mails.append(
            {
                "subject": email.get("subject", ""),
                "html": email.get("html", ""),
                "text": email.get("text", ""),
            }
        )
    return mails
def verify_email_received(_maildev: types.TestContainerDocker, filters: dict) -> bool:
    """
    Checks if any email in MailDev matches all the given filters.
    Filters are matched with exact equality against the email fields (subject, html, text).
    Returns True if at least one matching email is found.
    """
    for email in get_all_mails(_maildev):
        logger.info("Email: %s", json.dumps(email, indent=2))
        # every filter field must exist on the email and match exactly
        matches = all(
            field in email and email[field] == expected
            for field, expected in filters.items()
        )
        if matches:
            return True
    return False
def delete_all_mails(_maildev: types.TestContainerDocker) -> None:
    """
    Deletes all emails from the MailDev inbox.
    """
    # MailDev exposes a bulk-delete endpoint at DELETE /email/all.
    url = _maildev.host_configs["1080"].get("/email/all")
    response = requests.delete(url, timeout=5)
    if response.status_code != HTTPStatus.OK:
        raise AssertionError(
            f"Failed to delete emails from MailDev, "
            f"status code: {response.status_code}, response: {response.text}"
        )

View File

@@ -71,31 +71,6 @@ def notification_channel(
)
@pytest.fixture(name="create_notification_channel", scope="function")
def create_notification_channel(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> Callable[[dict], str]:
    """Fixture returning a factory that creates a notification channel via
    the SigNoz API and returns the new channel's id.

    ``create_user_admin`` is requested only for its side effect of ensuring
    the admin user exists before a token is fetched.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    def _create_notification_channel(channel_config: dict) -> str:
        # POST the raw channel config; the API returns the created channel id.
        response = requests.post(
            signoz.self.host_configs["8080"].get("/api/v1/channels"),
            json=channel_config,
            headers={"Authorization": f"Bearer {admin_token}"},
            timeout=5,
        )
        assert response.status_code == HTTPStatus.CREATED, (
            f"Failed to create channel, "
            f"Response: {response.text} "
            f"Response status: {response.status_code}"
        )
        return response.json()["data"]["id"]

    return _create_notification_channel
@pytest.fixture(name="create_webhook_notification_channel", scope="function")
def create_webhook_notification_channel(
signoz: types.SigNoz,

View File

@@ -1,81 +0,0 @@
# pylint: disable=line-too-long
"""
Default notification channel configs shared across alertmanager tests.
"""
slack_default_config = {
# channel name configured on runtime
"slack_configs": [
{
"api_url": "services/TEAM_ID/BOT_ID/TOKEN_ID", # base_url configured on runtime
"title": '[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n {{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n {{" "}}(\n {{- with .CommonLabels.Remove .GroupLabels.Names }}\n {{- range $index, $label := .SortedPairs -}}\n {{ if $index }}, {{ end }}\n {{- $label.Name }}="{{ $label.Value -}}"\n {{- end }}\n {{- end -}}\n )\n {{- end }}',
"text": '{{ range .Alerts -}}\r\n *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}\r\n\r\n *Summary:* {{ .Annotations.summary }}\r\n *Description:* {{ .Annotations.description }}\r\n *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}\r\n *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}\r\n\r\n *Details:*\r\n {{ range .Labels.SortedPairs -}}\r\n {{- if ne .Name "ruleId" -}}\r\n \u2022 *{{ .Name }}:* {{ .Value }}\r\n {{ end -}}\r\n {{ end -}}\r\n{{ end }}',
}
],
}
# MSTeams default config
msteams_default_config = {
"msteamsv2_configs": [
{
"webhook_url": "msteams/webhook_url", # base_url configured on runtime
"title": '[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n {{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n {{" "}}(\n {{- with .CommonLabels.Remove .GroupLabels.Names }}\n {{- range $index, $label := .SortedPairs -}}\n {{ if $index }}, {{ end }}\n {{- $label.Name }}="{{ $label.Value -}}"\n {{- end }}\n {{- end -}}\n )\n {{- end }}',
"text": '{{ range .Alerts -}}\r\n *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}\r\n\r\n *Summary:* {{ .Annotations.summary }}\r\n *Description:* {{ .Annotations.description }}\r\n *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}\r\n *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}\r\n\r\n *Details:*\r\n {{ range .Labels.SortedPairs -}}\r\n {{- if ne .Name "ruleId" -}}\r\n \u2022 *{{ .Name }}:* {{ .Value }}\r\n {{ end -}}\r\n {{ end -}}\r\n{{ end }}',
}
],
}
# pagerduty default config
pagerduty_default_config = {
"pagerduty_configs": [
{
"routing_key": "PagerDutyRoutingKey",
"url": "v2/enqueue", # base_url configured on runtime
"client": "SigNoz Alert Manager",
"client_url": "https://enter-signoz-host-n-port-here/alerts",
"description": '[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n\t{{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n\t {{" "}}(\n\t {{- with .CommonLabels.Remove .GroupLabels.Names }}\n\t\t{{- range $index, $label := .SortedPairs -}}\n\t\t {{ if $index }}, {{ end }}\n\t\t {{- $label.Name }}="{{ $label.Value -}}"\n\t\t{{- end }}\n\t {{- end -}}\n\t )\n\t{{- end }}',
"details": {
"firing": '{{ template "pagerduty.default.instances" .Alerts.Firing }}',
"num_firing": "{{ .Alerts.Firing | len }}",
"num_resolved": "{{ .Alerts.Resolved | len }}",
"resolved": '{{ template "pagerduty.default.instances" .Alerts.Resolved }}',
},
"source": "SigNoz Alert Manager",
"severity": "{{ (index .Alerts 0).Labels.severity }}",
}
],
}
# opsgenie default config
opsgenie_default_config = {
"opsgenie_configs": [
{
"api_key": "OpsGenieAPIKey",
"api_url": "/", # base_url configured on runtime
"description": '{{ if gt (len .Alerts.Firing) 0 -}}\r\n\tAlerts Firing:\r\n\t{{ range .Alerts.Firing }}\r\n\t - Message: {{ .Annotations.description }}\r\n\tLabels:\r\n\t{{ range .Labels.SortedPairs -}}\r\n\t\t{{- if ne .Name "ruleId" }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end -}}\r\n\t{{- end }} Annotations:\r\n\t{{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end }} Source: {{ .GeneratorURL }}\r\n\t{{ end }}\r\n{{- end }}\r\n{{ if gt (len .Alerts.Resolved) 0 -}}\r\n\tAlerts Resolved:\r\n\t{{ range .Alerts.Resolved }}\r\n\t - Message: {{ .Annotations.description }}\r\n\tLabels:\r\n\t{{ range .Labels.SortedPairs -}}\r\n\t\t{{- if ne .Name "ruleId" }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end -}}\r\n\t{{- end }} Annotations:\r\n\t{{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end }} Source: {{ .GeneratorURL }}\r\n\t{{ end }}\r\n{{- end }}',
"priority": '{{ if eq (index .Alerts 0).Labels.severity "critical" }}P1{{ else if eq (index .Alerts 0).Labels.severity "warning" }}P2{{ else if eq (index .Alerts 0).Labels.severity "info" }}P3{{ else }}P4{{ end }}',
"message": "{{ .CommonLabels.alertname }}",
"details": {},
}
]
}
# webhook default config
webhook_default_config = {
"webhook_configs": [
{
"url": "webhook/webhook_url", # base_url configured on runtime
}
],
}
# email default config
email_default_config = {
"email_configs": [
{
"to": "test@example.com",
"html": '<html><body>{{ range .Alerts -}}\r\n *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}\r\n\r\n *Summary:* {{ .Annotations.summary }}\r\n *Description:* {{ .Annotations.description }}\r\n *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}\r\n *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}\r\n\r\n *Details:*\r\n {{ range .Labels.SortedPairs -}}\r\n {{- if ne .Name "ruleId" -}}\r\n \u2022 *{{ .Name }}:* {{ .Value }}\r\n {{ end -}}\r\n {{ end -}}\r\n{{ end }}</body></html>',
"headers": {
"Subject": '[{{ .Status | toUpper }}{{ if eq .Status "firing" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n {{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n {{" "}}(\n {{- with .CommonLabels.Remove .GroupLabels.Names }}\n {{- range $index, $label := .SortedPairs -}}\n {{ if $index }}, {{ end }}\n {{- $label.Name }}="{{ $label.Value -}}"\n {{- end }}\n {{- end -}}\n )\n {{- end }}'
},
}
],
}

View File

@@ -203,40 +203,3 @@ class AlertTestCase:
alert_data: List[AlertData]
# list of alert expectations for the test case
alert_expectation: AlertExpectation
@dataclass(frozen=True)
class NotificationValidation:
    """One expected notification plus the data needed to verify its payload."""

    # destination type of the notification, either webhook or email
    # slack, msteams, pagerduty, opsgenie, webhook channels send notifications through webhook
    # email channels send notifications through email
    destination_type: Literal["webhook", "email"]
    # validation data for validating the received notification payload
    # NOTE(review): `any` here is the builtin function used as an annotation;
    # `typing.Any` was probably intended — confirm before changing.
    validation_data: dict[str, any]
@dataclass(frozen=True)
class AMNotificationExpectation:
    """Expected alertmanager notification outcome for a test case."""

    # whether we expect any notifications to be fired or not, false when testing downtime scenarios
    # or don't expect any notifications to be fired in given time period
    should_notify: bool
    # seconds to wait for the notifications to be fired, if no
    # notifications are fired in the expected time, the test will fail
    wait_time_seconds: int
    # list of notifications to expect, as a single rule can trigger multiple notifications
    # spanning across different notifiers
    notification_validations: List[NotificationValidation]
@dataclass(frozen=True)
class AlertManagerNotificationTestCase:
    """End-to-end test case: rule + alert data + channel config + expected notifications."""

    # name of the test case
    name: str
    # path to the rule file in testdata directory
    rule_path: str
    # list of alert data that will be inserted into the database for the rule to be triggered
    alert_data: List[AlertData]
    # configuration for the notification channel
    # NOTE(review): `any` is the builtin used as an annotation; `typing.Any`
    # was probably intended — confirm before changing.
    channel_config: dict[str, any]
    # notification expectations for the test case
    notification_expectation: AMNotificationExpectation

View File

@@ -1,427 +0,0 @@
# pylint: disable=line-too-long
import json
import uuid
from datetime import datetime, timedelta, timezone
from typing import Callable, List
import pytest
from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingResponse
from fixtures import types
from fixtures.alertutils import (
update_raw_channel_config,
update_rule_channel_name,
verify_notification_expectation,
)
from fixtures.logger import setup_logger
from fixtures.maildev import delete_all_mails
from fixtures.notification_channelutils import (
email_default_config,
msteams_default_config,
opsgenie_default_config,
pagerduty_default_config,
slack_default_config,
webhook_default_config,
)
from fixtures.utils import get_testdata_file_path
# tests to verify the notifiers sending out the notifications with expected content
NOTIFIERS_TEST = [
types.AlertManagerNotificationTestCase(
name="slack_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=slack_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
# extra wait for alertmanager server setup
wait_time_seconds=60,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/services/TEAM_ID/BOT_ID/TOKEN_ID",
"json_body": {
"username": "Alertmanager",
"attachments": [
{
"title": '[FIRING:1] threshold_above_at_least_once for (alertname="threshold_above_at_least_once", severity="critical", threshold.name="critical")',
"text": "*Alert:* threshold_above_at_least_once - critical\r\n\r\n *Summary:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n *Description:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n *RelatedLogs:* \r\n *RelatedTraces:* \r\n\r\n *Details:*\r\n • *alertname:* threshold_above_at_least_once\r\n • *severity:* critical\r\n • *threshold.name:* critical\r\n ",
"color": "danger",
"mrkdwn_in": ["fallback", "pretext", "text"],
}
],
},
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="msteams_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=msteams_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/msteams/webhook_url",
"json_body": {
"type": "message",
"attachments": [
{
"contentType": "application/vnd.microsoft.card.adaptive",
"content": {
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"type": "AdaptiveCard",
"version": "1.2",
"body": [
{
"type": "TextBlock",
"text": '[FIRING:1] threshold_above_at_least_once for (alertname="threshold_above_at_least_once", severity="critical", threshold.name="critical")',
"weight": "Bolder",
"size": "Medium",
"wrap": True,
"style": "heading",
"color": "Attention",
},
{
"type": "TextBlock",
"text": "Alerts",
"weight": "Bolder",
"size": "Medium",
"wrap": True,
"color": "Attention",
},
{
"type": "TextBlock",
"text": "Labels",
"weight": "Bolder",
"size": "Medium",
},
{
"type": "FactSet",
"text": "",
"facts": [
{
"title": "threshold.name",
"value": "critical",
}
],
},
{
"type": "TextBlock",
"text": "Annotations",
"weight": "Bolder",
"size": "Medium",
},
{
"type": "FactSet",
"text": "",
"facts": [
{
"title": "threshold.value",
"value": "10",
},
{
"title": "compare_op",
"value": "above",
},
{
"title": "match_type",
"value": "at_least_once",
},
{"title": "value", "value": "15"},
{
"title": "description",
"value": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)",
},
],
},
],
"msteams": {"width": "full"},
"actions": [
{
"type": "Action.OpenUrl",
"title": "View Alert",
}
],
},
}
],
},
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="pagerduty_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=pagerduty_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/v2/enqueue",
"json_body": {
"routing_key": "PagerDutyRoutingKey",
"event_action": "trigger",
"payload": {
"summary": '[FIRING:1] threshold_above_at_least_once for (alertname="threshold_above_at_least_once", severity="critical", threshold.name="critical")',
"source": "SigNoz Alert Manager",
"severity": "critical",
"custom_details": {
"firing": {
"Annotations": [
"compare_op = above",
{
"description = This alert is fired when the defined metric (current value": "15) crosses the threshold (10)"
},
"match_type = at_least_once",
"threshold.value = 10",
"value = 15",
],
"Labels": [
"alertname = threshold_above_at_least_once",
"severity = critical",
"threshold.name = critical",
],
}
},
},
"client": "SigNoz Alert Manager",
"client_url": "https://enter-signoz-host-n-port-here/alerts",
},
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="opsgenie_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=opsgenie_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/v2/alerts",
"json_body": {
"message": "threshold_above_at_least_once",
"description": "Alerts Firing:\r\n\t\r\n\t - Message: This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n\tLabels:\r\n\t - alertname = threshold_above_at_least_once\r\n\t - severity = critical\r\n\t - threshold.name = critical\r\n\t Annotations:\r\n\t - compare_op = above\r\n\t - description = This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n\t - match_type = at_least_once\r\n\t - summary = This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n\t - threshold.value = 10\r\n\t - value = 15\r\n\t Source: \r\n\t\r\n",
"details": {
"alertname": "threshold_above_at_least_once",
"severity": "critical",
"threshold.name": "critical",
},
"priority": "P1",
},
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="webhook_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=webhook_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/webhook/webhook_url",
"json_body": {
"status": "firing",
"alerts": [
{
"status": "firing",
"labels": {
"alertname": "threshold_above_at_least_once",
"severity": "critical",
"threshold.name": "critical",
},
"annotations": {
"compare_op": "above",
"description": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)",
"match_type": "at_least_once",
"summary": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)",
"threshold.value": "10",
"value": "15",
},
}
],
"commonLabels": {
"alertname": "threshold_above_at_least_once",
"severity": "critical",
"threshold.name": "critical",
},
"commonAnnotations": {
"compare_op": "above",
"description": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)",
"match_type": "at_least_once",
"summary": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)",
"threshold.value": "10",
"value": "15",
},
},
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="email_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=email_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="email",
validation_data={
"subject": '[FIRING:1] threshold_above_at_least_once for (alertname="threshold_above_at_least_once", severity="critical", threshold.name="critical")',
"html": "<html><body>*Alert:* threshold_above_at_least_once - critical\n\n *Summary:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\n *Description:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\n *RelatedLogs:* \n *RelatedTraces:* \n\n *Details:*\n \u2022 *alertname:* threshold_above_at_least_once\n \u2022 *severity:* critical\n \u2022 *threshold.name:* critical\n </body></html>",
},
),
],
),
),
]
logger = setup_logger(__name__)
@pytest.mark.parametrize(
    "notifier_test_case",
    NOTIFIERS_TEST,
    ids=lambda notifier_test_case: notifier_test_case.name,
)
def test_notifier_templating(
    # wiremock container for webhook notifications
    notification_channel: types.TestContainerDocker,
    # function to create wiremock mocks
    make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None],
    # function to create a notification channel
    create_notification_channel: Callable[[dict], str],
    # function to create alert rule
    create_alert_rule: Callable[[dict], str],
    # Alert data insertion related fixture
    insert_alert_data: Callable[[List[types.AlertData], datetime], None],
    # Mail dev container for email verification
    maildev: types.TestContainerDocker,
    # test case from parametrize
    notifier_test_case: types.AlertManagerNotificationTestCase,
):
    """Verify notifier templating end-to-end for one parametrized test case.

    Steps: build a uniquely named channel config with destination URLs
    rewritten to the wiremock container, stub any webhook destinations,
    clear the maildev inbox when email is involved, insert the alert data
    backdated 5 minutes (so the rule's evaluation window covers it),
    create the alert rule bound to the channel, and finally assert the
    case's notification expectation.
    """
    # generate unique channel name so parametrized cases never collide
    channel_name = str(uuid.uuid4())
    # update channel config: set name and rewrite URLs to wiremock
    channel_config = update_raw_channel_config(
        notifier_test_case.channel_config, channel_name, notification_channel
    )
    logger.info("Channel config: %s", {"channel_config": channel_config})
    # setup wiremock mocks for webhook-based notification validations
    webhook_validations = [
        v
        for v in notifier_test_case.notification_expectation.notification_validations
        if v.destination_type == "webhook"
    ]
    if webhook_validations:  # idiomatic truthiness instead of len(...) > 0
        mock_mappings = [
            Mapping(
                request=MappingRequest(
                    method=HttpMethods.POST, url=v.validation_data["path"]
                ),
                response=MappingResponse(status=200, json_body={}),
                persistent=False,
            )
            for v in webhook_validations
        ]
        make_http_mocks(notification_channel, mock_mappings)
        logger.info("Mock mappings created")
    # clear mails if any destination is email, so earlier runs cannot
    # satisfy (or pollute) this case's email assertions
    if any(
        v.destination_type == "email"
        for v in notifier_test_case.notification_expectation.notification_validations
    ):
        delete_all_mails(maildev)
        logger.info("Mails deleted")
    # create notification channel
    create_notification_channel(channel_config)
    logger.info("Channel created with name: %s", {"channel_name": channel_name})
    # insert alert data, backdated so it falls inside the eval window
    insert_alert_data(
        notifier_test_case.alert_data,
        base_time=datetime.now(tz=timezone.utc) - timedelta(minutes=5),
    )
    # create alert rule pointing at the freshly created channel
    rule_path = get_testdata_file_path(notifier_test_case.rule_path)
    with open(rule_path, "r", encoding="utf-8") as f:
        # idiomatic stream parse instead of json.loads(f.read())
        rule_data = json.load(f)
    update_rule_channel_name(rule_data, channel_name)
    rule_id = create_alert_rule(rule_data)
    logger.info(
        "rule created: %s", {"rule_id": rule_id, "rule_name": rule_data["alert"]}
    )
    # verify notification expectations
    verify_notification_expectation(
        notification_channel,
        maildev,
        notifier_test_case.notification_expectation,
    )

File diff suppressed because one or more lines are too long

View File

@@ -1,50 +0,0 @@
import pytest
from testcontainers.core.container import Network
from fixtures import types
from fixtures.signoz import create_signoz
@pytest.fixture(name="signoz", scope="package")
def signoz(  # pylint: disable=too-many-arguments,too-many-positional-arguments
    network: Network,
    zeus: types.TestContainerDocker,
    gateway: types.TestContainerDocker,
    sqlstore: types.TestContainerSQL,
    clickhouse: types.TestContainerClickhouse,
    request: pytest.FixtureRequest,
    pytestconfig: pytest.Config,
    maildev: types.TestContainerDocker,
    notification_channel: types.TestContainerDocker,
) -> types.SigNoz:
    """
    Package-scoped fixture for setting up SigNoz.
    Overrides SMTP, PagerDuty, and OpsGenie URLs to point to test containers.
    """
    # hoist repeated container-config lookups into locals for readability
    smtp_cfg = maildev.container_configs["1025"]
    wiremock_cfg = notification_channel.container_configs["8080"]
    env_overrides = {
        # SMTP config for email notifications via maildev
        "SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_SMTP__SMARTHOST": f"{smtp_cfg.address}:{smtp_cfg.port}",
        "SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_SMTP__REQUIRE__TLS": "false",
        "SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_SMTP__FROM": "alertmanager@signoz.io",
        # PagerDuty API URL -> wiremock (default: https://events.pagerduty.com/v2/enqueue)
        "SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_PAGERDUTY__URL": wiremock_cfg.get("/v2/enqueue"),
        # OpsGenie API URL -> wiremock (default: https://api.opsgenie.com/)
        "SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_OPSGENIE__API__URL": wiremock_cfg.get("/"),
    }
    return create_signoz(
        network=network,
        zeus=zeus,
        gateway=gateway,
        sqlstore=sqlstore,
        clickhouse=clickhouse,
        request=request,
        pytestconfig=pytestconfig,
        env_overrides=env_overrides,
    )

View File

@@ -45,7 +45,5 @@ def test_teardown(
idp: types.TestContainerIDP, # pylint: disable=unused-argument
create_user_admin: types.Operation, # pylint: disable=unused-argument
migrator: types.Operation, # pylint: disable=unused-argument
maildev: types.TestContainerDocker, # pylint: disable=unused-argument
notification_channel: types.TestContainerDocker, # pylint: disable=unused-argument
) -> None:
pass

View File

@@ -1,20 +0,0 @@
{ "timestamp": "2026-01-29T10:00:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "User login successful", "severity_text": "INFO" }
{ "timestamp": "2026-01-29T10:00:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: gateway timeout", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:01:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: card declined", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:01:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "Database connection established", "severity_text": "INFO" }
{ "timestamp": "2026-01-29T10:02:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: insufficient funds", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:02:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: invalid token", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:03:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "API request received", "severity_text": "INFO" }
{ "timestamp": "2026-01-29T10:03:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: gateway timeout", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:04:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: card declined", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:04:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: invalid token", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:05:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: gateway timeout", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:05:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: card declined", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:06:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: gateway timeout", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:06:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: insufficient funds", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:07:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: card declined", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:07:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: gateway timeout", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:08:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "Response sent to client", "severity_text": "INFO" }
{ "timestamp": "2026-01-29T10:08:30.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: invalid token", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:09:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: card declined", "severity_text": "ERROR" }
{ "timestamp": "2026-01-29T10:10:00.000000Z", "resources": { "service.name": "payment-service" }, "attributes": { "code.file": "payment_handler.py" }, "body": "payment failure: gateway timeout", "severity_text": "ERROR" }

View File

@@ -1,71 +0,0 @@
{
"alert": "content_templating_logs",
"ruleType": "threshold_rule",
"alertType": "LOGS_BASED_ALERT",
"condition": {
"thresholds": {
"kind": "basic",
"spec": [
{
"name": "critical",
"target": 0,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
]
}
]
},
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"filter": {
"expression": "body CONTAINS 'payment failure'"
},
"aggregations": [
{
"expression": "count()"
}
],
"groupBy": [
{"name": "service.name", "fieldContext": "resource"}
]
}
}
]
},
"selectedQueryName": "A"
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m0s",
"frequency": "15s"
}
},
"labels": {},
"annotations": {
"description": "Payment failure spike detected on $service_name",
"summary": "Payment failures elevated on $service_name",
"title_template": "[$status] Payment failure spike in $service_name",
"body_template": "**Severity:** $severity\n**Status:** $status\n\n**Service:** $service_name\n\n**Condition ($threshold_name):**\n- **Current:** $value\n- **Threshold:** $compare_op $threshold\n\n**Description:** Payment failures observed at $value over the evaluation window, crossing the $threshold_name threshold of $compare_op $threshold on $service_name. Investigate downstream payment processor health.\n\n**Runbook:** https://signoz.io/docs/runbooks/payment-failure-spike"
},
"notificationSettings": {
"groupBy": [],
"usePolicy": false,
"renotify": {
"enabled": false,
"interval": "30m",
"alertStates": []
}
},
"version": "v5",
"schemaVersion": "v2alpha1"
}

View File

@@ -1,12 +0,0 @@
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:01:00+00:00","value":80,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:02:00+00:00","value":95,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:03:00+00:00","value":110,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:04:00+00:00","value":120,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:05:00+00:00","value":125,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:06:00+00:00","value":130,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:07:00+00:00","value":135,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:08:00+00:00","value":140,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:09:00+00:00","value":145,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:10:00+00:00","value":150,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:11:00+00:00","value":155,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"container_memory_bytes_content_templating","labels":{"namespace":"production","pod":"checkout-7d9c8b5f4-x2k9p","container":"checkout","node":"ip-10-0-1-23","severity":"critical","service":"checkout"},"timestamp":"2026-01-29T10:12:00+00:00","value":160,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"bytes","env":"default","resource_attrs":{},"scope_attrs":{}}

View File

@@ -1,74 +0,0 @@
{
"alert": "content_templating_metrics",
"ruleType": "threshold_rule",
"alertType": "METRIC_BASED_ALERT",
"condition": {
"thresholds": {
"kind": "basic",
"spec": [
{
"name": "critical",
"target": 100,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
]
}
]
},
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [
{
"metricName": "container_memory_bytes_content_templating",
"timeAggregation": "avg",
"spaceAggregation": "max"
}
],
"groupBy": [
{"name": "namespace", "fieldContext": "attribute", "fieldDataType": "string"},
{"name": "pod", "fieldContext": "attribute", "fieldDataType": "string"},
{"name": "container", "fieldContext": "attribute", "fieldDataType": "string"},
{"name": "node", "fieldContext": "attribute", "fieldDataType": "string"},
{"name": "severity", "fieldContext": "attribute", "fieldDataType": "string"}
]
}
}
]
},
"selectedQueryName": "A"
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m0s",
"frequency": "15s"
}
},
"labels": {},
"annotations": {
"description": "Container $container in pod $pod ($namespace) exceeded memory threshold",
"summary": "High container memory in $namespace/$pod",
"title_template": "[$status] High container memory in $namespace/$pod",
"body_template": "**Severity:** $severity\n**Status:** $status\n\n**Pod Details:**\n- **Namespace:** $namespace\n- **Pod:** $pod\n- **Container:** $container\n- **Node:** $node\n\n**Condition ($threshold_name):**\n- **Current:** $value\n- **Threshold:** $compare_op $threshold\n\n**Description:** Container $container in pod $pod ($namespace) has memory usage at $value, which crossed the $threshold_name threshold of $compare_op $threshold. Immediate investigation is recommended to prevent OOMKill.\n\n**Runbook:** https://signoz.io/docs/runbooks/container-memory-near-limit"
},
"notificationSettings": {
"groupBy": [],
"usePolicy": false,
"renotify": {
"enabled": false,
"interval": "30m",
"alertStates": []
}
},
"version": "v5",
"schemaVersion": "v2alpha1"
}

View File

@@ -1,20 +0,0 @@
{ "timestamp": "2026-01-29T10:00:00.000000Z", "duration": "PT1.2S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a1", "span_id": "c1b2c3d4e5f6a7b8", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:00:30.000000Z", "duration": "PT1.4S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a2", "span_id": "c2b3c4d5e6f7a8b9", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:01:00.000000Z", "duration": "PT1.6S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a3", "span_id": "c3b4c5d6e7f8a9b0", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:01:30.000000Z", "duration": "PT1.8S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a4", "span_id": "c4b5c6d7e8f9a0b1", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:02:00.000000Z", "duration": "PT2.1S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a5", "span_id": "c5b6c7d8e9f0a1b2", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:02:30.000000Z", "duration": "PT2.3S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a6", "span_id": "c6b7c8d9e0f1a2b3", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:03:00.000000Z", "duration": "PT2.5S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a7", "span_id": "c7b8c9d0e1f2a3b4", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:03:30.000000Z", "duration": "PT2.7S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a8", "span_id": "c8b9c0d1e2f3a4b5", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:04:00.000000Z", "duration": "PT2.9S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6a9", "span_id": "c9b0c1d2e3f4a5b6", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:04:30.000000Z", "duration": "PT3.1S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b1", "span_id": "d1c2d3e4f5a6b7c8", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:05:00.000000Z", "duration": "PT3.3S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b2", "span_id": "d2c3d4e5f6a7b8c9", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:05:30.000000Z", "duration": "PT3.5S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b3", "span_id": "d3c4d5e6f7a8b9c0", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:06:00.000000Z", "duration": "PT3.7S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b4", "span_id": "d4c5d6e7f8a9b0c1", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:06:30.000000Z", "duration": "PT3.9S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b5", "span_id": "d5c6d7e8f9a0b1c2", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:07:00.000000Z", "duration": "PT4.1S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b6", "span_id": "d6c7d8e9f0a1b2c3", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:07:30.000000Z", "duration": "PT4.3S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b7", "span_id": "d7c8d9e0f1a2b3c4", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:08:00.000000Z", "duration": "PT4.5S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b8", "span_id": "d8c9d0e1f2a3b4c5", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:08:30.000000Z", "duration": "PT4.7S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6b9", "span_id": "d9c0d1e2f3a4b5c6", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:09:00.000000Z", "duration": "PT4.9S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6c1", "span_id": "e1d2e3f4a5b6c7d8", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }
{ "timestamp": "2026-01-29T10:10:00.000000Z", "duration": "PT5.1S", "trace_id": "591f6d3d6b0a1f9e8a71b2c3d4e5f6c2", "span_id": "e2d3e4f5a6b7c8d9", "parent_span_id": "", "name": "POST /checkout", "kind": 2, "status_code": 1, "status_message": "", "resources": { "deployment.environment": "production", "service.name": "checkout-service", "os.type": "linux", "host.name": "ip-10-0-1-23" }, "attributes": { "net.transport": "IP.TCP", "http.scheme": "http", "http.user_agent": "Integration Test", "http.request.method": "POST", "http.response.status_code": "200", "http.request.path": "/checkout" } }

View File

@@ -1,73 +0,0 @@
{
"alert": "content_templating_traces",
"ruleType": "threshold_rule",
"alertType": "TRACES_BASED_ALERT",
"condition": {
"thresholds": {
"kind": "basic",
"spec": [
{
"name": "critical",
"target": 1,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
],
"targetUnit": "s"
}
]
},
"compositeQuery": {
"queryType": "builder",
"unit": "ns",
"panelType": "graph",
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "traces",
"filter": {
"expression": "http.request.path = '/checkout'"
},
"aggregations": [
{
"expression": "p90(duration_nano)"
}
],
"groupBy": [
{"name": "service.name", "fieldContext": "resource"}
]
}
}
]
},
"selectedQueryName": "A"
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m0s",
"frequency": "15s"
}
},
"labels": {},
"annotations": {
"description": "p90 latency high on $service_name",
"summary": "p90 latency exceeded threshold on $service_name",
"title_template": "[$status] p90 latency high on $service_name",
"body_template": "**Severity:** $severity\n**Status:** $status\n\n**Service:** $service_name\n\n**Condition ($threshold_name):**\n- **Current:** $value\n- **Threshold:** $compare_op $threshold\n\n**Description:** p90 request latency on $service_name reached $value, crossing the $threshold_name threshold of $compare_op $threshold. Investigate downstream dependencies and recent deploys.\n\n**Runbook:** https://signoz.io/docs/runbooks/high-latency"
},
"notificationSettings": {
"groupBy": [],
"usePolicy": false,
"renotify": {
"enabled": false,
"interval": "30m",
"alertStates": []
}
},
"version": "v5",
"schemaVersion": "v2alpha1"
}