Compare commits

...

1 Commits

Author SHA1 Message Date
Abhishek Kumar Singh
1cc1bc6f5b test: added alert manager notifier e2e 2026-04-13 21:43:59 +05:30
8 changed files with 779 additions and 15 deletions

View File

@@ -22,6 +22,7 @@ pytest_plugins = [
"fixtures.notification_channel",
"fixtures.alerts",
"fixtures.cloudintegrations",
"fixtures.maildev",
]

View File

@@ -9,10 +9,34 @@ import requests
from fixtures import types
from fixtures.logger import setup_logger
from fixtures.maildev import verify_email_received
logger = setup_logger(__name__)
def _is_json_subset(subset, superset) -> bool:
"""Check if subset is contained within superset recursively.
- For dicts: all keys in subset must exist in superset with matching values
- For lists: all items in subset must be present in superset
- For scalars: exact equality
"""
if isinstance(subset, dict):
if not isinstance(superset, dict):
return False
return all(
key in superset and _is_json_subset(value, superset[key])
for key, value in subset.items()
)
if isinstance(subset, list):
if not isinstance(superset, list):
return False
return all(
any(_is_json_subset(sub_item, sup_item) for sup_item in superset)
for sub_item in subset
)
return subset == superset
def collect_webhook_firing_alerts(
webhook_test_container: types.TestContainerDocker, notification_channel_name: str
) -> List[types.FiringAlert]:
@@ -73,6 +97,92 @@ def _verify_alerts_labels(
return (fired_count, missing_alerts)
def verify_webhook_notification_expectation(
    notification_channel: types.TestContainerDocker,
    validation_data: dict,
) -> bool:
    """Return True when wiremock has recorded a POST to the expected path
    whose JSON body contains the expected json_body as a subset."""
    path = validation_data["path"]
    expected_body = validation_data["json_body"]
    # Query wiremock's admin API for every recorded POST on this path.
    find_url = notification_channel.host_configs["8080"].get("__admin/requests/find")
    response = requests.post(find_url, json={"method": "POST", "url": path}, timeout=5)
    assert response.status_code == HTTPStatus.OK, (
        f"Failed to find requests for path {path}, "
        f"status code: {response.status_code}, response: {response.text}"
    )
    for recorded in response.json()["requests"]:
        # wiremock returns the request body base64-encoded.
        actual_body = json.loads(
            base64.b64decode(recorded["bodyAsBase64"]).decode("utf-8")
        )
        if _is_json_subset(expected_body, actual_body):
            logger.debug(
                "Found request for path %s with body %s and expected body %s",
                path,
                json.dumps(actual_body),
                json.dumps(expected_body),
            )
            return True
        logger.debug(
            "Request found for path %s with body %s does not contain expected body %s",
            path,
            json.dumps(actual_body),
            json.dumps(expected_body),
        )
    return False
def _check_validation(
    validation: types.NotificationValidation,
    notification_channel: types.TestContainerDocker,
    maildev: types.TestContainerDocker,
) -> bool:
    """Dispatch a single validation check to the verifier matching its
    destination type (webhook -> wiremock, email -> maildev)."""
    checkers = {
        "webhook": lambda: verify_webhook_notification_expectation(
            notification_channel, validation.validation_data
        ),
        "email": lambda: verify_email_received(maildev, validation.validation_data),
    }
    if validation.destination_type not in checkers:
        raise ValueError(f"Invalid destination type: {validation.destination_type}")
    return checkers[validation.destination_type]()
def verify_notification_expectation(
    notification_channel: types.TestContainerDocker,
    maildev: types.TestContainerDocker,
    expected_notification: types.AMNotificationExpectation,
) -> bool:
    """Poll for expected notifications across webhook and email channels.

    Follows the same wait-and-check pattern as verify_webhook_alert_expectation:
    re-check every second until wait_time_seconds has elapsed.

    Returns True on success; raises AssertionError when the expectation is
    not met within the window.
    """
    time_to_wait = datetime.now() + timedelta(
        seconds=expected_notification.wait_time_seconds
    )
    while datetime.now() < time_to_wait:
        # One pass: every validation must currently be satisfied.
        all_found = all(
            _check_validation(v, notification_channel, maildev)
            for v in expected_notification.notification_validations
        )
        if expected_notification.should_notify and all_found:
            logger.info("All expected notifications found")
            return True
        # NOTE: when should_notify is False we deliberately poll for the whole
        # window before concluding that nothing arrived.
        time.sleep(1)
    # Timeout reached
    if not expected_notification.should_notify:
        # Verify no notifications were received
        for validation in expected_notification.notification_validations:
            found = _check_validation(validation, notification_channel, maildev)
            assert not found, (
                f"Expected no notification but found one for "
                f"{validation.destination_type} with data {validation.validation_data}"
            )
        logger.info("No notifications found, as expected")
        return True
    # Expected notifications but didn't get them all — report missing
    missing = [
        v for v in expected_notification.notification_validations
        if not _check_validation(v, notification_channel, maildev)
    ]
    assert len(missing) == 0, (
        f"Expected all notifications to be found but missing: {missing}"
    )
    return True
def verify_webhook_alert_expectation(
test_alert_container: types.TestContainerDocker,
@@ -135,6 +245,43 @@ def verify_webhook_alert_expectation(
return True # should not reach here
def update_channel_config_urls(
    channel_config: dict,
    notification_channel: "types.TestContainerDocker",
) -> dict:
    """
    Return a copy of the channel config with notifier URLs rewritten to point
    at the wiremock notification_channel container.

    Rewrites the URL-bearing field of each supported receiver list
    (slack_configs, msteams_configs, webhook_configs, pagerduty_configs,
    opsgenie_configs), keeping only the path component of the original URL so
    the request lands on the matching wiremock stub. The input config is not
    modified.
    """
    import copy
    from urllib.parse import urlparse

    # Deep copy is required: the receiver entries are nested dicts, so the
    # previous shallow .copy() let the rewrite below mutate the caller's
    # config in place — including the shared module-level default configs.
    config = copy.deepcopy(channel_config)
    # Maps each receiver list to the name of its URL field.
    url_field_map = {
        "slack_configs": "api_url",
        "msteams_configs": "webhook_url",
        "webhook_configs": "url",
        "pagerduty_configs": "url",
        "opsgenie_configs": "api_url",
    }
    for config_key, url_field in url_field_map.items():
        for entry in config.get(config_key, []):
            if url_field in entry:
                path = urlparse(entry[url_field]).path
                entry[url_field] = notification_channel.container_configs[
                    "8080"
                ].get(path)
    return config
def update_rule_channel_name(rule_data: dict, channel_name: str):
"""
updates the channel name in the thresholds

View File

@@ -0,0 +1,136 @@
from http import HTTPStatus
import json
from typing import List
import docker
import docker.errors
import pytest
import requests
from testcontainers.core.container import DockerContainer, Network
from fixtures import dev, types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(name="maildev", scope="package")
def maildev(
network: Network, request: pytest.FixtureRequest, pytestconfig: pytest.Config
) -> types.TestContainerDocker:
"""
Package-scoped fixture for MailDev container.
Provides SMTP (port 1025) and HTTP API (port 1080) for email testing.
"""
def create() -> types.TestContainerDocker:
container = DockerContainer(image="maildev/maildev:2.1.0")
container.with_exposed_ports(1025, 1080)
container.with_network(network=network)
container.start()
return types.TestContainerDocker(
id=container.get_wrapped_container().id,
host_configs={
"1025": types.TestContainerUrlConfig(
scheme="smtp",
address=container.get_container_host_ip(),
port=container.get_exposed_port(1025),
),
"1080": types.TestContainerUrlConfig(
scheme="http",
address=container.get_container_host_ip(),
port=container.get_exposed_port(1080),
),
},
container_configs={
"1025": types.TestContainerUrlConfig(
scheme="smtp",
address=container.get_wrapped_container().name,
port=1025,
),
"1080": types.TestContainerUrlConfig(
scheme="http",
address=container.get_wrapped_container().name,
port=1080,
),
},
)
def delete(container: types.TestContainerDocker):
client = docker.from_env()
try:
client.containers.get(container_id=container.id).stop()
client.containers.get(container_id=container.id).remove(v=True)
except docker.errors.NotFound:
logger.info(
"Skipping removal of MailDev, MailDev(%s) not found. Maybe it was manually removed?",
{"id": container.id},
)
def restore(cache: dict) -> types.TestContainerDocker:
return types.TestContainerDocker.from_cache(cache)
return dev.wrap(
request,
pytestconfig,
"maildev",
lambda: types.TestContainerDocker(id="", host_configs={}, container_configs={}),
create,
delete,
restore,
)
def get_all_mails(maildev: types.TestContainerDocker) -> List[dict]:
    """
    Fetch every email currently held by MailDev via its HTTP API.
    Each returned dict carries the keys: subject, html, text.
    """
    endpoint = maildev.host_configs["1080"].get("/email")
    response = requests.get(endpoint, timeout=5)
    assert response.status_code == HTTPStatus.OK, (
        f"Failed to fetch emails from MailDev, "
        f"status code: {response.status_code}, response: {response.text}"
    )
    raw_emails = response.json()
    logger.debug("Emails: %s", json.dumps(raw_emails, indent=2))
    normalized = []
    for mail in raw_emails:
        normalized.append(
            {
                "subject": mail.get("subject", ""),
                "html": mail.get("html", ""),
                "text": mail.get("text", ""),
            }
        )
    return normalized
def verify_email_received(
    maildev: types.TestContainerDocker, filters: dict
) -> bool:
    """
    Return True when at least one MailDev email matches every filter.
    Filter values are compared with exact equality against the email
    fields (subject, html, text).
    """
    for mail in get_all_mails(maildev):
        logger.debug("Email: %s", json.dumps(mail, indent=2))
        matched = True
        for field, expected in filters.items():
            if field not in mail or mail[field] != expected:
                matched = False
                break
        if matched:
            return True
    return False
def delete_all_mails(maildev: types.TestContainerDocker) -> None:
    """
    Clear the MailDev inbox by deleting every stored email.
    """
    endpoint = maildev.host_configs["1080"].get("/email/all")
    response = requests.delete(endpoint, timeout=5)
    deleted_ok = response.status_code == HTTPStatus.OK
    assert deleted_ok, (
        f"Failed to delete emails from MailDev, "
        f"status code: {response.status_code}, response: {response.text}"
    )

View File

@@ -70,6 +70,29 @@ def notification_channel(
restore,
)
@pytest.fixture(name="create_notification_channel", scope="function")
def create_notification_channel(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> Callable[[dict], str]:
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
def _create_notification_channel(channel_config: dict) -> str:
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/channels"),
json=channel_config,
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.CREATED, (
f"Failed to create channel, "
f"Response: {response.text} "
f"Response status: {response.status_code}"
)
return response.json()["data"]["id"]
return _create_notification_channel
@pytest.fixture(name="create_webhook_notification_channel", scope="function")
def create_webhook_notification_channel(

View File

@@ -16,6 +16,11 @@ from fixtures.logger import setup_logger
logger = setup_logger(__name__)
# Toggle: True = build SigNoz image from local Dockerfile.
# False = pull public image from Docker Hub (https://hub.docker.com/r/signoz/signoz/tags).
BUILD_LOCAL_IMAGE = False
REMOTE_IMAGE = "signoz/signoz:latest"
def create_signoz(
network: Network,
@@ -44,22 +49,28 @@ def create_signoz(
if arch == "x86_64":
arch = "amd64"
# Build the image
dockerfile_path = "cmd/enterprise/Dockerfile.integration"
if with_web:
dockerfile_path = "cmd/enterprise/Dockerfile.with-web.integration"
if BUILD_LOCAL_IMAGE:
# Build the image from local Dockerfile
dockerfile_path = "cmd/enterprise/Dockerfile.integration"
if with_web:
dockerfile_path = "cmd/enterprise/Dockerfile.with-web.integration"
self = DockerImage(
path="../../",
dockerfile_path=dockerfile_path,
tag="signoz:integration",
buildargs={
"TARGETARCH": arch,
"ZEUSURL": zeus.container_configs["8080"].base(),
},
)
self = DockerImage(
path="../../",
dockerfile_path=dockerfile_path,
tag="signoz:integration",
buildargs={
"TARGETARCH": arch,
"ZEUSURL": zeus.container_configs["8080"].base(),
},
)
self.build()
self.build()
image = "signoz:integration"
else:
# Pull public image from Docker Hub
docker.from_env().images.pull(REMOTE_IMAGE)
image = REMOTE_IMAGE
env = (
{
@@ -87,7 +98,7 @@ def create_signoz(
if env_overrides:
env = env | env_overrides
container = DockerContainer("signoz:integration")
container = DockerContainer(image)
for k, v in env.items():
container.with_env(k, v)
container.with_exposed_ports(8080)

View File

@@ -203,3 +203,37 @@ class AlertTestCase:
alert_data: List[AlertData]
# list of alert expectations for the test case
alert_expectation: AlertExpectation
@dataclass(frozen=True)
class NotificationValidation:
    """A single expected notification and the data needed to validate it."""

    # destination type of the notification, either webhook or email:
    # slack, msteams, pagerduty, opsgenie and webhook channels send
    # notifications through webhook; email channels through email
    destination_type: Literal["webhook", "email"]
    # validation data for validating the received notification payload
    # (for webhook presumably {"path": ..., "json_body": ...}; for email,
    # field filters such as subject/html — confirm against the verifiers)
    validation_data: dict[str, object]
@dataclass(frozen=True)
class AMNotificationExpectation:
    """What notifications (if any) a rule should produce, and how long to
    wait for them."""

    # whether we expect any notifications to be fired or not, false when
    # testing downtime scenarios or when we don't expect any notifications
    # to be fired in the given time period
    should_notify: bool
    # seconds to wait for the notifications to be fired; if no notifications
    # are fired in the expected time, the test will fail
    wait_time_seconds: int
    # list of notifications to expect, as a single rule can trigger multiple
    # notifications spanning across different notifiers
    notification_validations: List[NotificationValidation]
@dataclass(frozen=True)
class AlertManagerNotificationTestCase:
    """One parametrized alertmanager notifier test: a rule, the data that
    fires it, the channel to notify, and the expected notifications."""

    # name of the test case
    name: str
    # path to the rule file in testdata directory
    rule_path: str
    # list of alert data that will be inserted into the database for the
    # rule to be triggered
    alert_data: List[AlertData]
    # configuration for the notification channel
    channel_config: dict[str, object]
    # notification expectations for the test case
    notification_expectation: AMNotificationExpectation

View File

@@ -0,0 +1,371 @@
import json
import uuid
from datetime import datetime, timedelta, timezone
from typing import Callable, List
import pytest
from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingResponse
from fixtures import types
from fixtures.alertutils import (
update_channel_config_urls,
update_rule_channel_name,
verify_notification_expectation,
)
from fixtures.logger import setup_logger
from fixtures.utils import get_testdata_file_path
"""
Default notification configs for each of the notifiers
"""
slack_default_config = {
# channel name configured on runtime
"slack_configs": [{
"api_url": "services/TEAM_ID/BOT_ID/TOKEN_ID",
"title":"[{{ .Status | toUpper }}{{ if eq .Status \"firing\" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n {{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n {{\" \"}}(\n {{- with .CommonLabels.Remove .GroupLabels.Names }}\n {{- range $index, $label := .SortedPairs -}}\n {{ if $index }}, {{ end }}\n {{- $label.Name }}=\"{{ $label.Value -}}\"\n {{- end }}\n {{- end -}}\n )\n {{- end }}",
"text":"{{ range .Alerts -}}\r\n *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}\r\n\r\n *Summary:* {{ .Annotations.summary }}\r\n *Description:* {{ .Annotations.description }}\r\n *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}\r\n *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}\r\n\r\n *Details:*\r\n {{ range .Labels.SortedPairs -}}\r\n {{- if ne .Name \"ruleId\" -}}\r\n \u2022 *{{ .Name }}:* {{ .Value }}\r\n {{ end -}}\r\n {{ end -}}\r\n{{ end }}"
}],
}
# MSTeams default config
msteams_default_config = {
"msteams_configs": [{
"webhook_url": "msteams/webhook_url",
"title":"[{{ .Status | toUpper }}{{ if eq .Status \"firing\" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n {{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n {{\" \"}}(\n {{- with .CommonLabels.Remove .GroupLabels.Names }}\n {{- range $index, $label := .SortedPairs -}}\n {{ if $index }}, {{ end }}\n {{- $label.Name }}=\"{{ $label.Value -}}\"\n {{- end }}\n {{- end -}}\n )\n {{- end }}",
"text":"{{ range .Alerts -}}\r\n *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}\r\n\r\n *Summary:* {{ .Annotations.summary }}\r\n *Description:* {{ .Annotations.description }}\r\n *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}\r\n *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}\r\n\r\n *Details:*\r\n {{ range .Labels.SortedPairs -}}\r\n {{- if ne .Name \"ruleId\" -}}\r\n \u2022 *{{ .Name }}:* {{ .Value }}\r\n {{ end -}}\r\n {{ end -}}\r\n{{ end }}"
}],
}
# pagerduty default config
pagerduty_default_config = {
"pagerduty_configs": [{
"routing_key":"PagerDutyRoutingKey",
"url":"v2/enqueue",
"client":"SigNoz Alert Manager",
"client_url":"https://enter-signoz-host-n-port-here/alerts",
"description":"[{{ .Status | toUpper }}{{ if eq .Status \"firing\" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n\t{{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n\t {{\" \"}}(\n\t {{- with .CommonLabels.Remove .GroupLabels.Names }}\n\t\t{{- range $index, $label := .SortedPairs -}}\n\t\t {{ if $index }}, {{ end }}\n\t\t {{- $label.Name }}=\"{{ $label.Value -}}\"\n\t\t{{- end }}\n\t {{- end -}}\n\t )\n\t{{- end }}",
"details":{
"firing":"{{ template \"pagerduty.default.instances\" .Alerts.Firing }}",
"num_firing":"{{ .Alerts.Firing | len }}",
"num_resolved":"{{ .Alerts.Resolved | len }}",
"resolved":"{{ template \"pagerduty.default.instances\" .Alerts.Resolved }}"
},
"source":"SigNoz Alert Manager",
"severity":"{{ (index .Alerts 0).Labels.severity }}"
}],
}
# opsgenie default config
opsgenie_default_config = {
"opsgenie_configs": [
{
"api_key": "OpsGenieAPIKey",
"api_url": "/",
"description": "{{ if gt (len .Alerts.Firing) 0 -}}\r\n\tAlerts Firing:\r\n\t{{ range .Alerts.Firing }}\r\n\t - Message: {{ .Annotations.description }}\r\n\tLabels:\r\n\t{{ range .Labels.SortedPairs -}}\r\n\t\t{{- if ne .Name \"ruleId\" }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end -}}\r\n\t{{- end }} Annotations:\r\n\t{{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end }} Source: {{ .GeneratorURL }}\r\n\t{{ end }}\r\n{{- end }}\r\n{{ if gt (len .Alerts.Resolved) 0 -}}\r\n\tAlerts Resolved:\r\n\t{{ range .Alerts.Resolved }}\r\n\t - Message: {{ .Annotations.description }}\r\n\tLabels:\r\n\t{{ range .Labels.SortedPairs -}}\r\n\t\t{{- if ne .Name \"ruleId\" }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end -}}\r\n\t{{- end }} Annotations:\r\n\t{{ range .Annotations.SortedPairs }} - {{ .Name }} = {{ .Value }}\r\n\t{{ end }} Source: {{ .GeneratorURL }}\r\n\t{{ end }}\r\n{{- end }}",
"priority": "{{ if eq (index .Alerts 0).Labels.severity \"critical\" }}P1{{ else if eq (index .Alerts 0).Labels.severity \"warning\" }}P2{{ else if eq (index .Alerts 0).Labels.severity \"info\" }}P3{{ else }}P4{{ end }}",
"message": "{{ .CommonLabels.alertname }}",
"details": {}
}
]
}
# webhook default config
webhook_default_config = {
"webhook_configs": [{
"url": "webhook/webhook_url",
}],
}
# email default config
email_default_config = {
"email_configs": [{
"to": "test@example.com",
"html": "<html><body>{{ range .Alerts -}}\r\n *Alert:* {{ .Labels.alertname }}{{ if .Labels.severity }} - {{ .Labels.severity }}{{ end }}\r\n\r\n *Summary:* {{ .Annotations.summary }}\r\n *Description:* {{ .Annotations.description }}\r\n *RelatedLogs:* {{ if gt (len .Annotations.related_logs) 0 -}} View in <{{ .Annotations.related_logs }}|logs explorer> {{- end}}\r\n *RelatedTraces:* {{ if gt (len .Annotations.related_traces) 0 -}} View in <{{ .Annotations.related_traces }}|traces explorer> {{- end}}\r\n\r\n *Details:*\r\n {{ range .Labels.SortedPairs -}}\r\n {{- if ne .Name \"ruleId\" -}}\r\n \u2022 *{{ .Name }}:* {{ .Value }}\r\n {{ end -}}\r\n {{ end -}}\r\n{{ end }}</body></html>",
"headers": {
"Subject": "[{{ .Status | toUpper }}{{ if eq .Status \"firing\" }}:{{ .Alerts.Firing | len }}{{ end }}] {{ .CommonLabels.alertname }} for {{ .CommonLabels.job }}\n {{- if gt (len .CommonLabels) (len .GroupLabels) -}}\n {{\" \"}}(\n {{- with .CommonLabels.Remove .GroupLabels.Names }}\n {{- range $index, $label := .SortedPairs -}}\n {{ if $index }}, {{ end }}\n {{- $label.Name }}=\"{{ $label.Value -}}\"\n {{- end }}\n {{- end -}}\n )\n {{- end }}"
}
}],
}
# Tests verifying that the notifiers send out notifications with the expected content.
# Exercises each notifier integration to check whether it sends notifications with the
# default templating: webhook, slack, email, pagerduty, opsgenie, msteams.
NOTIFIERS_TEST = [
types.AlertManagerNotificationTestCase(
name="slack_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=slack_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/services/TEAM_ID/BOT_ID/TOKEN_ID",
"json_body": {
"attachments": [
{
"title": "[FIRING:1] threshold_above_at_least_once for (alertname=\"threshold_above_at_least_once\", severity=\"critical\", threshold.name=\"critical\")",
"text": "*Alert:* threshold_above_at_least_once - critical\r\n\r\n *Summary:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n *Description:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n *RelatedLogs:* \r\n *RelatedTraces:* \r\n\r\n *Details:*\r\n • *alertname:* threshold_above_at_least_once\r\n • *severity:* critical\r\n • *threshold.name:* critical\r\n ",
}]}
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="msteams_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=msteams_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/msteams/webhook_url",
"json_body": {
"@context": "http://schema.org/extensions",
"type": "MessageCard",
"title": "[FIRING:1] threshold_above_at_least_once for (alertname=\"threshold_above_at_least_once\", severity=\"critical\", threshold.name=\"critical\")",
"text": "*Alert:* threshold_above_at_least_once - critical\r\n\r\n *Summary:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n *Description:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n *RelatedLogs:* \r\n *RelatedTraces:* \r\n\r\n *Details:*\r\n • *alertname:* threshold_above_at_least_once\r\n • *severity:* critical\r\n • *threshold.name:* critical\r\n ",
"themeColor": "8C1A1A"
}
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="pagerduty_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=pagerduty_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/v2/enqueue",
"json_body": {
"routing_key": "PagerDutyRoutingKey",
"payload": {
"summary": "[FIRING:1] threshold_above_at_least_once for (alertname=\"threshold_above_at_least_once\", severity=\"critical\", threshold.name=\"critical\")",
"custom_details": {
"firing": {
"Annotations": [{"description = This alert is fired when the defined metric (current value": "15) crosses the threshold (10)"}],
"Labels": ["alertname = threshold_above_at_least_once","severity = critical","threshold.name = critical"],
}}
},
"client": "SigNoz Alert Manager",
"client_url": "https://enter-signoz-host-n-port-here/alerts"
}
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="opsgenie_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=opsgenie_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/v2/alerts",
"json_body": {
"message": "threshold_above_at_least_once",
"description": "Alerts Firing:\r\n\t\r\n\t - Message: This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n\tLabels:\r\n\t - alertname = threshold_above_at_least_once\r\n\t - severity = critical\r\n\t - threshold.name = critical\r\n\t Annotations:\r\n\t - description = This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n\t - summary = This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\r\n\t Source: \r\n\t\r\n",
"details": {
"alertname": "threshold_above_at_least_once",
"severity": "critical",
"threshold.name": "critical"
},
"priority": "P1"
}
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="webhook_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=webhook_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="webhook",
validation_data={
"path": "/webhook/webhook_url",
"json_body": {
"status": "firing",
"alerts": [{
"status": "firing",
"labels": {"alertname": "threshold_above_at_least_once","severity": "critical","threshold.name": "critical"},
"annotations": {"summary": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)"
}}
],
"commonLabels": {"alertname": "threshold_above_at_least_once","severity": "critical","threshold.name": "critical"},
"commonAnnotations": {"summary": "This alert is fired when the defined metric (current value: 15) crosses the threshold (10)"}
}
},
),
],
),
),
types.AlertManagerNotificationTestCase(
name="email_notifier_default_templating",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
channel_config=email_default_config,
notification_expectation=types.AMNotificationExpectation(
should_notify=True,
wait_time_seconds=30,
notification_validations=[
types.NotificationValidation(
destination_type="email",
validation_data={
"subject": "[FIRING:1] threshold_above_at_least_once for (alertname=\"threshold_above_at_least_once\", severity=\"critical\", threshold.name=\"critical\")",
"html": "<html><body>*Alert:* threshold_above_at_least_once - critical\n\n *Summary:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\n *Description:* This alert is fired when the defined metric (current value: 15) crosses the threshold (10)\n *RelatedLogs:* \n *RelatedTraces:* \n\n *Details:*\n \u2022 *alertname:* threshold_above_at_least_once\n \u2022 *severity:* critical\n \u2022 *threshold.name:* critical\n </body></html>"
},
),
],
),
),
]
logger = setup_logger(__name__)
@pytest.mark.parametrize(
    "notifier_test_case",
    NOTIFIERS_TEST,
    ids=lambda notifier_test_case: notifier_test_case.name,
)
def test_notifier_templating(
    # Notification channel related fixtures
    notification_channel: types.TestContainerDocker,
    make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None],
    create_notification_channel: Callable[[dict], str],
    # Alert rule related fixtures
    create_alert_rule: Callable[[dict], str],
    # Alert data insertion related fixtures
    insert_alert_data: Callable[[List[types.AlertData], datetime], None],
    # Mail dev container for email verification
    maildev: types.TestContainerDocker,
    # Test case from parametrize
    notifier_test_case: types.AlertManagerNotificationTestCase,
):
    """End-to-end check that a notifier delivers notifications rendered with
    its default template.

    Flow: rewrite the channel URLs to wiremock -> stub the webhook endpoints
    -> create the channel -> insert alert data -> create the rule -> poll for
    the expected webhook/email payloads.
    """
    # Generate unique channel name so parametrized cases don't collide
    channel_name = str(uuid.uuid4())
    # Update channel config: set name and rewrite URLs to wiremock
    channel_config = update_channel_config_urls(
        notifier_test_case.channel_config, notification_channel
    )
    channel_config["name"] = channel_name
    # Setup wiremock mocks for webhook-based validations
    webhook_validations = [
        v
        for v in notifier_test_case.notification_expectation.notification_validations
        if v.destination_type == "webhook"
    ]
    mock_mappings = [
        Mapping(
            request=MappingRequest(
                method=HttpMethods.POST, url=v.validation_data["path"]
            ),
            response=MappingResponse(status=200, json_body={}),
            persistent=False,
        )
        for v in webhook_validations
    ]
    if mock_mappings:
        make_http_mocks(notification_channel, mock_mappings)
        logger.info("Mock mappings created: %s", {"mock_mappings": mock_mappings})
    # Create notification channel
    create_notification_channel(channel_config)
    logger.info("Channel created with name: %s", {"channel_name": channel_name})
    # Insert alert data backdated 5 minutes — presumably so the rule's
    # evaluation window already contains firing samples; confirm against
    # the rule definitions
    insert_alert_data(
        notifier_test_case.alert_data,
        base_time=datetime.now(tz=timezone.utc) - timedelta(minutes=5),
    )
    # Create alert rule pointed at the freshly created channel
    rule_path = get_testdata_file_path(notifier_test_case.rule_path)
    with open(rule_path, "r", encoding="utf-8") as f:
        rule_data = json.loads(f.read())
    update_rule_channel_name(rule_data, channel_name)
    rule_id = create_alert_rule(rule_data)
    logger.info(
        "rule created: %s", {"rule_id": rule_id, "rule_name": rule_data["alert"]}
    )
    # Verify notification expectations (polls until timeout; raises on failure)
    verify_notification_expectation(
        notification_channel,
        maildev,
        notifier_test_case.notification_expectation,
    )

View File

@@ -0,0 +1,41 @@
from fixtures import types
from fixtures.signoz import create_signoz
import pytest
from testcontainers.core.container import Network
@pytest.fixture(name="signoz", scope="package")
def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
network: Network,
zeus: types.TestContainerDocker,
gateway: types.TestContainerDocker,
sqlstore: types.TestContainerSQL,
clickhouse: types.TestContainerClickhouse,
request: pytest.FixtureRequest,
pytestconfig: pytest.Config,
maildev: types.TestContainerDocker,
notification_channel: types.TestContainerDocker,
) -> types.SigNoz:
"""
Package-scoped fixture for setting up SigNoz.
Overrides SMTP, PagerDuty, and OpsGenie URLs to point to test containers.
"""
return create_signoz(
network=network,
zeus=zeus,
gateway=gateway,
sqlstore=sqlstore,
clickhouse=clickhouse,
request=request,
pytestconfig=pytestconfig,
env_overrides={
# SMTP config for email notifications via maildev
"SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_SMTP__SMARTHOST": f"{maildev.container_configs['1025'].address}:{maildev.container_configs['1025'].port}",
"SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_SMTP__REQUIRE__TLS": "false",
"SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_SMTP__FROM": "alertmanager@signoz.io",
# PagerDuty API URL -> wiremock (default: https://events.pagerduty.com/v2/enqueue)
"SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_PAGERDUTY__URL": notification_channel.container_configs["8080"].get("/v2/enqueue"),
# OpsGenie API URL -> wiremock (default: https://api.opsgenie.com/)
"SIGNOZ_ALERTMANAGER_SIGNOZ_GLOBAL_OPSGENIE__API__URL": notification_channel.container_configs["8080"].get("/"),
},
)