mirror of https://github.com/SigNoz/signoz.git
synced 2026-02-03 08:33:26 +00:00
test(integration): added fixture for inserting alert data (#10101)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
* chore: fixture for notification channel
* chore: return notification channel info in Create notification channel API
* fix: change scope of create channel fixture to function level
* test: added fixture for creating alert rule
* chore: added debug message on assertion failure
* refactor: improve error handling in webhook notification channel deletion
* fix: enhance error handling in alert rule creation and deletion
* chore: ran py linter and fmt
* chore: ran py linter and fmt
* fix: add timeout to alert rule creation and deletion requests
* fix: silenced pylint on too broad exception
* fix: suppress pylint warnings for broad exception handling in alert rule deletion
* test: added fixture for inserting alert data
* refactor: added fixture for getting test data file path
* feat: add alerts to integration CI workflow
* chore: linter fixes
* chore: changed scope for get_testdata_file_path
* chore: py-formatter
* chore: py-formatter
* chore: updated get_testdata_file_path fixture to a util function
* chore: removed wrong ref

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
This commit is contained in:
committed by GitHub
parent 30a6721472
commit afdb674068
@@ -1,5 +1,6 @@
+from datetime import datetime, timezone
 from http import HTTPStatus
-from typing import Callable
+from typing import Callable, List
 
 import pytest
 import requests
@@ -7,6 +8,10 @@ import requests
 from fixtures import types
 from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
 from fixtures.logger import setup_logger
+from fixtures.logs import Logs
+from fixtures.metrics import Metrics
+from fixtures.traces import Traces
+from fixtures.utils import get_testdata_file_path
 
 logger = setup_logger(__name__)
 
@@ -52,3 +57,54 @@ def create_alert_rule(
         _delete_alert_rule(rule_id)
     except Exception as e:  # pylint: disable=broad-exception-caught
         logger.error("Error deleting rule: %s", {"rule_id": rule_id, "error": e})
+
+
+@pytest.fixture(name="insert_alert_data", scope="function")
+def insert_alert_data(
+    insert_metrics: Callable[[List[Metrics]], None],
+    insert_traces: Callable[[List[Traces]], None],
+    insert_logs: Callable[[List[Logs]], None],
+) -> Callable[[List[types.AlertData]], None]:
+
+    def _insert_alert_data(
+        alert_data_items: List[types.AlertData],
+        base_time: datetime = None,
+    ) -> None:
+
+        metrics: List[Metrics] = []
+        traces: List[Traces] = []
+        logs: List[Logs] = []
+
+        now = base_time or datetime.now(tz=timezone.utc).replace(
+            second=0, microsecond=0
+        )
+
+        for data_item in alert_data_items:
+            if data_item.type == "metrics":
+                _metrics = Metrics.load_from_file(
+                    get_testdata_file_path(data_item.data_path),
+                    base_time=now,
+                )
+                metrics.extend(_metrics)
+            elif data_item.type == "traces":
+                _traces = Traces.load_from_file(
+                    get_testdata_file_path(data_item.data_path),
+                    base_time=now,
+                )
+                traces.extend(_traces)
+            elif data_item.type == "logs":
+                _logs = Logs.load_from_file(
+                    get_testdata_file_path(data_item.data_path),
+                    base_time=now,
+                )
+                logs.extend(_logs)
+
+        # Add data to ClickHouse if any data is present
+        if len(metrics) > 0:
+            insert_metrics(metrics)
+        if len(traces) > 0:
+            insert_traces(traces)
+        if len(logs) > 0:
+            insert_logs(logs)
+
+    yield _insert_alert_data
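For orientation, a hedged sketch of how a test could consume the new fixture; the test name, rule setup, and testdata path are hypothetical, not part of this commit:

    from fixtures import types

    def test_alert_fires_on_logs(insert_alert_data, create_alert_rule):  # hypothetical test
        # Replay sample logs, shifted so the earliest record lands at "now"
        insert_alert_data(
            [types.AlertData(type="logs", data_path="alerts/logs.jsonl")]  # hypothetical path
        )
        # ... create a rule via create_alert_rule and assert the alert fires

Because base_time defaults to the current minute, the JSONL samples are replayed as if they had just been emitted, which is what a threshold rule needs in order to evaluate against fresh data.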
@@ -9,6 +9,7 @@ from ksuid import KsuidMs
 
 from fixtures import types
 from fixtures.fingerprint import LogsOrTracesFingerprint
+from fixtures.utils import parse_timestamp
 
 
 class LogsResource(ABC):
@@ -329,6 +330,59 @@ class Logs(ABC):
             ]
         )
 
+    @classmethod
+    def from_dict(
+        cls,
+        data: dict,
+    ) -> "Logs":
+        """Create a Logs instance from a dict."""
+        # parse timestamp from iso format
+        timestamp = parse_timestamp(data["timestamp"])
+        return cls(
+            timestamp=timestamp,
+            resources=data.get("resources", {}),
+            attributes=data.get("attributes", {}),
+            body=data["body"],
+            severity_text=data.get("severity_text", "INFO"),
+        )
+
+    @classmethod
+    def load_from_file(
+        cls,
+        file_path: str,
+        base_time: Optional[datetime.datetime] = None,
+    ) -> List["Logs"]:
+        """Load logs from a JSONL file."""
+
+        data_list = []
+        with open(file_path, "r", encoding="utf-8") as f:
+            for line in f:
+                line = line.strip()
+                if not line:
+                    continue
+                data_list.append(json.loads(line))
+
+        # If base_time provided, calculate time offset
+        time_offset = datetime.timedelta(0)
+        if base_time is not None:
+            # Find earliest timestamp
+            earliest = None
+            for data in data_list:
+                ts = parse_timestamp(data["timestamp"])
+                if earliest is None or ts < earliest:
+                    earliest = ts
+            if earliest is not None:
+                time_offset = base_time - earliest
+
+        logs = []
+        for data in data_list:
+            original_ts = parse_timestamp(data["timestamp"])
+            adjusted_ts = original_ts + time_offset
+            data["timestamp"] = adjusted_ts.isoformat()
+            logs.append(cls.from_dict(data))
+
+        return logs
+
 
 @pytest.fixture(name="insert_logs", scope="function")
 def insert_logs(
@@ -8,6 +8,7 @@ import numpy as np
 import pytest
 
 from fixtures import types
+from fixtures.utils import parse_timestamp
 
 
 class MetricsTimeSeries(ABC):
@@ -341,10 +342,7 @@ class Metrics(ABC):
             metric_name_override: If provided, overrides the metric_name from data
         """
         # parse timestamp from iso format
-        ts_str = data["timestamp"]
-        if ts_str.endswith("Z"):
-            ts_str = ts_str[:-1] + "+00:00"
-        timestamp = datetime.datetime.fromisoformat(ts_str)
+        timestamp = parse_timestamp(data["timestamp"])
 
         return cls(
             metric_name=metric_name_override or data["metric_name"],
@@ -397,10 +395,7 @@ class Metrics(ABC):
             # Find earliest timestamp
             earliest = None
             for data in data_list:
-                ts_str = data["timestamp"]
-                if ts_str.endswith("Z"):
-                    ts_str = ts_str[:-1] + "+00:00"
-                ts = datetime.datetime.fromisoformat(ts_str)
+                ts = parse_timestamp(data["timestamp"])
                 if earliest is None or ts < earliest:
                     earliest = ts
             if earliest is not None:
@@ -408,10 +403,7 @@ class Metrics(ABC):
 
         metrics = []
         for data in data_list:
-            ts_str = data["timestamp"]
-            if ts_str.endswith("Z"):
-                ts_str = ts_str[:-1] + "+00:00"
-            original_ts = datetime.datetime.fromisoformat(ts_str)
+            original_ts = parse_timestamp(data["timestamp"])
             adjusted_ts = original_ts + time_offset
             data["timestamp"] = adjusted_ts.isoformat()
             metrics.append(
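These three hunks collapse the same inlined "Z"-suffix workaround into a single parse_timestamp call. For context, a minimal sketch of the behavior being normalized (assuming the suite may run on Python < 3.11, where fromisoformat rejects a trailing "Z"):

    import datetime

    ts_str = "2024-01-01T10:00:00Z"
    if ts_str.endswith("Z"):                 # rewrite "Z" to an explicit UTC offset
        ts_str = ts_str[:-1] + "+00:00"
    parsed = datetime.datetime.fromisoformat(ts_str)
    assert parsed.tzinfo is not None         # timezone-aware result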
@@ -13,6 +13,7 @@ import pytest
 
 from fixtures import types
 from fixtures.fingerprint import LogsOrTracesFingerprint
+from fixtures.utils import parse_duration, parse_timestamp
 
 
 class TracesKind(Enum):
@@ -23,12 +24,20 @@ class TracesKind(Enum):
     SPAN_KIND_PRODUCER = 4
     SPAN_KIND_CONSUMER = 5
 
+    @classmethod
+    def from_value(cls, value: int) -> "TracesKind":
+        return cls(value)
+
 
 class TracesStatusCode(Enum):
     STATUS_CODE_UNSET = 0
     STATUS_CODE_OK = 1
     STATUS_CODE_ERROR = 2
 
+    @classmethod
+    def from_value(cls, value: int) -> "TracesStatusCode":
+        return cls(value)
+
 
 class TracesRefType(Enum):
     REF_TYPE_CHILD_OF = "CHILD_OF"
@@ -602,6 +611,83 @@ class Traces(ABC):
             dtype=object,
         )
 
+    @classmethod
+    def from_dict(
+        cls,
+        data: dict,
+    ) -> "Traces":
+        """Create a Traces instance from a dict."""
+        # parse timestamp from iso format
+        timestamp = parse_timestamp(data["timestamp"])
+        duration = parse_duration(data.get("duration", "PT1S"))
+
+        kind = TracesKind.from_value(
+            data.get("kind", TracesKind.SPAN_KIND_INTERNAL.value)
+        )
+        status_code = TracesStatusCode.from_value(
+            data.get("status_code", TracesStatusCode.STATUS_CODE_UNSET.value)
+        )
+
+        return cls(
+            timestamp=timestamp,
+            duration=duration,
+            trace_id=data["trace_id"],
+            span_id=data["span_id"],
+            parent_span_id=data.get("parent_span_id", ""),
+            name=data.get("name", "default span"),
+            kind=kind,
+            status_code=status_code,
+            status_message=data.get("status_message", ""),
+            resources=data.get("resources", {}),
+            attributes=data.get("attributes", {}),
+            trace_state=data.get("trace_state", ""),
+            flags=data.get("flags", 0),
+        )
+
+    @classmethod
+    def load_from_file(
+        cls,
+        file_path: str,
+        base_time: Optional[datetime.datetime] = None,
+    ) -> List["Traces"]:
+        """Load traces from a JSONL file."""
+
+        data_list = []
+        with open(file_path, "r", encoding="utf-8") as f:
+            for line in f:
+                line = line.strip()
+                if not line:
+                    continue
+                data_list.append(json.loads(line))
+
+        if not data_list:
+            return []
+
+        # If base_time provided, calculate time offset
+        time_offset = datetime.timedelta(0)
+        if base_time is not None:
+            # Find earliest timestamp
+            earliest = None
+            for data in data_list:
+                ts = parse_timestamp(data["timestamp"])
+                if earliest is None or ts < earliest:
+                    earliest = ts
+            if earliest is not None:
+                time_offset = base_time - earliest
+
+        traces = []
+        for data in data_list:
+            # add time offset to timestamp
+            original_ts = parse_timestamp(data["timestamp"])
+            duration = parse_duration(data.get("duration", "PT1S"))
+            adjusted_ts = original_ts + time_offset
+            data["timestamp"] = adjusted_ts.isoformat()
+            # parse duration of the span
+            data["duration"] = duration
+            traces.append(cls.from_dict(data))
+
+        return traces
+
 
 @pytest.fixture(name="insert_traces", scope="function")
 def insert_traces(
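A hedged example of a minimal span record (ids, name, and values invented) in the shape Traces.from_dict accepts:

    span = {
        "timestamp": "2024-01-01T10:00:00Z",
        "duration": "PT0.25S",    # ISO-8601 duration, handled by parse_duration
        "trace_id": "a1b2c3d4e5f60718293a4b5c6d7e8f90",
        "span_id": "0011223344556677",
        "name": "GET /api/v1/orders",
        "kind": 2,                # mapped through TracesKind.from_value
        "status_code": 1,         # mapped through TracesStatusCode.from_value
    }
    trace = Traces.from_dict(span)

Everything except timestamp, trace_id, and span_id falls back to a default, so testdata files only need to spell out the fields a scenario cares about.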
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Dict
+from typing import Dict, Literal
 from urllib.parse import urljoin
 
 import clickhouse_connect
@@ -162,3 +162,14 @@ class Network:
 
     def __log__(self) -> str:
         return f"Network(id={self.id}, name={self.name})"
+
+
+# Alerts related types
+
+
+@dataclass(frozen=True)
+class AlertData:
+    # type of the alert data, one of 'metrics', 'logs', 'traces'
+    type: Literal["metrics", "logs", "traces"]
+    # path to the data file in testdata directory
+    data_path: str
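A brief, hedged illustration of the new dataclass (the data_path value is invented):

    item = types.AlertData(type="metrics", data_path="alerts/cpu_usage.jsonl")  # hypothetical file
    # frozen=True makes instances immutable:
    # item.type = "logs" would raise dataclasses.FrozenInstanceError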
tests/integration/fixtures/utils.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+import datetime
+import os
+from typing import Any
+
+import isodate
+
+
+# parses the given timestamp string from ISO format to datetime.datetime
+def parse_timestamp(ts_str: str) -> datetime.datetime:
+    """
+    Parse a timestamp string from ISO format.
+    """
+    if ts_str.endswith("Z"):
+        ts_str = ts_str[:-1] + "+00:00"
+    return datetime.datetime.fromisoformat(ts_str)
+
+
+# parses the given duration to datetime.timedelta
+def parse_duration(duration: Any) -> datetime.timedelta:
+    """
+    Parse a duration string from ISO format.
+    """
+    # if it's string then parse it as iso format
+    if isinstance(duration, str):
+        return isodate.parse_duration(duration)
+    if isinstance(duration, datetime.timedelta):
+        return duration
+    return datetime.timedelta(seconds=duration)
+
+
+def get_testdata_file_path(file: str) -> str:
+    testdata_dir = os.path.join(os.path.dirname(__file__), "..", "testdata")
+    return os.path.join(testdata_dir, file)
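A hedged sketch of the three helpers in use (all inputs invented):

    import datetime

    parse_timestamp("2024-01-01T10:00:00Z")
    # -> datetime.datetime(2024, 1, 1, 10, 0, tzinfo=datetime.timezone.utc)

    parse_duration("PT1M30S")              # ISO-8601 string -> timedelta(seconds=90)
    parse_duration(5)                      # bare number     -> timedelta(seconds=5)
    parse_duration(datetime.timedelta(1))  # timedelta       -> returned unchanged

    get_testdata_file_path("alerts/cpu.jsonl")  # hypothetical file under tests/integration/testdata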
@@ -15,6 +15,7 @@ dependencies = [
     "requests>=2.32.4",
     "sqlalchemy>=2.0.43",
     "selenium>=4.40.0",
+    "isodate>=0.7.2",
 ]
 
 [dependency-groups]
tests/integration/uv.lock (generated, 11 lines)
@@ -450,6 +450,7 @@ version = "0.1.0"
 source = { virtual = "." }
 dependencies = [
     { name = "clickhouse-connect" },
+    { name = "isodate" },
     { name = "numpy" },
     { name = "psycopg2" },
     { name = "pytest" },
@@ -472,6 +473,7 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "clickhouse-connect", specifier = ">=0.8.18" },
+    { name = "isodate", specifier = ">=0.7.2" },
     { name = "numpy", specifier = ">=2.3.2" },
     { name = "psycopg2", specifier = ">=2.9.10" },
     { name = "pytest", specifier = ">=8.3.5" },
@@ -491,6 +493,15 @@ dev = [
     { name = "pylint", specifier = ">=3.3.6" },
 ]
 
+[[package]]
+name = "isodate"
+version = "0.7.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" },
+]
+
 [[package]]
 name = "isort"
 version = "7.0.0"