Compare commits


1 Commit

| Author | SHA1 | Message | Date |
|---|---|---|---|
| srikanthccv | 54b429acae | chore: perses research | 2026-02-15 00:51:42 +05:30 |
425 changed files with 11565 additions and 26232 deletions

.github/CODEOWNERS vendored

@@ -43,12 +43,6 @@
/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25
# Emailing Owners
/pkg/emailing/ @vikrantgupta25
/pkg/types/emailtypes/ @vikrantgupta25
/templates/email/ @vikrantgupta25
# Querier Owners
/pkg/querier/ @srikanthccv


@@ -53,9 +53,9 @@ jobs:
- sqlite
clickhouse-version:
- 25.5.6
- 25.10.5
- 25.10.1
schema-migrator-version:
- v0.142.0
- v0.129.7
postgres-version:
- 15
if: |

.gitignore vendored

@@ -1,11 +1,8 @@
node_modules
# editor
.vscode
!.vscode/settings.json
.zed
.idea
deploy/docker/environment_tiny/common_test
frontend/node_modules
@@ -34,6 +31,8 @@ frontend/yarn-debug.log*
frontend/yarn-error.log*
frontend/src/constants/env.ts
.idea
**/build
**/storage
**/locust-scripts/__pycache__/
@@ -230,3 +229,5 @@ cython_debug/
pyrightconfig.json
# cursor files
frontend/.cursor/


@@ -14,8 +14,5 @@
},
"[sql]": {
"editor.defaultFormatter": "adpyke.vscode-sql-formatter"
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
}
}


@@ -176,6 +176,25 @@ Wir haben Benchmarks veröffentlicht, die Loki mit SigNoz vergleichen. Schauen S
Wir ❤️ Beiträge zum Projekt, egal ob große oder kleine. Bitte lies dir zuerst die [CONTRIBUTING.md](CONTRIBUTING.md), durch, bevor du anfängst, Beiträge zu SigNoz zu machen.
Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem #contributing Kanal in unserer [slack community](https://signoz.io/slack)
### Unsere Projektbetreuer
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
<br /><br />
## Dokumentation


@@ -221,6 +221,34 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING
Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)
### Project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
- [Ekansh Gupta](https://github.com/eKuG)
- [Aniket Agarwal](https://github.com/aniketio-ctrl)
#### Frontend
- [Yunus M](https://github.com/YounixM)
- [Vikrant Gupta](https://github.com/vikrantgupta25)
- [Sagar Rajput](https://github.com/SagarRajput-7)
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/therealpandey)
<br /><br />


@@ -187,6 +187,25 @@ Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metric
如果你不知道如何开始? 只需要在 [slack 社区](https://signoz.io/slack) 通过 `#contributing` 频道联系我们。
### 项目维护人员
#### 后端
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
#### 前端
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### 运维开发
- [Prashant Shahi](https://github.com/prashant-shahi)
<br /><br />
## 文档


@@ -85,9 +85,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
return querier.NewHandler(ps, q, a)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)


@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
eequerier "github.com/SigNoz/signoz/ee/querier"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
@@ -125,10 +124,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
communityHandler := querier.NewHandler(ps, q, a)
return eequerier.NewHandler(ps, q, communityHandler)
},
)
if err != nil {


@@ -193,15 +193,6 @@ emailing:
templates:
# The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
directory: /opt/signoz/conf/templates/email
format:
header:
enabled: false
logo_url: ""
help:
enabled: false
email: ""
footer:
enabled: false
smtp:
# The SMTP server address.
address: localhost:25
@@ -294,6 +285,7 @@ flagger:
config:
boolean:
use_span_metrics: true
interpolation_enabled: false
kafka_span_eval: false
string:
float:
@@ -308,14 +300,3 @@ user:
allow_self: true
# The duration within which a user can reset their password.
max_token_lifetime: 6h
root:
# Whether to enable the root user. When enabled, a root user is provisioned
# on startup using the email and password below. The root user cannot be
# deleted, updated, or have their password changed through the UI.
enabled: false
# The email address of the root user.
email: ""
# The password of the root user. Must meet password requirements.
password: ""
# The name of the organization to create or look up for the root user.
org_name: default


@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.112.0
image: signoz/signoz:v0.111.0
command:
- --config=/root/config/prometheus.yml
ports:


@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.112.0
image: signoz/signoz:v0.111.0
command:
- --config=/root/config/prometheus.yml
ports:


@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.112.0}
image: signoz/signoz:${VERSION:-v0.111.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml


@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.112.0}
image: signoz/signoz:${VERSION:-v0.111.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

File diff suppressed because it is too large.


@@ -155,7 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
- `Request` is a Go type that describes the request body or form.
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
- `Response` is the Go type for the successful response payload.
- `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
@@ -173,170 +172,8 @@ See existing examples in:
- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)
## OpenAPI schema details for request/response types
The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.
### Adding request examples
Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:
```go
type OpenAPIExample struct {
Name string // unique key for the example (e.g. "traces_time_series")
Summary string // short description shown in docs (e.g. "Time series: count spans grouped by service")
Description string // optional longer description
Value any // the example payload, typically map[string]any
}
```
For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:
```go
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"querier"},
Summary: "Query range",
Description: "Execute a composite query over a time range.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
// ...
},
},
},
},
},
},
// ... more examples
},
// ...
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
```
### `required` tag
Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value: a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.
```go
type ListItem struct {
...
}
type ListResponse struct {
List []ListItem `json:"list" required:"true" nullable:"true"`
Total uint64 `json:"total" required:"true"`
}
```
In this example, a response like `{"list": null, "total": 0}` is valid. Both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.
### `nullable` tag
Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go, the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).
Be explicit about the distinction:
- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice.
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.
```go
// Non-nullable: Go code must ensure this is always an initialized slice.
type NonNullableExample struct {
Items []Item `json:"items" required:"true"`
}
```
When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.
### `Enum()` method
For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.
```go
type Signal struct {
valuer.String
}
var (
SignalTraces = Signal{valuer.NewString("traces")}
SignalLogs = Signal{valuer.NewString("logs")}
SignalMetrics = Signal{valuer.NewString("metrics")}
)
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}
```
This produces the following in the generated OpenAPI spec:
```yaml
Signal:
enum:
- traces
- logs
- metrics
type: string
```
Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.
### `JSONSchema()` method (custom schema)
For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:
```go
var _ jsonschema.Exposer = Step{}
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a duration string or seconds.")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
```
## What should I remember?
- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil` which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.

File diff suppressed because it is too large.


@@ -0,0 +1,436 @@
# Feasibility Analysis: Adopting Perses.dev Specification for SigNoz Dashboards
## Executive Summary
SigNoz's dashboard JSON has been a free-form `map[string]interface{}` with no schema enforcement. This document evaluates adopting [Perses.dev](https://perses.dev/) (a CNCF Sandbox project) as a structured dashboard specification. The conclusion is that **wholesale adoption is not recommended**, but several Perses design patterns should be borrowed into a SigNoz-native v6 dashboard schema.
---
## 1. Current State: SigNoz Dashboard JSON
### 1.1 Structure Overview
The dashboard is stored as `StorableDashboardData = map[string]interface{}` in Go (see `pkg/types/dashboardtypes/dashboard.go`). Top-level fields:
| Field | Type | Description |
|-------|------|-------------|
| `title` | `string` | Dashboard display name |
| `description` | `string` | Dashboard description text |
| `tags` | `string[]` | Categorization tags (e.g., `["redis", "database"]`) |
| `image` | `string` | Base64 or URL-encoded SVG for dashboard icon/thumbnail |
| `version` | `string` | Schema version: `"v4"` or `"v5"` |
| `layout` | `Layout[]` | React-grid-layout positioning for each widget |
| `panelMap` | `Record<string, {widgets: Layout[], collapsed: boolean}>` | Groups panels under row widgets for collapsible sections |
| `widgets` | `Widget[]` | Array of panel/widget definitions |
| `variables` | `Record<string, IDashboardVariable>` | Dashboard variable definitions |
| `uploadedGrafana` | `boolean` | Flag indicating if imported from Grafana |
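Because none of these fields are backed by a Go struct, even trivial reads require chained type assertions. The following is a minimal sketch of that pattern, assuming a hypothetical `widgetTitles` helper and an illustrative `title` field on widgets; it is not code from the repository:
```go
package main

import "fmt"

// StorableDashboardData mirrors the free-form shape described above.
type StorableDashboardData = map[string]interface{}

// widgetTitles (hypothetical) shows the chain of type assertions needed to
// read one nested property when there is no schema: every level needs its
// own "ok" check, and a typo in a key silently returns nothing.
func widgetTitles(data StorableDashboardData) []string {
	titles := []string{}
	widgets, ok := data["widgets"].([]interface{})
	if !ok {
		return titles // "widgets" missing or not an array
	}
	for _, w := range widgets {
		widget, ok := w.(map[string]interface{})
		if !ok {
			continue
		}
		title, ok := widget["title"].(string)
		if !ok {
			continue // field missing or wrong type
		}
		titles = append(titles, title)
	}
	return titles
}

func main() {
	dashboard := StorableDashboardData{
		"title":   "Redis Overview",
		"widgets": []interface{}{map[string]interface{}{"title": "Hit Rate"}},
	}
	fmt.Println(widgetTitles(dashboard)) // [Hit Rate]
}
```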
### 1.2 Panel/Widget Types
| Panel Type | Constant | API Request Type |
|-----------|----------|-----------------|
| Time Series | `graph` | `time_series` |
| Bar Chart | `bar` | `time_series` |
| Table | `table` | `scalar` |
| Pie Chart | `pie` | `scalar` |
| Single Value | `value` | `scalar` |
| List/Logs | `list` | `raw` |
| Trace | `trace` | `trace` |
| Histogram | `histogram` | `distribution` |
| Row (group header) | `row` | N/A |
### 1.3 Query System
Each widget carries a `query` object with three modes simultaneously:
```json
{
  "queryType": "builder|clickhouse_sql|promql",
  "builder": {
    "queryData": [],
    "queryFormulas": []
  },
  "clickhouse_sql": [],
  "promql": []
}
```
Only `queryType` determines which mode is active; the other sections carry empty placeholder defaults.
#### Builder Query v4 (legacy, widely used)
- `aggregateOperator` + `aggregateAttribute` as separate fields
- `filters` with structured `items[]` containing key objects with synthetic IDs
- `having: []` as array
#### Builder Query v5 (newer, integration dashboards)
- `aggregations[]` array with `metricName`, `timeAggregation`, `spaceAggregation` combined
- `filter` with expression string (e.g., `"host_name IN $host_name"`)
- `having: {expression: ""}` as object
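To make the v4/v5 gap concrete, here is a hedged side-by-side sketch of the same metric aggregation in both formats. The v5 fields mirror the descriptions above; the v4 operator name and filter-item shape are assumptions for illustration only, not copied from a real dashboard:
```json
{
  "v4_style": {
    "aggregateOperator": "sum_rate",
    "aggregateAttribute": { "key": "redis_keyspace_hits" },
    "filters": {
      "op": "AND",
      "items": [
        { "key": { "key": "host_name", "id": "host_name--string--tag--false" }, "op": "in", "value": ["$host_name"] }
      ]
    },
    "having": []
  },
  "v5_style": {
    "aggregations": [
      { "metricName": "redis_keyspace_hits", "timeAggregation": "rate", "spaceAggregation": "sum" }
    ],
    "filter": { "expression": "host_name IN $host_name" },
    "having": { "expression": "" }
  }
}
```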
### 1.4 Query Range V5 API
The V5 execution API wraps queries in a `QueryEnvelope` discriminated union:
```
QueryEnvelope = {type: QueryType, spec: any}
```
Seven query types: `builder_query`, `builder_formula`, `builder_sub_query`, `builder_join`, `builder_trace_operator`, `promql`, `clickhouse_sql`
Six request types: `scalar`, `time_series`, `raw`, `raw_stream`, `trace`, `distribution`
Three signals with distinct aggregation models:
- **Metrics**: `metricName + temporality + timeAggregation + spaceAggregation`
- **Traces**: Expression-based (e.g., `"COUNT()"`, `"p99(duration_nano)"`)
- **Logs**: Expression-based (e.g., `"COUNT()"`, `"count_distinct(host.name)"`)
The v5 API also supports 17 post-processing functions, server-side formula evaluation, SQL-style joins, and trace span relationship operators.
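For orientation, a minimal request body that wraps a single builder query in the envelope is sketched below; the field names follow this section and the examples later in this document, while the timestamps, metric name, and filter value are illustrative:
```json
{
  "schemaVersion": "v1",
  "start": 1640995200000,
  "end": 1640998800000,
  "requestType": "time_series",
  "compositeQuery": {
    "queries": [
      {
        "type": "builder_query",
        "spec": {
          "name": "A",
          "signal": "metrics",
          "aggregations": [
            { "metricName": "redis_keyspace_hits", "timeAggregation": "rate", "spaceAggregation": "sum" }
          ],
          "filter": { "expression": "host_name IN $host_name" }
        }
      }
    ]
  }
}
```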
### 1.5 Documented Pain Points
1. **`StorableDashboardData` is `map[string]interface{}`**: All nested property access requires manual type assertions with fragile `ok` checks.
2. **Two incompatible query schema versions coexisting**: v4 and v5 query formats coexist in the same codebase. The backend migration layer (`pkg/transition/migrate_common.go`) converts at execution time.
3. **Massive boilerplate**: Every widget carries `selectedLogFields` and `selectedTracesFields` arrays even for metrics-only panels; identical 5-element arrays are copy-pasted hundreds of times.
4. **Duplicate query slots**: Every widget carries all three query types (`builder`, `clickhouse_sql`, `promql`) with empty placeholders for inactive types.
5. **Variable key inconsistency**: Variables keyed by human-readable name (e.g., `"Account"`) OR UUID depending on dashboard.
6. **Variables coupled to ClickHouse SQL**: Variable queries use raw `SELECT ... FROM signoz_metrics.distributed_time_series_v4_1day`, coupling dashboard definitions to internal storage schema.
7. **Redundant synthetic IDs**: Filter keys contain derived `id` fields like `"cloud_account_id--string--tag--false"`.
8. **Spelling errors baked in**: `"timePreferance"` (misspelled) is embedded in the serialized JSON contract.
9. **Layout/widget coupling implicit**: Layout items reference widgets by matching `i` to `id` with no schema enforcement. `panelMap` adds another implicit layer.
10. **No schema validation**: Dashboard data has no Go struct for validation. Relies entirely on frontend TypeScript types with extensive optional markers.
---
## 2. Perses.dev Specification Overview
### 2.1 What is Perses?
Perses is a CNCF Sandbox project providing:
- An **open dashboard specification** (implemented in Go, CUE, TypeScript)
- A **plugin-based extension model** for panels, queries, datasources, and variables
- **Dashboard-as-Code** via CUE and Go SDKs
- **Static validation** via `percli` CLI
- **Grafana migration** tooling
Adopters include Chronosphere, Red Hat, SAP, and Amadeus.
### 2.2 Dashboard Structure
```yaml
kind: Dashboard
metadata:
  name: "..."
  project: "..."
spec:
  display: {name, description}
  datasources: {name: DatasourceSpec} # inline or referenced
  variables: [Variable] # ordered list
  panels: {id: Panel} # map of panel definitions
  layouts: [Layout] # separate from panels
  duration: "5m"
  refreshInterval: "30s"
```
### 2.3 Core Design: Plugin = `{kind: string, spec: any}`
The universal extension point. Panels, queries, datasources, and variables are all plugins:
```go
type Plugin struct {
	Kind     string          `json:"kind"`
	Metadata *PluginMetadata `json:"metadata,omitempty"`
	Spec     any             `json:"spec"`
}
```
### 2.4 Panel Structure
```yaml
kind: Panel
spec:
  display: {name, description}
  plugin: {kind: "TimeSeriesChart", spec: {...}}
  queries:
    - kind: TimeSeriesQuery
      spec:
        plugin:
          kind: PrometheusTimeSeriesQuery
          spec: {query: "up", datasource: "$ds"}
```
### 2.5 Layout System
Panels separated from layout. Grid-based positioning with JSON `$ref` pointers:
```yaml
kind: Grid
spec:
  display:
    title: "Section Name"
    collapse: {open: true}
  items:
    - x: 0
      y: 0
      width: 6
      height: 6
      content: {"$ref": "#/spec/panels/my_panel"}
```
### 2.6 Supported Datasources
| Datasource | Plugin Kind | Protocol |
|---|---|---|
| Prometheus | `PrometheusDatasource` | PromQL |
| Tempo | `TempoDatasource` | TraceQL |
| Loki | `LokiDatasource` | LogQL |
| Pyroscope | `PyroscopeDatasource` | Pyroscope API |
| ClickHouse | Community plugin | SQL |
| VictoriaLogs | Community plugin | VictoriaLogs API |
### 2.7 Plugin System
There are five plugin categories: datasource, query, panel, variable, and explore. Each plugin is distributed as a compressed archive containing CUE schemas, React components (loaded via module federation), and optional Grafana migration logic.
---
## 3. Feasibility Assessment
### 3.1 Support for Logs/Metrics/Traces/Events/Profiles
| Signal | Perses Status | SigNoz Requirement | Gap |
|---|---|---|---|
| **Metrics** | Prometheus plugin (mature) | ClickHouse-backed with dual aggregation model (time + space) | **Significant** - Perses assumes PromQL |
| **Traces** | Tempo plugin (exists) | ClickHouse-backed with trace operators, span-level queries, joins | **Significant** - Perses Tempo does basic TraceQL |
| **Logs** | Loki plugin (exists) | ClickHouse-backed with builder queries, raw list views, streaming | **Moderate** - Perses Loki uses LogQL |
| **Profiles** | Pyroscope plugin (exists) | Not yet core in SigNoz dashboards | Low gap |
| **Events** | No plugin | Future SigNoz need | Would require custom plugin |
Perses has plugins for all four pillars, but each assumes a specific backend protocol (PromQL, TraceQL, LogQL). SigNoz uses a **unified query builder** abstracting over ClickHouse. This is a fundamental architectural mismatch.
### 3.2 Extensibility
Perses's plugin architecture is genuinely extensible. SigNoz could create custom plugins (`SigNozDatasource`, `SigNozBuilderQuery`, etc.). However, this means:
- Writing and maintaining a **full Perses plugin ecosystem** for SigNoz
- Plugin must handle all 7 query types and 3 signal types
- CUE schema definitions for all SigNoz query structures
- Tracking Perses upstream changes (still a Sandbox project, not graduated)
### 3.3 Coupling Analysis
| Dimension | Current SigNoz | With Perses | Assessment |
|---|---|---|---|
| Dashboard to Storage | Variables use raw ClickHouse SQL | Would need SigNoz query plugin | Improvement possible |
| Dashboard to Frontend | Widget types tightly coupled to React | Perses separates panel spec from rendering | Improvement |
| Dashboard to Query API | Widgets carry full query objects | Plugin-typed, referenced via datasource | Improvement, but adds indirection |
| Dashboard to Perses | N/A | Depends on Perses versioning, plugin compat, CUE toolchain | **New coupling** |
### 3.4 Support for Query Range V5
This is the **most critical gap**:
| SigNoz V5 Feature | Perses Equivalent | Plugin Solvable? |
|---|---|---|
| `builder_query` with signal-specific aggregation | Plugin `spec: any` | Yes, but SigNoz-specific |
| `builder_formula` (cross-query math) | No formula concept | **Partially** - needs custom panel logic |
| `builder_join` (SQL-style cross-signal joins) | No equivalent | **No** - fundamentally different model |
| `builder_trace_operator` (span relationships) | No equivalent | **No** - unique to SigNoz |
| `builder_sub_query` (nested queries) | No equivalent | Would need plugin extension |
| Multiple query types per panel | Single-typed queries | Would need wrapper plugin |
| Post-processing functions (ewma, anomaly, timeShift) | No equivalent | Would need to be in plugin spec |
---
## 4. Why NOT Adopt Perses Wholesale
### 4.1 SigNoz-inside-Perses
Every SigNoz query feature would live inside `spec: any` blobs within Perses plugin wrappers:
```json
{
  "kind": "TimeSeriesQuery",
  "spec": {
    "plugin": {
      "kind": "SigNozBuilderQuery",
      "spec": { /* ALL SigNoz-specific content here */ }
    }
  }
}
```
Perses validates the envelope (`kind: TimeSeriesQuery` exists, `plugin.kind` is registered). But the actual content is opaque to Perses. You'd still need your own validation for everything inside `spec`.
### 4.2 Formulas, Joins, and Trace Operators Have No Home
Perses model: `Panel -> queries[] -> Query`. Each query is independent.
SigNoz model: `Panel -> compositeQuery -> {queries[], formulas[], joins[], traceOperators[]}`. Queries reference each other by name. Formulas combine results. Joins cross signals.
You'd have to either:
- Shove the entire compositeQuery into a single plugin spec (making Perses query structure meaningless)
- Fork/extend Perses core spec (ongoing merge conflicts)
### 4.3 Plugin Maintenance Burden
Required custom plugins:
| Plugin | Purpose |
|---|---|
| `SigNozDatasource` | Points to SigNoz query-service |
| `SigNozBuilderQuery` | Wraps v5 builder queries for metrics/logs/traces |
| `SigNozFormulaQuery` | Wraps formula evaluation |
| `SigNozTraceOperatorQuery` | Wraps trace structural operators |
| `SigNozJoinQuery` | Wraps cross-signal joins |
| `SigNozSubQuery` | Wraps nested queries |
| `SigNozAttributeValuesVariable` | Variable from attribute values |
| `SigNozQueryVariable` | Variable from query results |
Each needs: CUE schema, React component, Grafana migration handler, and tests. Every v5 feature addition requires plugin schema updates.
### 4.4 Community Mismatch
Perses adopters are primarily Prometheus-centric. A `SigNozDatasource` plugin is useful only to SigNoz. You'd be the sole maintainer of the plugin suite.
### 4.5 The Counterargument
The one strong argument FOR wholesale adoption: **you get out of the "dashboard spec" business entirely**. Even if 80% is SigNoz-specific plugins, the 20% Perses handles (metadata, layout, display, variable ordering, versioning, RBAC scoping) is real work you don't have to maintain. If Perses graduates from CNCF sandbox, ecosystem benefits compound.
This trade-off doesn't justify the ongoing plugin maintenance tax, especially since those patterns are straightforward to implement natively. However, if SigNoz plans to eventually expose PromQL/TraceQL/LogQL-compatible endpoints, the calculus changes significantly.
---
## 5. Recommendation: Borrow Patterns, Build Native
### 5.1 Patterns to Adopt from Perses
| Perses Pattern | SigNoz Adoption |
|---|---|
| **`kind` + `spec` envelope** | Use for query types, panel types, variables. Consistent with v5 `QueryEnvelope`. |
| **Panels separated from Layout** | `panels: {}` map + `layouts: []` referencing by ID. |
| **Ordered variables as array** | Move from `variables: {name: {...}}` to `variables: [...]`. |
| **CUE or JSON Schema validation** | Define formal schema for dashboards. Use for CI and import/export. |
| **Dashboard-as-Code SDK** | Go/TypeScript SDK for programmatic dashboard generation. |
| **Metadata structure** | `kind` + `apiVersion` + `metadata` + `spec` top-level (Kubernetes-style). |
### 5.2 Proposed v6 Dashboard Structure
```json
{
  "kind": "Dashboard",
  "apiVersion": "signoz.io/v1",
  "metadata": {
    "name": "redis-overview",
    "title": "Redis Overview",
    "description": "...",
    "tags": ["redis", "database"],
    "image": "..."
  },
  "spec": {
    "defaults": {
      "timeRange": "5m",
      "refreshInterval": "30s"
    },
    "variables": [
      {
        "kind": "QueryVariable",
        "spec": {
          "name": "host_name",
          "signal": "metrics",
          "attributeName": "host_name",
          "multiSelect": true
        }
      }
    ],
    "panels": {
      "hit_rate": {
        "kind": "TimeSeriesPanel",
        "spec": {
          "title": "Hit Rate",
          "description": "Cache hit rate across hosts",
          "display": {
            "yAxisUnit": "percent",
            "legend": {"position": "bottom"}
          },
          "query": {
            "type": "composite",
            "spec": {
              "queries": [
                {
                  "type": "builder_query",
                  "spec": {
                    "name": "A",
                    "signal": "metrics",
                    "aggregations": [{"metricName": "redis_keyspace_hits", "timeAggregation": "rate", "spaceAggregation": "sum"}],
                    "filter": {"expression": "host_name IN $host_name"}
                  }
                }
              ],
              "formulas": [
                {"type": "builder_formula", "spec": {"expression": "A / (A + B) * 100"}}
              ]
            }
          }
        }
      }
    },
    "layouts": [
      {
        "kind": "Grid",
        "spec": {
          "title": "Overview",
          "collapsible": true,
          "collapsed": false,
          "items": [
            {"panel": "hit_rate", "x": 0, "y": 0, "w": 6, "h": 6}
          ]
        }
      }
    ]
  }
}
```
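If the v6 direction is pursued, the same envelope can be expressed as plain Go structs instead of `map[string]interface{}`. The sketch below is derived from the JSON above; the type and field names are illustrative, not an agreed schema:
```go
package dashboardv6

import "encoding/json"

// Dashboard is the Kubernetes-style envelope proposed above.
type Dashboard struct {
	Kind       string   `json:"kind"`       // always "Dashboard"
	APIVersion string   `json:"apiVersion"` // e.g. "signoz.io/v1"
	Metadata   Metadata `json:"metadata"`
	Spec       Spec     `json:"spec"`
}

type Metadata struct {
	Name        string   `json:"name"`
	Title       string   `json:"title"`
	Description string   `json:"description,omitempty"`
	Tags        []string `json:"tags,omitempty"`
	Image       string   `json:"image,omitempty"`
}

// Envelope is the shared kind+spec wrapper reused for variables, panels, and
// layouts. Spec is decoded per Kind by the owning module.
type Envelope struct {
	Kind string          `json:"kind"`
	Spec json.RawMessage `json:"spec"`
}

type Spec struct {
	Defaults  Defaults            `json:"defaults"`
	Variables []Envelope          `json:"variables"` // ordered by array position
	Panels    map[string]Envelope `json:"panels"`
	Layouts   []Envelope          `json:"layouts"`
}

type Defaults struct {
	TimeRange       string `json:"timeRange"`
	RefreshInterval string `json:"refreshInterval"`
}
```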
### 5.3 Key Improvements Over Current Format
| Issue | Current | v6 |
|---|---|---|
| No validation | `map[string]interface{}` | JSON Schema enforced |
| Boilerplate | Every widget has selectedLogFields, all query modes | Only active query mode stored, display options per panel type |
| Variable ordering | `order` field inside map entries | Array position |
| Variable keys | Name or UUID inconsistently | `name` field in spec, array position |
| Layout coupling | Implicit `i` matches `id` | Explicit `panel` reference in layout items |
| Spelling errors | `timePreferance` | `timePreference` (fixed) |
| Query structure | Flat list of all queries + formulas | Typed envelope matching v5 API |
| Row grouping | Separate `panelMap` with duplicate layout entries | Integrated into `layouts[]` with `collapsible` flag |
### 5.4 Migration Path
1. **v4/v5 to v6 migration**: Build a Go migration function that transforms existing dashboards to v6 format. Handle both v4 and v5 query formats.
2. **Backward compatibility**: Support reading v4/v5 dashboards with automatic upgrade to v6 on save.
3. **Frontend**: Update TypeScript interfaces to match v6 schema. Remove legacy response converters once v5 API is fully adopted.
4. **Validation**: Add JSON Schema validation on dashboard create/update API endpoints.
5. **Integration dashboards**: Regenerate all dashboards in `SigNoz/dashboards` repo using v6 format.
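A rough, self-contained sketch of the migration entry point described in step 1 follows; every name here is hypothetical, and real code would sit next to `pkg/transition` and reuse the existing v4-to-v5 query conversion rather than reimplementing it:
```go
package dashboardmigrate

import (
	"encoding/json"
	"fmt"
)

// MigrateToV6 (hypothetical) upgrades a stored v4/v5 dashboard document to the
// proposed v6 envelope, returning the input unchanged if it is already v6.
func MigrateToV6(raw []byte) ([]byte, error) {
	var legacy map[string]interface{}
	if err := json.Unmarshal(raw, &legacy); err != nil {
		return nil, fmt.Errorf("parse legacy dashboard: %w", err)
	}
	if kind, _ := legacy["kind"].(string); kind == "Dashboard" {
		return raw, nil // already v6
	}

	v6 := map[string]interface{}{
		"kind":       "Dashboard",
		"apiVersion": "signoz.io/v1",
		"metadata": map[string]interface{}{
			"title":       legacy["title"],
			"description": legacy["description"],
			"tags":        legacy["tags"],
			"image":       legacy["image"],
		},
		"spec": map[string]interface{}{
			// Placeholders: widgets, layout, panelMap, and variables would be
			// converted into panels, layouts, and ordered variables here,
			// including the v4 -> v5 query format upgrade.
			"variables": []interface{}{},
			"panels":    map[string]interface{}{},
			"layouts":   []interface{}{},
		},
	}
	return json.MarshalIndent(v6, "", "  ")
}
```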
---
## References
- [Perses Homepage](https://perses.dev/)
- [Perses Dashboard API](https://perses.dev/perses/docs/api/dashboard/)
- [Perses Open Specification](https://perses.dev/perses/docs/concepts/open-specification/)
- [Perses Plugin Creation](https://perses.dev/perses/docs/plugins/creation/)
- [Perses Prometheus Plugin Model](https://perses.dev/plugins/docs/prometheus/model/)
- [Perses GitHub Repository](https://github.com/perses/perses)
- [SigNoz Dashboards Repository](https://github.com/SigNoz/dashboards)
- SigNoz source: `pkg/types/dashboardtypes/dashboard.go`
- SigNoz source: `pkg/types/querybuildertypes/querybuildertypesv5/`
- SigNoz source: `frontend/src/types/api/dashboard/getAll.ts`
- SigNoz source: `frontend/src/types/api/queryBuilder/queryBuilderData.ts`
- SigNoz source: `frontend/src/types/api/v5/queryRange.ts`
- SigNoz source: `pkg/transition/migrate_common.go`

File diff suppressed because it is too large.


@@ -0,0 +1,378 @@
// SigNoz Perses Plugin Schemas
//
// CUE schemas for all SigNoz-specific Perses plugin types.
// These define the `spec` shape inside Perses Plugin wrappers:
// { "kind": "<PluginKind>", "spec": <validated by this file> }
//
// Perses core types (Dashboard, Panel, Layout, Variable envelopes)
// are validated by Perses itself. This file only covers SigNoz plugins.
//
// Usage:
// percli lint --schema-dir ./signoz-perses-plugins dashboard.json
//
// Reference: https://perses.dev/perses/docs/api/plugin/
package signoz
// ============================================================================
// Datasource Plugin
// ============================================================================
// SigNozDatasource configures the connection to a SigNoz query service.
// Used inside: DatasourceSpec.plugin { kind: "SigNozDatasource", spec: ... }
#SigNozDatasource: {
// Direct URL for embedded mode (SigNoz serves its own dashboards).
// The frontend calls SigNoz APIs directly at this base URL.
directUrl?: string & =~"^/|^https?://"
// HTTP proxy config for external mode (standalone Perses connecting to SigNoz).
proxy?: #HTTPProxy
}
#HTTPProxy: {
kind: "HTTPProxy"
spec: {
url: string & =~"^https?://"
allowedEndpoints?: [...#AllowedEndpoint]
}
}
#AllowedEndpoint: {
endpointPattern: string
method: "GET" | "POST" | "PUT" | "DELETE"
}
// ============================================================================
// Query Plugins
// ============================================================================
// SigNozBuilderQuery is a single builder query for one signal.
// Used inside: TimeSeriesQuery.spec.plugin { kind: "SigNozBuilderQuery", spec: ... }
// Use this when the panel has independent queries (no formulas, joins, or trace operators).
#SigNozBuilderQuery: {
name: #QueryName
signal: #Signal
source?: string
disabled?: bool | *false
// Metrics use structured aggregations; traces/logs use expression aggregations
aggregations?: [...#MetricAggregation | #ExpressionAggregation]
filter?: #Filter
groupBy?: [...#GroupByKey]
order?: [...#OrderBy]
having?: #Having
selectFields?: [...#TelemetryFieldKey]
limit?: int & >=0 & <=10000
limitBy?: #LimitBy
offset?: int & >=0
cursor?: string
secondaryAggregations?: [...#SecondaryAggregation]
functions?: [...#PostProcessingFunction]
legend?: string
reduceTo?: #ReduceTo
stepInterval?: #StepInterval
}
// SigNozCompositeQuery bundles multiple queries with formulas, joins, or trace operators.
// Used inside: TimeSeriesQuery.spec.plugin { kind: "SigNozCompositeQuery", spec: ... }
// Use this when a panel needs cross-query references (A/B formulas, joins, trace operators).
#SigNozCompositeQuery: {
queries: [...#CompositeQueryEntry] & [_, ...] // at least one query
formulas?: [...#FormulaEntry]
joins?: [...#JoinEntry]
traceOperators?: [...#TraceOperatorEntry]
}
// A query within a composite. Same shape as SigNozBuilderQuery.
#CompositeQueryEntry: #SigNozBuilderQuery
// A formula referencing other queries by name.
#FormulaEntry: {
name: #QueryName
expression: string & !="" // e.g. "A/B * 100"
disabled?: bool | *false
order?: [...#OrderBy]
limit?: int & >=0 & <=10000
having?: #Having
functions?: [...#PostProcessingFunction]
legend?: string
}
// A join combining results from two queries.
#JoinEntry: {
name: #QueryName
disabled?: bool | *false
left: #QueryRef
right: #QueryRef
joinType: "inner" | "left" | "right" | "full" | "cross"
on: string & !="" // join condition expression
aggregations?: [...#MetricAggregation | #ExpressionAggregation]
selectFields?: [...#TelemetryFieldKey]
filter?: #Filter
groupBy?: [...#GroupByKey]
having?: #Having
order?: [...#OrderBy]
limit?: int & >=0 & <=10000
secondaryAggregations?: [...#SecondaryAggregation]
functions?: [...#PostProcessingFunction]
}
// A trace operator expressing span relationships.
#TraceOperatorEntry: {
name: #QueryName
disabled?: bool | *false
// Operators: => (direct descendant), -> (indirect descendant),
// && (AND), || (OR), NOT (exclude). Example: "A => B && C"
expression: string & !=""
filter?: #Filter
returnSpansFrom?: string
order?: [...#TraceOrderBy]
aggregations?: [...#ExpressionAggregation]
stepInterval?: #StepInterval
groupBy?: [...#GroupByKey]
having?: #Having
limit?: int & >=0 & <=10000
offset?: int & >=0
cursor?: string
legend?: string
selectFields?: [...#TelemetryFieldKey]
functions?: [...#PostProcessingFunction]
}
// SigNozPromQL wraps a raw PromQL query.
// Used inside: TimeSeriesQuery.spec.plugin { kind: "SigNozPromQL", spec: ... }
#SigNozPromQL: {
name: string
query: string & !=""
disabled?: bool | *false
step?: #StepInterval
stats?: bool | *false
legend?: string
}
// SigNozClickHouseSQL wraps a raw ClickHouse SQL query.
// Used inside: TimeSeriesQuery.spec.plugin { kind: "SigNozClickHouseSQL", spec: ... }
#SigNozClickHouseSQL: {
name: string
query: string & !=""
disabled?: bool | *false
legend?: string
}
// ============================================================================
// Variable Plugins
// ============================================================================
// SigNozQueryVariable resolves variable values using a builder query.
// Used inside: ListVariable.spec.plugin { kind: "SigNozQueryVariable", spec: ... }
#SigNozQueryVariable: {
// The query that produces variable values.
// Uses the same builder query model as panels.
query: #SigNozBuilderQuery | #SigNozCompositeQuery | #SigNozPromQL | #SigNozClickHouseSQL
}
// SigNozAttributeValues resolves variable values from attribute autocomplete.
// Used inside: ListVariable.spec.plugin { kind: "SigNozAttributeValues", spec: ... }
// This is a simpler alternative to SigNozQueryVariable for common cases
// like "list all values of host.name for metric X".
#SigNozAttributeValues: {
signal: #Signal
metricName?: string // required when signal is "metrics"
attributeName: string & !=""
filter?: #Filter // optional pre-filter
}
// ============================================================================
// Signals
// ============================================================================
#Signal: "metrics" | "traces" | "logs"
// Extensible to "events" | "profiles" in future
// ============================================================================
// Aggregations
// ============================================================================
// MetricAggregation defines the two-level aggregation model for metrics:
// time aggregation (within each time bucket) then space aggregation (across dimensions).
#MetricAggregation: {
metricName: string & !=""
temporality?: #MetricTemporality
timeAggregation?: #TimeAggregation
spaceAggregation?: #SpaceAggregation
reduceTo?: #ReduceTo
}
// ExpressionAggregation uses ClickHouse aggregate function syntax for traces/logs.
// Examples: "count()", "sum(item_price)", "p99(duration_nano)", "countIf(day > 10)"
#ExpressionAggregation: {
expression: string & !=""
alias?: string
}
#MetricTemporality: "delta" | "cumulative" | "unspecified"
#TimeAggregation:
"latest" | "sum" | "avg" | "min" | "max" |
"count" | "count_distinct" | "rate" | "increase"
#SpaceAggregation:
"sum" | "avg" | "min" | "max" | "count" |
"p50" | "p75" | "p90" | "p95" | "p99"
#ReduceTo: "sum" | "count" | "avg" | "min" | "max" | "last" | "median"
// ============================================================================
// Filters
// ============================================================================
// Filter uses expression syntax instead of structured items with synthetic IDs.
// Supports: =, !=, >, >=, <, <=, IN, NOT IN, LIKE, NOT LIKE, ILIKE, NOT ILIKE,
// BETWEEN, NOT BETWEEN, EXISTS, NOT EXISTS, REGEXP, NOT REGEXP, CONTAINS, NOT CONTAINS.
// Variable interpolation: $variable_name
#Filter: {
expression: string
}
// ============================================================================
// Group By, Order By, Having
// ============================================================================
#GroupByKey: {
name: string & !=""
signal?: #Signal
fieldContext?: #FieldContext
fieldDataType?: #FieldDataType
}
#OrderBy: {
key: #OrderByKey
direction: "asc" | "desc"
}
#OrderByKey: {
name: string & !=""
signal?: #Signal
fieldContext?: #FieldContext
fieldDataType?: #FieldDataType
}
#TraceOrderBy: {
key: {
name: "span_count" | "trace_duration"
}
direction: "asc" | "desc"
}
// Having applies a post-aggregation filter.
// Example: "count() > 100"
#Having: {
expression: string & !=""
}
// ============================================================================
// Secondary Aggregations & Limits
// ============================================================================
#SecondaryAggregation: {
expression: string
alias?: string
stepInterval?: #StepInterval
groupBy?: [...#GroupByKey]
order?: [...#OrderBy]
limit?: int & >=0 & <=10000
limitBy?: #LimitBy
}
#LimitBy: {
keys: [...string] & [_, ...] // at least one key
value: string // max rows per group (string for compatibility)
}
// ============================================================================
// Post-Processing Functions
// ============================================================================
#PostProcessingFunction: {
name: #FunctionName
args?: [...#FunctionArg]
}
#FunctionName:
// Threshold functions
"cutOffMin" | "cutOffMax" | "clampMin" | "clampMax" |
// Math functions
"absolute" | "runningDiff" | "log2" | "log10" | "cumulativeSum" |
// Smoothing functions (exponentially weighted moving average)
"ewma3" | "ewma5" | "ewma7" |
// Smoothing functions (sliding median window)
"median3" | "median5" | "median7" |
// Time functions
"timeShift" |
// Analysis functions
"anomaly" |
// Gap filling
"fillZero"
#FunctionArg: {
name?: string
value: number | string | bool
}
// ============================================================================
// Telemetry Field References
// ============================================================================
#TelemetryFieldKey: {
name: string & !=""
description?: string
unit?: string
signal?: #Signal
fieldContext?: #FieldContext
fieldDataType?: #FieldDataType
}
#FieldContext:
"resource" | "scope" | "span" | "event" | "link" |
"log" | "metric" | "body" | "trace"
#FieldDataType:
"string" | "int64" | "float64" | "bool" |
"array(string)" | "array(int64)" | "array(float64)" | "array(bool)"
// ============================================================================
// Common Types
// ============================================================================
// QueryName must be a valid identifier: starts with letter, contains letters/digits/underscores.
#QueryName: =~"^[A-Za-z][A-Za-z0-9_]*$"
// QueryRef references another query by name within a composite query.
#QueryRef: {
name: string & !=""
}
// StepInterval accepts seconds (numeric) or duration string ("15s", "1m", "1h").
#StepInterval: number & >=0 | =~"^[0-9]+(ns|us|ms|s|m|h)$"


@@ -1,178 +0,0 @@
package querier
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
type handler struct {
set factory.ProviderSettings
querier querier.Querier
community querier.Handler
}
func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
return &handler{
set: set,
querier: querier,
community: community,
}
}
func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
bodyBytes, err := io.ReadAll(req.Body)
if err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
return
}
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
if err != nil {
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
return
}
results := []any{}
for _, item := range anomalies.Results {
results = append(results, item)
}
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{
Type: queryRangeRequest.RequestType,
Data: qbtypes.QueryData{
Results: results,
},
Meta: qbtypes.ExecStats{
StepIntervals: stepIntervals,
},
}
render.Success(rw, http.StatusOK, finalResp)
return
}
// regular query range request, delegate to community handler
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
h.community.QueryRange(rw, req)
}
func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
h.community.QueryRawStream(rw, req)
}
func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
h.community.ReplaceVariables(rw, req)
}
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
for _, fn := range anomalyQuery.Functions {
if fn.Name == qbtypes.FunctionNameAnomaly {
for _, arg := range fn.Args {
if arg.Name == "seasonality" {
if seasonalityStr, ok := arg.Value.(string); ok {
switch seasonalityStr {
case "weekly":
return anomalyV2.SeasonalityWeekly
case "hourly":
return anomalyV2.SeasonalityHourly
}
}
}
}
}
}
return anomalyV2.SeasonalityDaily // default
}
func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
switch seasonality {
case anomalyV2.SeasonalityWeekly:
return anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
)
case anomalyV2.SeasonalityHourly:
return anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
)
default:
return anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
)
}
}
func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
seasonality := extractSeasonality(anomalyQuery)
provider := h.createAnomalyProvider(seasonality)
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}


@@ -2,13 +2,16 @@ package api
import (
"net/http"
"net/http/httputil"
"time"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -29,6 +32,7 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
@@ -41,7 +45,7 @@ type APIHandler struct {
}
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
@@ -52,8 +56,9 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}, config)
})
if err != nil {
return nil, err
@@ -74,6 +79,10 @@ func (ah *APIHandler) UM() *usage.Manager {
return ah.opts.UsageManager
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// note: add ee override methods first
@@ -96,6 +105,14 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
// Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
ah.APIHandler.RegisterRoutes(router, am)
}


@@ -0,0 +1,58 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
validPath := false
for _, allowedPrefix := range gateway.AllowedPrefix {
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
validPath = true
break
}
}
if !validPath {
rw.WriteHeader(http.StatusNotFound)
return
}
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
render.Error(rw, err)
return
}
//Create headers
var licenseKey string
if license != nil {
licenseKey = license.Key
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
req.Header.Set("X-Consumer-Groups", "ns:default")
ah.Gateway().ServeHTTP(rw, req)
}


@@ -2,11 +2,16 @@ package api
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -15,6 +20,8 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -137,3 +144,140 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
}
}
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
for _, fn := range anomalyQuery.Functions {
if fn.Name == qbtypes.FunctionNameAnomaly {
for _, arg := range fn.Args {
if arg.Name == "seasonality" {
if seasonalityStr, ok := arg.Value.(string); ok {
switch seasonalityStr {
case "weekly":
return anomalyV2.SeasonalityWeekly
case "hourly":
return anomalyV2.SeasonalityHourly
}
}
}
}
}
}
return anomalyV2.SeasonalityDaily // default
}
func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
switch seasonality {
case anomalyV2.SeasonalityWeekly:
return anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
)
case anomalyV2.SeasonalityHourly:
return anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
)
default:
return anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
)
}
}
func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
seasonality := extractSeasonality(anomalyQuery)
provider := createAnomalyProvider(aH, seasonality)
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}
func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
bodyBytes, err := io.ReadAll(req.Body)
if err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
return
}
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
if err != nil {
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
return
}
results := []any{}
for _, item := range anomalies.Results {
results = append(results, item)
}
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{
Type: queryRangeRequest.RequestType,
Data: qbtypes.QueryData{
Results: results,
},
Meta: qbtypes.ExecStats{
StepIntervals: stepIntervals,
},
}
render.Success(rw, http.StatusOK, finalResp)
return
} else {
// regular query range request, let the querier handle it
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
aH.QuerierAPI.QueryRange(rw, req)
}
}


@@ -19,6 +19,7 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
@@ -71,6 +72,11 @@ type Server struct {
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
if err != nil {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
@@ -164,11 +170,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
if err != nil {
return nil, err
}
@@ -233,6 +240,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterQueryRangeV5Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)

View File

@@ -0,0 +1,9 @@
package gateway
import (
"net/http/httputil"
)
func NewNoopProxy() (*httputil.ReverseProxy, error) {
return &httputil.ReverseProxy{}, nil
}

View File

@@ -0,0 +1,66 @@
package gateway
import (
"net/http"
"net/http/httputil"
"net/url"
"path"
"strings"
)
var (
RoutePrefix string = "/api/gateway"
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
)
type proxy struct {
url *url.URL
stripPath string
}
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
url, err := url.Parse(u)
if err != nil {
return nil, err
}
proxy := &proxy{url: url, stripPath: stripPath}
return &httputil.ReverseProxy{
Rewrite: proxy.rewrite,
ModifyResponse: proxy.modifyResponse,
ErrorHandler: proxy.errorHandler,
}, nil
}
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
pr.SetURL(p.url)
pr.SetXForwarded()
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}
func (p *proxy) modifyResponse(res *http.Response) error {
return nil
}
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
rw.WriteHeader(http.StatusBadGateway)
}
func cleanPath(p string) string {
if p == "" {
return "/"
}
if p[0] != '/' {
p = "/" + p
}
np := path.Clean(p)
if p[len(p)-1] == '/' && np != "/" {
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
np = p
} else {
np += "/"
}
}
return np
}

View File

@@ -0,0 +1,61 @@
package gateway
import (
"context"
"net/http"
"net/http/httputil"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProxyRewrite(t *testing.T) {
testCases := []struct {
name string
url *url.URL
stripPath string
in *url.URL
expected *url.URL
}{
{
name: "SamePathAdded",
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
stripPath: "/strip",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
},
{
name: "NoStripPathInput",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
{
name: "NoStripPathPresentInReq",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "/not-found",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
}
for _, tc := range testCases {
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
require.NoError(t, err)
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
require.NoError(t, err)
proxyReq := &httputil.ProxyRequest{
In: inReq,
Out: inReq.Clone(context.Background()),
}
proxy.Rewrite(proxyReq)
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
}
}

View File

@@ -3,7 +3,6 @@ package httpzeus
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"net/url"
@@ -11,7 +10,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/types/zeustypes"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
@@ -121,13 +119,8 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
return err
}
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
body, err := json.Marshal(profile)
if err != nil {
return err
}
_, err = provider.do(
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/profiles/me"),
http.MethodPut,
@@ -138,15 +131,10 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, profile *z
return err
}
func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
body, err := json.Marshal(host)
if err != nil {
return err
}
_, err = provider.do(
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me/host"),
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
http.MethodPut,
key,
body,
@@ -181,28 +169,21 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
return body, nil
}
errorMessage := gjson.GetBytes(body, "error").String()
if errorMessage == "" {
errorMessage = "an unknown error occurred"
}
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
return nil, provider.errFromStatusCode(response.StatusCode)
}
// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
func (provider *Provider) errFromStatusCode(statusCode int) error {
switch statusCode {
case http.StatusBadRequest:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
case http.StatusUnauthorized:
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
case http.StatusForbidden:
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
case http.StatusNotFound:
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
case http.StatusConflict:
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
}
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
}

View File

@@ -1,74 +0,0 @@
---
description: Core testing conventions - imports, rendering, MSW, interactions, queries
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Testing Conventions
## Imports
Always import from the test harness, never directly from `@testing-library/react`:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
```
## Router
Use the built-in router in `render`:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on their values.
## MSW
Global MSW server runs automatically. Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) =>
res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large response fixtures in `mocks-server/__mockdata__`.
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level events not covered by `userEvent` (e.g., scroll, resize). Wrap in `act(...)` if needed.
- Always `await` interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
## Timers
No global fake timers. Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries: `getByRole` > `findByRole` > `getByLabelText` > visible text > `data-testid` (last resort).
## Anti-patterns
- Never import from `@testing-library/react` directly
- Never use global fake timers
- Never wrap `render` in `act(...)`
- Never mock infra dependencies locally (router, react-query)
- Limit to 3-5 focused tests per file

View File

@@ -1,54 +0,0 @@
---
description: When to use global vs local mocks in tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# Mock Strategy
## Use Global Mocks For
High-frequency dependencies (20+ test files):
- Core infrastructure: react-router-dom, react-query, antd
- Browser APIs: ResizeObserver, matchMedia, localStorage
- Utility libraries: date-fns, lodash
Available global mock files (from jest.config.ts):
- `uplot` -> `__mocks__/uplotMock.ts`
## Use Local Mocks For
- Business logic dependencies (API endpoints, custom hooks, domain components)
- Test-specific behavior (different data per test, error scenarios, loading states)
## Decision Tree
```
Used in 20+ test files?
YES -> Global mock
NO -> Business logic or test-specific?
YES -> Local mock
NO -> Consider global if usage grows
```
## Correct Usage
```ts
// Global mocks are already available - just import
import { useLocation } from 'react-router-dom';
// Local mocks for business logic
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData),
}));
```
## Anti-patterns
```ts
// Never re-mock globally mocked dependencies locally
jest.mock('react-router-dom', () => ({ ... }));
// Never put test-specific data in global mocks
jest.mock('../api/tracesService', () => ({ getTraces: jest.fn(() => specificTestData) }));
```

View File

@@ -1,54 +0,0 @@
---
description: TypeScript type safety requirements for Jest tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---
# TypeScript Type Safety in Tests
All Jest tests must be fully type-safe. Never use `any`.
## Mock Function Typing
```ts
// Use jest.mocked for module mocks
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
// Use jest.MockedFunction for standalone mocks
const mockFetch = jest.fn() as jest.MockedFunction<(id: number) => Promise<User>>;
```
## Mock Data
Define interfaces for all mock data:
```ts
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
const mockProps: ComponentProps = {
title: 'Test',
data: [mockUser],
onSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
};
```
## Hook Mocking Pattern
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
## Checklist
- All mock functions use `jest.MockedFunction<T>` or `jest.mocked()`
- All mock data has proper interfaces
- No `any` types in test files
- Component props are typed
- API response types are defined

frontend/.cursorrules (new file, +484 lines)
View File

@@ -0,0 +1,484 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.
# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.
# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.
**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid the `any` type; use proper typing instead
# Unit Testing Focus
Focus on critical functionality (business logic, utility functions, component behavior)
Mock dependencies (API calls, external modules) before imports
Test multiple data scenarios (valid inputs, invalid inputs, edge cases)
Write maintainable tests with descriptive names grouped in describe blocks
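For example, a minimal sketch of this structure (the `formatLatency` utility and its expected outputs are hypothetical, used only to illustrate grouping valid, invalid, and edge-case inputs in one `describe` block):
```ts
import { formatLatency } from 'utils/formatLatency'; // hypothetical utility

describe('formatLatency', () => {
	it('formats a valid duration in milliseconds', () => {
		expect(formatLatency(1500)).toBe('1.5s');
	});

	it('returns a placeholder for invalid input', () => {
		expect(formatLatency(Number.NaN)).toBe('-');
	});

	it('handles the zero edge case', () => {
		expect(formatLatency(0)).toBe('0ms');
	});
});
```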
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)
**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases
**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`
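The contents of `__mocks__/uplotMock.ts` are not reproduced here; as an assumption, a global mock for a charting library like uplot can be as small as the sketch below (the method names are illustrative, and the repo's actual mock may differ):
```ts
// __mocks__/uplotMock.ts (sketch only - the real mock in the repo may differ)
class UplotMock {
	// Stub out the instance methods components typically call.
	setData = jest.fn();
	setSize = jest.fn();
	destroy = jest.fn();
}

export default UplotMock;
```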
# Repo-specific Testing Conventions
## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.
## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.
## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
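The `rqSuccess` / `rqLoading` / `rqError` helpers live in the repo's test utilities and their exact signatures are not shown here. As an assumption, `rqSuccess` could look roughly like the sketch below, returning only the slice of a React Query result a component usually reads:
```ts
// Sketch only - the real helper may differ in shape and location.
function rqSuccess<T>(data: T) {
	return {
		data,
		isLoading: false,
		isError: false,
		error: null,
		refetch: jest.fn(),
	};
}

// Used with the hook-mock pattern above.
mockUseFoo.mockReturnValue(rqSuccess({ items: [] }));
```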
## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata__`.
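For example, a large payload can live in a fixture module and be wired into a per-test override (the fixture name below is hypothetical; `server` and `rest` come from the import shown above):
```ts
// Hypothetical fixture module - keeps the large JSON out of the test file itself.
import { tracesListResponse } from 'mocks-server/__mockdata__/tracesList';

server.use(
	rest.get('*/api/v1/traces', (_req, res, ctx) =>
		res(ctx.status(200), ctx.json(tracesListResponse)),
	),
);
```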
## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```
```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```
## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```
## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';
describe('MyComponent', () => {
it('renders and interacts', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
);
render(<MyComponent />, undefined, { initialRoute: '/foo' });
expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
await user.click(screen.getByRole('button', { name: /refresh/i }));
await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
});
});
```
# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates
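Pinning time is not demonstrated elsewhere in this file; one way to do it, as a sketch, is with Jest's fake timers scoped to the single test that asserts a relative date (the `LastSeen` component is hypothetical):
```ts
// Pin "now" only inside the test that renders relative dates.
jest.useFakeTimers();
jest.setSystemTime(new Date('2026-02-15T00:00:00.000Z'));

render(<LastSeen timestamp="2026-02-14T22:00:00.000Z" />); // hypothetical component
expect(screen.getByText(/2 hours ago/i)).toBeInTheDocument();

jest.useRealTimers();
```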
# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3-5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# TypeScript Type Safety Examples
## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
id: number;
name: string;
email: string;
}
interface ApiResponse<T> {
data: T;
status: number;
message: string;
}
// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;
// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
data: { id: 1, name: 'John Doe', email: 'john@example.com' },
status: 200,
message: 'Success'
});
// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```
## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
title: string;
data: User[];
onUserSelect: (user: User) => void;
isLoading?: boolean;
}
const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
// Component implementation
};
describe('TestComponent', () => {
it('should render with proper props', () => {
// Arrange - Type the props properly
const mockProps: ComponentProps = {
title: 'Test Title',
data: [{ id: 1, name: 'John', email: 'john@example.com' }],
onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
isLoading: false
};
// Act
render(<TestComponent {...mockProps} />);
// Assert
expect(screen.getByText('Test Title')).toBeInTheDocument();
});
});
```
## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
user: User | null;
loading: boolean;
error: string | null;
refetch: () => void;
}
const useUserData = (id: number): UseUserDataReturn => {
// Hook implementation
};
describe('useUserData', () => {
it('should return user data with proper typing', () => {
// Arrange
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
mockFetchUser.mockResolvedValue({
data: mockUser,
status: 200,
message: 'Success'
});
// Act
const { result } = renderHook(() => useUserData(1));
// Assert
expect(result.current.user).toEqual(mockUser);
expect(result.current.loading).toBe(false);
expect(result.current.error).toBeNull();
});
});
```
## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
pathname: '/traces',
search: '',
hash: '',
state: null,
key: 'test-key',
...overrides,
});
// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```
# TypeScript Configuration for Jest
## Required Jest Configuration
```json
// jest.config.ts
{
"preset": "ts-jest/presets/js-with-ts-esm",
"globals": {
"ts-jest": {
"useESM": true,
"isolatedModules": true,
"tsconfig": "<rootDir>/tsconfig.jest.json"
}
},
"extensionsToTreatAsEsm": [".ts", ".tsx"],
"moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```
## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
"extends": "./tsconfig.json",
"compilerOptions": {
"types": ["jest", "@testing-library/jest-dom"],
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"moduleResolution": "node"
},
"include": [
"src/**/*",
"**/*.test.ts",
"**/*.test.tsx",
"__mocks__/**/*"
]
}
```
## Common Type Safety Patterns
### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;
// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```
### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
data: T;
status: number;
}
const mockFetchData = jest.fn() as jest.MockedFunction<
<T>(endpoint: string) => Promise<MockApiResponse<T>>
>;
// Usage - resolve the mock with a typed response
const usersResponse: MockApiResponse<User> = {
data: { id: 1, name: 'John', email: 'john@example.com' },
status: 200
};
mockFetchData.mockResolvedValue(usersResponse);
```
### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';
type TestComponentProps = ComponentProps<typeof TestComponent>;
const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
const defaultProps: TestComponentProps = {
title: 'Test',
data: [],
onSelect: jest.fn(),
...props
};
return render(<TestComponent {...defaultProps} />);
};
```
### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
message: string;
code: number;
details?: Record<string, unknown>;
}
const mockApiError: ApiError = {
message: 'API Error',
code: 500,
details: { endpoint: '/users' }
};
mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```
## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed
# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│ ├─ react-router-dom
│ ├─ react-query
│ ├─ antd components
│ └─ browser APIs
└─ NO → Is it business logic?
├─ YES → Use Local Mock
│ ├─ API endpoints
│ ├─ Custom hooks
│ └─ Domain components
└─ NO → Is it test-specific?
├─ YES → Use Local Mock
│ ├─ Error scenarios
│ ├─ Loading states
│ └─ Specific data
└─ NO → Consider Global Mock
└─ If it becomes frequently used
```
# Common Anti-Patterns to Avoid
❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```
❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => specificTestData)
}));
```
✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```
✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
getTraces: jest.fn(() => mockTracesData)
}));
```

View File

@@ -55,10 +55,8 @@
"@signozhq/icons": "0.1.0",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
"@signozhq/radio-group": "0.0.2",
"@signozhq/resizable": "0.0.0",
"@signozhq/sonner": "0.1.0",
"@signozhq/switch": "0.0.2",
"@signozhq/table": "0.3.7",
"@signozhq/tooltip": "0.0.2",
"@tanstack/react-table": "8.20.6",

View File

@@ -12,6 +12,5 @@
"pipeline": "Pipeline",
"pipelines": "Pipelines",
"archives": "Archives",
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles"
"logs_to_metrics": "Logs To Metrics"
}

View File

@@ -12,6 +12,5 @@
"pipeline": "Pipeline",
"pipelines": "Pipelines",
"archives": "Archives",
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles"
"logs_to_metrics": "Logs To Metrics"
}

View File

@@ -73,6 +73,5 @@
"API_MONITORING": "SigNoz | External APIs",
"METER_EXPLORER": "SigNoz | Meter Explorer",
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
"METER": "SigNoz | Meter",
"ROLES_SETTINGS": "SigNoz | Roles"
"METER": "SigNoz | Meter"
}

View File

@@ -0,0 +1,29 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CreateIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const createIngestionKey = async (
props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createIngestionKey;

View File

@@ -0,0 +1,26 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/keys/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteIngestionKey;

View File

@@ -0,0 +1,21 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const getAllIngestionKeys = (
props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { search, per_page, page } = props;
const BASE_URL = '/workspaces/me/keys';
const URL_QUERY_PARAMS =
search && search.length > 0
? `/search?name=${search}&page=1&per_page=100`
: `?page=${page}&per_page=${per_page}`;
return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

View File

@@ -0,0 +1,65 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
AddLimitProps,
LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const createLimitForIngestionKey = async (
props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post(
`/workspaces/me/keys/${props.keyID}/limits`,
{
...props,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default createLimitForIngestionKey;

View File

@@ -0,0 +1,26 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteLimitsForIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/limits/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteLimitsForIngestionKey;

View File

@@ -0,0 +1,65 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
LimitSuccessProps,
UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const updateLimitForIngestionKey = async (
props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/limits/${props.limitID}`,
{
config: props.config,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default updateLimitForIngestionKey;

View File

@@ -0,0 +1,32 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IngestionKeysPayloadProps,
UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const updateIngestionKey = async (
props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/keys/${props.id}`,
{
...props.data,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateIngestionKey;

View File

@@ -4,6 +4,8 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';
export default apiV1;

View File

@@ -0,0 +1,7 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
export const getDeploymentsData = (): Promise<
AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);

View File

@@ -0,0 +1,16 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
PayloadProps,
UpdateCustomDomainProps,
} from 'types/api/customDomain/types';
const updateSubDomainAPI = async (
props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
axios.put(`/deployments/me/host`, {
...props.data,
});
export default updateSubDomainAPI;

View File

@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
* @summary Search ingestion keys for workspace
*/
export const searchIngestionKeys = (
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
*/
export const invalidateSearchIngestionKeys = async (
queryClient: QueryClient,
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(

View File

@@ -20,20 +20,17 @@ import { useMutation, useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetMetricAlerts200,
GetMetricAlertsPathParameters,
GetMetricAlertsParams,
GetMetricAttributes200,
GetMetricAttributesParams,
GetMetricAttributesPathParameters,
GetMetricDashboards200,
GetMetricDashboardsPathParameters,
GetMetricDashboardsParams,
GetMetricHighlights200,
GetMetricHighlightsPathParameters,
GetMetricHighlightsParams,
GetMetricMetadata200,
GetMetricMetadataPathParameters,
GetMetricMetadataParams,
GetMetricsStats200,
GetMetricsTreemap200,
ListMetrics200,
ListMetricsParams,
MetricsexplorertypesMetricAttributesRequestDTO,
MetricsexplorertypesStatsRequestDTO,
MetricsexplorertypesTreemapRequestDTO,
MetricsexplorertypesUpdateMetricMetadataRequestDTO,
@@ -46,128 +43,30 @@ type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint returns a list of distinct metric names within the specified time range
* @summary List metric names
* This endpoint returns associated alerts for a specified metric
* @summary Get metric alerts
*/
export const listMetrics = (
params?: ListMetricsParams,
export const getMetricAlerts = (
params: GetMetricAlertsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ListMetrics200>({
url: `/api/v2/metrics`,
return GeneratedAPIInstance<GetMetricAlerts200>({
url: `/api/v2/metric/alerts`,
method: 'GET',
params,
signal,
});
};
export const getListMetricsQueryKey = (params?: ListMetricsParams) => {
return ['listMetrics', ...(params ? [params] : [])] as const;
};
export const getListMetricsQueryOptions = <
TData = Awaited<ReturnType<typeof listMetrics>>,
TError = RenderErrorResponseDTO
>(
params?: ListMetricsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getListMetricsQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof listMetrics>>> = ({
signal,
}) => listMetrics(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ListMetricsQueryResult = NonNullable<
Awaited<ReturnType<typeof listMetrics>>
>;
export type ListMetricsQueryError = RenderErrorResponseDTO;
/**
* @summary List metric names
*/
export function useListMetrics<
TData = Awaited<ReturnType<typeof listMetrics>>,
TError = RenderErrorResponseDTO
>(
params?: ListMetricsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof listMetrics>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getListMetricsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary List metric names
*/
export const invalidateListMetrics = async (
queryClient: QueryClient,
params?: ListMetricsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getListMetricsQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint returns associated alerts for a specified metric
* @summary Get metric alerts
*/
export const getMetricAlerts = (
{ metricName }: GetMetricAlertsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAlerts200>({
url: `/api/v2/metrics/${metricName}/alerts`,
method: 'GET',
signal,
});
};
export const getGetMetricAlertsQueryKey = ({
metricName,
}: GetMetricAlertsPathParameters) => {
return ['getMetricAlerts'] as const;
export const getGetMetricAlertsQueryKey = (params?: GetMetricAlertsParams) => {
return ['getMetricAlerts', ...(params ? [params] : [])] as const;
};
export const getGetMetricAlertsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAlertsPathParameters,
params: GetMetricAlertsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -178,19 +77,13 @@ export const getGetMetricAlertsQueryOptions = <
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricAlertsQueryKey({ metricName });
const queryKey = queryOptions?.queryKey ?? getGetMetricAlertsQueryKey(params);
const queryFn: QueryFunction<Awaited<ReturnType<typeof getMetricAlerts>>> = ({
signal,
}) => getMetricAlerts({ metricName }, signal);
}) => getMetricAlerts(params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
TError,
TData
@@ -210,7 +103,7 @@ export function useGetMetricAlerts<
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAlertsPathParameters,
params: GetMetricAlertsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAlerts>>,
@@ -219,7 +112,7 @@ export function useGetMetricAlerts<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricAlertsQueryOptions({ metricName }, options);
const queryOptions = getGetMetricAlertsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -235,126 +128,11 @@ export function useGetMetricAlerts<
*/
export const invalidateGetMetricAlerts = async (
queryClient: QueryClient,
{ metricName }: GetMetricAlertsPathParameters,
params: GetMetricAlertsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricAlertsQueryKey({ metricName }) },
options,
);
return queryClient;
};
/**
* This endpoint returns attribute keys and their unique values for a specified metric
* @summary Get metric attributes
*/
export const getMetricAttributes = (
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAttributes200>({
url: `/api/v2/metrics/${metricName}/attributes`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricAttributesQueryKey = (
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
) => {
return ['getMetricAttributes', ...(params ? [params] : [])] as const;
};
export const getGetMetricAttributesQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getGetMetricAttributesQueryKey({ metricName }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricAttributes>>
> = ({ signal }) => getMetricAttributes({ metricName }, params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricAttributesQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricAttributes>>
>;
export type GetMetricAttributesQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric attributes
*/
export function useGetMetricAttributes<
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricAttributesQueryOptions(
{ metricName },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric attributes
*/
export const invalidateGetMetricAttributes = async (
queryClient: QueryClient,
{ metricName }: GetMetricAttributesPathParameters,
params?: GetMetricAttributesParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricAttributesQueryKey({ metricName }, params) },
{ queryKey: getGetMetricAlertsQueryKey(params) },
options,
);
@@ -366,27 +144,28 @@ export const invalidateGetMetricAttributes = async (
* @summary Get metric dashboards
*/
export const getMetricDashboards = (
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricDashboards200>({
url: `/api/v2/metrics/${metricName}/dashboards`,
url: `/api/v2/metric/dashboards`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricDashboardsQueryKey = ({
metricName,
}: GetMetricDashboardsPathParameters) => {
return ['getMetricDashboards'] as const;
export const getGetMetricDashboardsQueryKey = (
params?: GetMetricDashboardsParams,
) => {
return ['getMetricDashboards', ...(params ? [params] : [])] as const;
};
export const getGetMetricDashboardsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -398,18 +177,13 @@ export const getGetMetricDashboardsQueryOptions = <
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey({ metricName });
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricDashboards>>
> = ({ signal }) => getMetricDashboards({ metricName }, signal);
> = ({ signal }) => getMetricDashboards(params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
TError,
TData
@@ -429,7 +203,7 @@ export function useGetMetricDashboards<
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricDashboards>>,
@@ -438,10 +212,7 @@ export function useGetMetricDashboards<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricDashboardsQueryOptions(
{ metricName },
options,
);
const queryOptions = getGetMetricDashboardsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -457,11 +228,11 @@ export function useGetMetricDashboards<
*/
export const invalidateGetMetricDashboards = async (
queryClient: QueryClient,
{ metricName }: GetMetricDashboardsPathParameters,
params: GetMetricDashboardsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricDashboardsQueryKey({ metricName }) },
{ queryKey: getGetMetricDashboardsQueryKey(params) },
options,
);
@@ -473,27 +244,28 @@ export const invalidateGetMetricDashboards = async (
* @summary Get metric highlights
*/
export const getMetricHighlights = (
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricHighlights200>({
url: `/api/v2/metrics/${metricName}/highlights`,
url: `/api/v2/metric/highlights`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricHighlightsQueryKey = ({
metricName,
}: GetMetricHighlightsPathParameters) => {
return ['getMetricHighlights'] as const;
export const getGetMetricHighlightsQueryKey = (
params?: GetMetricHighlightsParams,
) => {
return ['getMetricHighlights', ...(params ? [params] : [])] as const;
};
export const getGetMetricHighlightsQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -505,18 +277,13 @@ export const getGetMetricHighlightsQueryOptions = <
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey({ metricName });
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricHighlights>>
> = ({ signal }) => getMetricHighlights({ metricName }, signal);
> = ({ signal }) => getMetricHighlights(params, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
TError,
TData
@@ -536,7 +303,7 @@ export function useGetMetricHighlights<
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricHighlights>>,
@@ -545,10 +312,7 @@ export function useGetMetricHighlights<
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricHighlightsQueryOptions(
{ metricName },
options,
);
const queryOptions = getGetMetricHighlightsQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
@@ -564,115 +328,11 @@ export function useGetMetricHighlights<
*/
export const invalidateGetMetricHighlights = async (
queryClient: QueryClient,
{ metricName }: GetMetricHighlightsPathParameters,
params: GetMetricHighlightsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricHighlightsQueryKey({ metricName }) },
options,
);
return queryClient;
};
/**
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
* @summary Get metric metadata
*/
export const getMetricMetadata = (
{ metricName }: GetMetricMetadataPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
url: `/api/v2/metrics/${metricName}/metadata`,
method: 'GET',
signal,
});
};
export const getGetMetricMetadataQueryKey = ({
metricName,
}: GetMetricMetadataPathParameters) => {
return ['getMetricMetadata'] as const;
};
export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricMetadataPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey({ metricName });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricMetadata>>
> = ({ signal }) => getMetricMetadata({ metricName }, signal);
return {
queryKey,
queryFn,
enabled: !!metricName,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricMetadataQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricMetadata>>
>;
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric metadata
*/
export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
{ metricName }: GetMetricMetadataPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricMetadataQueryOptions({ metricName }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric metadata
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
{ metricName }: GetMetricMetadataPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricMetadataQueryKey({ metricName }) },
{ queryKey: getGetMetricHighlightsQueryKey(params) },
options,
);
@@ -778,6 +438,189 @@ export const useUpdateMetricMetadata = <
return useMutation(mutationOptions);
};
/**
* This endpoint returns attribute keys and their unique values for a specified metric
* @summary Get metric attributes
*/
export const getMetricAttributes = (
metricsexplorertypesMetricAttributesRequestDTO: MetricsexplorertypesMetricAttributesRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricAttributes200>({
url: `/api/v2/metrics/attributes`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: metricsexplorertypesMetricAttributesRequestDTO,
signal,
});
};
export const getGetMetricAttributesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
> => {
const mutationKey = ['getMetricAttributes'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof getMetricAttributes>>,
{ data: MetricsexplorertypesMetricAttributesRequestDTO }
> = (props) => {
const { data } = props ?? {};
return getMetricAttributes(data);
};
return { mutationFn, ...mutationOptions };
};
export type GetMetricAttributesMutationResult = NonNullable<
Awaited<ReturnType<typeof getMetricAttributes>>
>;
export type GetMetricAttributesMutationBody = MetricsexplorertypesMetricAttributesRequestDTO;
export type GetMetricAttributesMutationError = RenderErrorResponseDTO;
/**
* @summary Get metric attributes
*/
export const useGetMetricAttributes = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof getMetricAttributes>>,
TError,
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
TContext
> => {
const mutationOptions = getGetMetricAttributesMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
* @summary Get metric metadata
*/
export const getMetricMetadata = (
params: GetMetricMetadataParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetMetricMetadata200>({
url: `/api/v2/metrics/metadata`,
method: 'GET',
params,
signal,
});
};
export const getGetMetricMetadataQueryKey = (
params?: GetMetricMetadataParams,
) => {
return ['getMetricMetadata', ...(params ? [params] : [])] as const;
};
export const getGetMetricMetadataQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey(params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricMetadata>>
> = ({ signal }) => getMetricMetadata(params, signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricMetadataQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricMetadata>>
>;
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
/**
* @summary Get metric metadata
*/
export function useGetMetricMetadata<
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
TError = RenderErrorResponseDTO
>(
params: GetMetricMetadataParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricMetadata>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricMetadataQueryOptions(params, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get metric metadata
*/
export const invalidateGetMetricMetadata = async (
queryClient: QueryClient,
params: GetMetricMetadataParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricMetadataQueryKey(params) },
options,
);
return queryClient;
};
/**
* This endpoint provides list of metrics with their number of samples and timeseries for the given time range
* @summary Get metrics statistics

View File

@@ -1,191 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
ReplaceVariables200,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
* @summary Query range
*/
export const queryRangeV5 = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<QueryRangeV5200>({
url: `/api/v5/query_range`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getQueryRangeV5MutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['queryRangeV5'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof queryRangeV5>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return queryRangeV5(data);
};
return { mutationFn, ...mutationOptions };
};
export type QueryRangeV5MutationResult = NonNullable<
Awaited<ReturnType<typeof queryRangeV5>>
>;
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type QueryRangeV5MutationError = RenderErrorResponseDTO;
/**
* @summary Query range
*/
export const useQueryRangeV5 = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof queryRangeV5>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getQueryRangeV5MutationOptions(options);
return useMutation(mutationOptions);
};
/**
* Replace variables in a query
* @summary Replace variables
*/
export const replaceVariables = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ReplaceVariables200>({
url: `/api/v5/substitute_vars`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getReplaceVariablesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['replaceVariables'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof replaceVariables>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return replaceVariables(data);
};
return { mutationFn, ...mutationOptions };
};
export type ReplaceVariablesMutationResult = NonNullable<
Awaited<ReturnType<typeof replaceVariables>>
>;
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
/**
* @summary Replace variables
*/
export const useReplaceVariables = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getReplaceVariablesMutationOptions(options);
return useMutation(mutationOptions);
};
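A call-site sketch for the `useQueryRangeV5` mutation above (illustrative, not part of the generated file; the import paths are assumptions and the DTO fields are left to `sigNoz.schemas`):

// Illustrative only. The hook exposes standard react-query mutation state
// (isLoading, error, data) alongside mutate/mutateAsync.
import { useQueryRangeV5 } from 'api/generated/services/queryRange'; // assumed path
import type { Querybuildertypesv5QueryRangeRequestDTO } from 'api/generated/services/sigNoz.schemas'; // assumed path

function useRunCompositeQuery(): (dto: Querybuildertypesv5QueryRangeRequestDTO) => void {
  const queryRange = useQueryRangeV5({
    mutation: {
      onError: (error): void => console.error('query_range failed', error),
    },
  });

  // Variables are passed under the `data` key, matching the generated
  // { data: Querybuildertypesv5QueryRangeRequestDTO } variables type.
  return (dto) => queryRange.mutate({ data: dto });
}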

File diff suppressed because it is too large

View File

@@ -1,269 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
MutationFunction,
QueryClient,
QueryFunction,
QueryKey,
UseMutationOptions,
UseMutationResult,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetHosts200,
RenderErrorResponseDTO,
ZeustypesPostableHostDTO,
ZeustypesPostableProfileDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint gets the host info from zeus.
* @summary Get host info from Zeus.
*/
export const getHosts = (signal?: AbortSignal) => {
return GeneratedAPIInstance<GetHosts200>({
url: `/api/v2/zeus/hosts`,
method: 'GET',
signal,
});
};
export const getGetHostsQueryKey = () => {
return ['getHosts'] as const;
};
export const getGetHostsQueryOptions = <
TData = Awaited<ReturnType<typeof getHosts>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
signal,
}) => getHosts(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getHosts>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetHostsQueryResult = NonNullable<
Awaited<ReturnType<typeof getHosts>>
>;
export type GetHostsQueryError = RenderErrorResponseDTO;
/**
* @summary Get host info from Zeus.
*/
export function useGetHosts<
TData = Awaited<ReturnType<typeof getHosts>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetHostsQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get host info from Zeus.
*/
export const invalidateGetHosts = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetHostsQueryKey() },
options,
);
return queryClient;
};
/**
* This endpoint saves the host of a deployment to zeus.
* @summary Put host in Zeus for a deployment.
*/
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
return GeneratedAPIInstance<void>({
url: `/api/v2/zeus/hosts`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: zeustypesPostableHostDTO,
});
};
export const getPutHostMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
> => {
const mutationKey = ['putHost'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof putHost>>,
{ data: ZeustypesPostableHostDTO }
> = (props) => {
const { data } = props ?? {};
return putHost(data);
};
return { mutationFn, ...mutationOptions };
};
export type PutHostMutationResult = NonNullable<
Awaited<ReturnType<typeof putHost>>
>;
export type PutHostMutationBody = ZeustypesPostableHostDTO;
export type PutHostMutationError = RenderErrorResponseDTO;
/**
* @summary Put host in Zeus for a deployment.
*/
export const usePutHost = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
> => {
const mutationOptions = getPutHostMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint saves the profile of a deployment to zeus.
* @summary Put profile in Zeus for a deployment.
*/
export const putProfile = (
zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
) => {
return GeneratedAPIInstance<void>({
url: `/api/v2/zeus/profiles`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: zeustypesPostableProfileDTO,
});
};
export const getPutProfileMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
> => {
const mutationKey = ['putProfile'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof putProfile>>,
{ data: ZeustypesPostableProfileDTO }
> = (props) => {
const { data } = props ?? {};
return putProfile(data);
};
return { mutationFn, ...mutationOptions };
};
export type PutProfileMutationResult = NonNullable<
Awaited<ReturnType<typeof putProfile>>
>;
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
export type PutProfileMutationError = RenderErrorResponseDTO;
/**
* @summary Put profile in Zeus for a deployment.
*/
export const usePutProfile = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
> => {
const mutationOptions = getPutProfileMutationOptions(options);
return useMutation(mutationOptions);
};
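A matching sketch for the Zeus mutations above, assuming a settings flow that saves a host (import paths assumed; the concrete `ZeustypesPostableHostDTO` fields live in `sigNoz.schemas`):

// Illustrative only: mutateAsync lets the caller await the PUT and handle
// RenderErrorResponseDTO failures with try/catch at the call site.
import { usePutHost } from 'api/generated/services/zeus'; // assumed path
import type { ZeustypesPostableHostDTO } from 'api/generated/services/sigNoz.schemas'; // assumed path

function useSaveDeploymentHost(): (host: ZeustypesPostableHostDTO) => Promise<void> {
  const putHostMutation = usePutHost();

  return async (host) => {
    await putHostMutation.mutateAsync({ data: host });
  };
}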

View File

@@ -15,7 +15,15 @@ import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { eventEmitter } from 'utils/getEventEmitter';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import apiV1, {
apiAlertManager,
apiV2,
apiV3,
apiV4,
apiV5,
gatewayApiV1,
gatewayApiV2,
} from './apiV1';
import { Logout } from './utils';
const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -203,6 +211,24 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});
GatewayApiV1Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V2
export const GatewayApiV2Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});
// generated API Instance
export const GeneratedAPIInstance = axios.create({
baseURL: ENVIRONMENT.baseURL,
@@ -214,6 +240,14 @@ GeneratedAPIInstance.interceptors.response.use(
interceptorRejected,
);
GatewayApiV2Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//
AxiosAlertManagerInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,

View File

@@ -11,10 +11,13 @@ export const getMetricMetadata = async (
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(`/metrics/${encodedMetricName}/metadata`, {
signal,
headers,
});
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
return {
httpStatusCode: response.status,

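For reference, the change above moves the metric name out of the URL path and into a query parameter; a request for `system.cpu.time` therefore changes shape as sketched below (the effective base URL still comes from the shared axios instance):

// Before: GET /metrics/system.cpu.time/metadata
// After:  GET /metrics/metadata?metricName=system.cpu.time
const metadataUrl = `/metrics/metadata?metricName=${encodeURIComponent('system.cpu.time')}`;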
View File

@@ -0,0 +1,20 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';
const updateProfile = async (
props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
const response = await GatewayApiV2Instance.put('/profiles/me', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateProfile;
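A minimal call-site sketch for the new gateway-backed helper above (module path assumed; failures from `GatewayApiV2Instance` surface as rejected promises, since the helper does not catch them):

// Illustrative only.
import updateProfile from 'api/onboarding/updateProfile'; // assumed path
import { UpdateProfileProps } from 'types/api/onboarding/types';

async function saveProfile(props: UpdateProfileProps): Promise<void> {
  const result = await updateProfile(props);
  // The helper maps the gateway envelope onto statusCode/message/payload.
  console.log('profile update:', result);
}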

View File

@@ -0,0 +1,19 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
const deleteDomain = async (id: string): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete<null>(`/domains/${id}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteDomain;

View File

@@ -0,0 +1,25 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { UpdatableAuthDomain } from 'types/api/v1/domains/put';
const put = async (
props: UpdatableAuthDomain,
): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put<RawSuccessResponse<null>>(
`/domains/${props.id}`,
{ config: props.config },
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default put;

View File

@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
const listAllDomain = async (): Promise<
SuccessResponseV2<GettableAuthDomain[]>
> => {
try {
const response = await axios.get<RawSuccessResponse<GettableAuthDomain[]>>(
`/domains`,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default listAllDomain;

View File

@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';
const post = async (
props: PostableAuthDomain,
): Promise<SuccessResponseV2<GettableAuthDomain>> => {
try {
const response = await axios.post<RawSuccessResponse<GettableAuthDomain>>(
`/domains`,
props,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default post;
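Taken together, the helpers in this group (list, post, put, delete) form a small CRUD surface for auth domains. A sketch of chaining them, with module paths assumed; each helper funnels failures through `ErrorResponseHandlerV2`, so only the happy path is shown:

// Illustrative only: assumed module paths, happy path only.
import deleteDomain from 'api/v1/domains/delete'; // assumed path
import listAllDomain from 'api/v1/domains/list'; // assumed path
import postDomain from 'api/v1/domains/post'; // assumed path
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';

async function replaceDomain(
  oldDomainId: string,
  newDomain: PostableAuthDomain,
): Promise<GettableAuthDomain[]> {
  await deleteDomain(oldDomainId);
  await postDomain(newDomain);
  const { data } = await listAllDomain();
  return data;
}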

View File

@@ -73,7 +73,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'time_series',
data: { results: [timeSeries] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('time_series', [
@@ -156,7 +156,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('scalar', [
@@ -239,7 +239,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
};
const params = makeBaseParams('scalar', [

View File

@@ -388,7 +388,6 @@ export function convertV5ResponseToLegacy(
warnings: v5Data?.data?.warning || [],
},
warning: v5Data?.warning || undefined,
meta: v5Data?.meta,
},
warning: v5Data?.warning || undefined,
};
@@ -407,7 +406,6 @@ export function convertV5ResponseToLegacy(
payload: {
data: convertedData,
warning: v5Response.payload?.data?.warning || undefined,
meta: v5Data?.meta,
},
};

View File

@@ -21,9 +21,7 @@ import '@signozhq/design-tokens';
import '@signozhq/icons';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/radio-group';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/switch';
import '@signozhq/table';
import '@signozhq/tooltip';

View File

@@ -1,9 +1,8 @@
/* eslint-disable no-nested-ternary */
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { Virtuoso } from 'react-virtuoso';
import { Card } from 'antd';
import LogDetail from 'components/LogDetail';
import RawLogView from 'components/Logs/RawLogView';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { DEFAULT_ENTITY_VERSION } from 'constants/app';
@@ -11,8 +10,6 @@ import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
import useScrollToLog from 'hooks/logs/useScrollToLog';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { ILog } from 'types/api/logs/log';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -31,15 +28,6 @@ interface Props {
}
function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
const virtuosoRef = useRef<VirtuosoHandle>(null);
const {
activeLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
} = useLogDetailHandlers();
const basePayload = getHostLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
@@ -84,40 +72,29 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
setIsPaginating(false);
}, [data, setIsPaginating]);
const handleScrollToLog = useScrollToLog({
logs,
virtuosoRef,
});
const getItemContent = useCallback(
(_: number, logToRender: ILog): JSX.Element => {
return (
<div key={logToRender.id}>
<RawLogView
isTextOverflowEllipsisDisabled
data={logToRender}
linesPerRow={5}
fontSize={FontSize.MEDIUM}
selectedFields={[
{
dataType: 'string',
type: '',
name: 'body',
},
{
dataType: 'string',
type: '',
name: 'timestamp',
},
]}
onSetActiveLog={handleSetActiveLog}
onClearActiveLog={handleCloseLogDetail}
isActiveLog={activeLog?.id === logToRender.id}
/>
</div>
);
},
[activeLog, handleSetActiveLog, handleCloseLogDetail],
(_: number, logToRender: ILog): JSX.Element => (
<RawLogView
isTextOverflowEllipsisDisabled
key={logToRender.id}
data={logToRender}
linesPerRow={5}
fontSize={FontSize.MEDIUM}
selectedFields={[
{
dataType: 'string',
type: '',
name: 'body',
},
{
dataType: 'string',
type: '',
name: 'timestamp',
},
]}
/>
),
[],
);
const renderFooter = useCallback(
@@ -141,7 +118,6 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
<Virtuoso
className="host-metrics-logs-virtuoso"
key="host-metrics-logs-virtuoso"
ref={virtuosoRef}
data={logs}
endReached={loadMoreLogs}
totalCount={logs.length}
@@ -163,24 +139,7 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
{!isLoading && !isError && logs.length === 0 && <NoLogsContainer />}
{isError && !isLoading && <LogsError />}
{!isLoading && !isError && logs.length > 0 && (
<div
className="host-metrics-logs-list-container"
data-log-detail-ignore="true"
>
{renderContent}
</div>
)}
{selectedTab && activeLog && (
<LogDetail
log={activeLog}
onClose={handleCloseLogDetail}
logs={logs}
onNavigateLog={handleSetActiveLog}
selectedTab={selectedTab}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
onScrollToLog={handleScrollToLog}
/>
<div className="host-metrics-logs-list-container">{renderContent}</div>
)}
</div>
);

View File

@@ -13,9 +13,6 @@ export type LogDetailProps = {
handleChangeSelectedView?: ChangeViewFunctionType;
isListViewPanel?: boolean;
listViewPanelSelectedFields?: IField[] | null;
logs?: ILog[];
onNavigateLog?: (log: ILog) => void;
onScrollToLog?: (logId: string) => void;
} & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
Pick<DrawerProps, 'onClose'>;

View File

@@ -15,8 +15,6 @@
}
.log-detail-drawer__title-right {
display: flex;
align-items: center;
.ant-btn {
display: flex;
align-items: center;
@@ -42,7 +40,6 @@
display: flex;
flex-direction: column;
padding: 16px;
padding-bottom: 0;
}
.title {
@@ -69,10 +66,6 @@
margin-bottom: 16px;
}
.log-detail-drawer__content {
height: 100%;
}
.log-detail-drawer__log {
width: 100%;
display: flex;
@@ -190,115 +183,9 @@
.ant-drawer-close {
padding: 0px;
}
.log-detail-drawer__footer-hint {
position: sticky;
bottom: 0;
left: 0;
right: 0;
padding: 8px 16px;
text-align: left;
color: var(--text-vanilla-200);
background: var(--bg-ink-400);
z-index: 10;
.log-detail-drawer__footer-hint-content {
display: flex;
align-items: center;
gap: 4px;
}
.log-detail-drawer__footer-hint-icon {
display: inline;
vertical-align: middle;
color: var(--text-vanilla-200);
}
.log-detail-drawer__footer-hint-text {
font-size: 13px;
margin: 0;
}
}
.log-arrows {
display: flex;
box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.1);
border-radius: 6px;
padding: 2px 6px;
align-items: center;
margin-left: 8px;
}
.log-arrow-btn {
padding: 0;
min-width: 28px;
height: 28px;
border-radius: 4px;
background: var(--bg-ink-400);
color: var(--text-vanilla-400);
border: 1px solid var(--bg-ink-300);
box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.08);
display: flex;
align-items: center;
justify-content: center;
transition: background-color 0.2s ease-in-out;
}
.log-arrow-btn-up,
.log-arrow-btn-down {
background: var(--bg-ink-400);
}
.log-arrow-btn:active,
.log-arrow-btn:focus {
background: var(--bg-ink-300);
color: var(--text-vanilla-100);
}
.log-arrow-btn[disabled] {
opacity: 0.5;
cursor: not-allowed;
background: var(--bg-ink-500);
color: var(--text-vanilla-200);
.log-arrow-btn:hover:not([disabled]) {
background: var(--bg-ink-300);
color: var(--text-vanilla-100);
}
}
}
.lightMode {
.log-arrows {
background: var(--bg-vanilla-100);
box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.04);
}
.log-arrow-btn {
background: var(--bg-vanilla-100);
color: var(--text-ink-400);
border: 1px solid var(--bg-vanilla-300);
box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.04);
}
.log-arrow-btn-up,
.log-arrow-btn-down {
background: var(--bg-vanilla-100);
}
.log-arrow-btn:active,
.log-arrow-btn:focus {
background: var(--bg-vanilla-200);
color: var(--text-ink-500);
}
.log-arrow-btn:hover:not([disabled]) {
background: var(--bg-vanilla-200);
color: var(--text-ink-500);
}
.log-arrow-btn[disabled] {
background: var(--bg-vanilla-100);
color: var(--text-ink-200);
}
.ant-drawer-header {
border-bottom: 1px solid var(--bg-vanilla-400);
background: var(--bg-vanilla-100);
@@ -365,33 +252,4 @@
color: var(--text-ink-300);
}
}
.log-detail-drawer__footer-hint {
position: sticky;
bottom: 0;
left: 0;
right: 0;
padding: 8px 16px;
text-align: left;
color: var(--text-vanilla-700);
background: var(--bg-vanilla-100);
z-index: 10;
.log-detail-drawer__footer-hint-content {
display: flex;
align-items: center;
gap: 4px;
}
.log-detail-drawer__footer-hint-icon {
display: inline;
vertical-align: middle;
color: var(--text-vanilla-700);
}
.log-detail-drawer__footer-hint-text {
font-size: 13px;
margin: 0;
}
}
}

View File

@@ -1,5 +1,5 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { useCopyToClipboard, useLocation } from 'react-use';
import { Color, Spacing } from '@signozhq/design-tokens';
@@ -32,12 +32,8 @@ import { useSafeNavigate } from 'hooks/useSafeNavigate';
import createQueryParams from 'lib/createQueryParams';
import { cloneDeep } from 'lodash-es';
import {
ArrowDown,
ArrowUp,
BarChart2,
Braces,
ChevronDown,
ChevronUp,
Compass,
Copy,
Filter,
@@ -64,9 +60,6 @@ function LogDetailInner({
isListViewPanel = false,
listViewPanelSelectedFields,
handleChangeSelectedView,
logs,
onNavigateLog,
onScrollToLog,
}: LogDetailInnerProps): JSX.Element {
const initialContextQuery = useInitialQuery(log);
const [contextQuery, setContextQuery] = useState<Query | undefined>(
@@ -81,78 +74,6 @@ function LogDetailInner({
const [isEdit, setIsEdit] = useState<boolean>(false);
const { stagedQuery, updateAllQueriesOperators } = useQueryBuilder();
// Handle clicks outside to close drawer, except on explicitly ignored regions
useEffect(() => {
const handleClickOutside = (e: MouseEvent): void => {
const target = e.target as HTMLElement;
// Don't close if clicking on explicitly ignored regions
if (target.closest('[data-log-detail-ignore="true"]')) {
return;
}
// Close the drawer for any other outside click
onClose?.(e as any);
};
document.addEventListener('mousedown', handleClickOutside);
return (): void => {
document.removeEventListener('mousedown', handleClickOutside);
};
}, [onClose]);
// Keyboard navigation - handle up/down arrow keys
// Only listen when in OVERVIEW tab
useEffect(() => {
if (
!logs ||
!onNavigateLog ||
logs.length === 0 ||
selectedView !== VIEW_TYPES.OVERVIEW
) {
return;
}
const handleKeyDown = (e: KeyboardEvent): void => {
const currentIndex = logs.findIndex((l) => l.id === log.id);
if (currentIndex === -1) {
return;
}
if (e.key === 'ArrowUp') {
e.preventDefault();
e.stopPropagation();
// Navigate to previous log
if (currentIndex > 0) {
const prevLog = logs[currentIndex - 1];
onNavigateLog(prevLog);
// Trigger scroll to the log element
if (onScrollToLog) {
onScrollToLog(prevLog.id);
}
}
} else if (e.key === 'ArrowDown') {
e.preventDefault();
e.stopPropagation();
// Navigate to next log
if (currentIndex < logs.length - 1) {
const nextLog = logs[currentIndex + 1];
onNavigateLog(nextLog);
// Trigger scroll to the log element
if (onScrollToLog) {
onScrollToLog(nextLog.id);
}
}
}
};
document.addEventListener('keydown', handleKeyDown);
return (): void => {
document.removeEventListener('keydown', handleKeyDown);
};
}, [log.id, logs, onNavigateLog, onScrollToLog, selectedView]);
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) {
return null;
@@ -306,87 +227,32 @@ function LogDetailInner({
);
const logType = log?.attributes_string?.log_level || LogType.INFO;
const currentLogIndex = logs ? logs.findIndex((l) => l.id === log.id) : -1;
const isPrevDisabled =
!logs || !onNavigateLog || logs.length === 0 || currentLogIndex <= 0;
const isNextDisabled =
!logs ||
!onNavigateLog ||
logs.length === 0 ||
currentLogIndex === logs.length - 1;
type HandleNavigateLogParams = {
direction: 'next' | 'previous';
};
const handleNavigateLog = ({ direction }: HandleNavigateLogParams): void => {
if (!logs || !onNavigateLog || currentLogIndex === -1) {
return;
}
if (direction === 'previous' && !isPrevDisabled) {
const prevLog = logs[currentLogIndex - 1];
onNavigateLog(prevLog);
onScrollToLog?.(prevLog.id);
} else if (direction === 'next' && !isNextDisabled) {
const nextLog = logs[currentLogIndex + 1];
onNavigateLog(nextLog);
onScrollToLog?.(nextLog.id);
}
};
return (
<Drawer
width="60%"
mask={false}
maskClosable={false}
maskStyle={{ background: 'none' }}
title={
<div className="log-detail-drawer__title" data-log-detail-ignore="true">
<div className="log-detail-drawer__title">
<div className="log-detail-drawer__title-left">
<Divider type="vertical" className={cx('log-type-indicator', LogType)} />
<Typography.Text className="title">Log details</Typography.Text>
</div>
<div className="log-detail-drawer__title-right">
<div className="log-arrows">
<Tooltip
title={isPrevDisabled ? '' : 'Move to previous log'}
placement="top"
mouseLeaveDelay={0}
{showOpenInExplorerBtn && (
<div className="log-detail-drawer__title-right">
<Button
className="open-in-explorer-btn"
icon={<Compass size={16} />}
onClick={handleOpenInExplorer}
>
<Button
icon={<ChevronUp size={14} />}
className="log-arrow-btn log-arrow-btn-up"
disabled={isPrevDisabled}
onClick={(): void => handleNavigateLog({ direction: 'previous' })}
/>
</Tooltip>
<Tooltip
title={isNextDisabled ? '' : 'Move to next log'}
placement="top"
mouseLeaveDelay={0}
>
<Button
icon={<ChevronDown size={14} />}
className="log-arrow-btn log-arrow-btn-down"
disabled={isNextDisabled}
onClick={(): void => handleNavigateLog({ direction: 'next' })}
/>
</Tooltip>
Open in Explorer
</Button>
</div>
{showOpenInExplorerBtn && (
<div>
<Button
className="open-in-explorer-btn"
icon={<Compass size={16} />}
onClick={handleOpenInExplorer}
>
Open in Explorer
</Button>
</div>
)}
</div>
)}
</div>
}
placement="right"
// closable
onClose={drawerCloseHandler}
open={log !== null}
style={{
@@ -397,164 +263,138 @@ function LogDetailInner({
destroyOnClose
closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
>
<div className="log-detail-drawer__content" data-log-detail-ignore="true">
<div className="log-detail-drawer__log">
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
</Tooltip>
<div className="log-detail-drawer__log">
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
</Tooltip>
<div className="log-overflow-shadow">&nbsp;</div>
</div>
<div className="log-overflow-shadow">&nbsp;</div>
</div>
<div className="tabs-and-search">
<Radio.Group
className="views-tabs"
onChange={handleModeChange}
value={selectedView}
<div className="tabs-and-search">
<Radio.Group
className="views-tabs"
onChange={handleModeChange}
value={selectedView}
>
<Radio.Button
className={
// eslint-disable-next-line sonarjs/no-duplicate-string
selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.OVERVIEW}
>
<Radio.Button
className={
// eslint-disable-next-line sonarjs/no-duplicate-string
selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.OVERVIEW}
>
<div className="view-title">
<Table size={14} />
Overview
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.JSON}
>
<div className="view-title">
<Braces size={14} />
JSON
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.CONTEXT}
>
<div className="view-title">
<TextSelect size={14} />
Context
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.INFRAMETRICS}
>
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
</Radio.Button>
</Radio.Group>
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title="Show Filters"
placement="topLeft"
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
)}
<div className="view-title">
<Table size={14} />
Overview
</div>
</Radio.Button>
<Radio.Button
className={selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'}
value={VIEW_TYPES.JSON}
>
<div className="view-title">
<Braces size={14} />
JSON
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.CONTEXT}
>
<div className="view-title">
<TextSelect size={14} />
Context
</div>
</Radio.Button>
<Radio.Button
className={
selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
}
value={VIEW_TYPES.INFRAMETRICS}
>
<div className="view-title">
<BarChart2 size={14} />
Metrics
</div>
</Radio.Button>
</Radio.Group>
<div className="log-detail-drawer__actions">
{selectedView === VIEW_TYPES.CONTEXT && (
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
title="Show Filters"
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
aria-label="Show Filters"
>
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
icon={<Filter size={16} />}
onClick={handleFilterVisible}
/>
</Tooltip>
</div>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
<QuerySearch
onChange={(value): void => handleQueryExpressionChange(value, 0)}
dataSource={DataSource.LOGS}
queryData={contextQuery?.builder.queryData[0]}
onRun={handleRunQuery}
)}
<Tooltip
title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
placement="topLeft"
aria-label={
selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
}
>
<Button
className="action-btn"
icon={<Copy size={16} />}
onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
/>
</div>
)}
{selectedView === VIEW_TYPES.OVERVIEW && (
<Overview
logData={log}
onAddToQuery={onAddToQuery}
onClickActionItem={onClickActionItem}
isListViewPanel={isListViewPanel}
selectedOptions={options}
listViewPanelSelectedFields={listViewPanelSelectedFields}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
{selectedView === VIEW_TYPES.CONTEXT && (
<ContextView
log={log}
filters={filters}
contextQuery={contextQuery}
isEdit={isEdit}
/>
)}
{selectedView === VIEW_TYPES.INFRAMETRICS && (
<InfraMetrics
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
/>
)}
{selectedView === VIEW_TYPES.OVERVIEW && (
<div className="log-detail-drawer__footer-hint">
<div className="log-detail-drawer__footer-hint-content">
<Typography.Text
type="secondary"
className="log-detail-drawer__footer-hint-text"
>
Use
</Typography.Text>
<ArrowUp size={14} className="log-detail-drawer__footer-hint-icon" />
<span>/</span>
<ArrowDown size={14} className="log-detail-drawer__footer-hint-icon" />
<Typography.Text
type="secondary"
className="log-detail-drawer__footer-hint-text"
>
to view previous/next log
</Typography.Text>
</div>
</div>
)}
</Tooltip>
</div>
</div>
{isFilterVisible && contextQuery?.builder.queryData[0] && (
<div className="log-detail-drawer-query-container">
<QuerySearch
onChange={(value): void => handleQueryExpressionChange(value, 0)}
dataSource={DataSource.LOGS}
queryData={contextQuery?.builder.queryData[0]}
onRun={handleRunQuery}
/>
</div>
)}
{selectedView === VIEW_TYPES.OVERVIEW && (
<Overview
logData={log}
onAddToQuery={onAddToQuery}
onClickActionItem={onClickActionItem}
isListViewPanel={isListViewPanel}
selectedOptions={options}
listViewPanelSelectedFields={listViewPanelSelectedFields}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
{selectedView === VIEW_TYPES.CONTEXT && (
<ContextView
log={log}
filters={filters}
contextQuery={contextQuery}
isEdit={isEdit}
/>
)}
{selectedView === VIEW_TYPES.INFRAMETRICS && (
<InfraMetrics
clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
timestamp={log.timestamp.toString()}
dataSource={DataSource.LOGS}
/>
)}
</Drawer>
);
}

View File

@@ -2,11 +2,13 @@ import { memo, useCallback, useMemo } from 'react';
import { blue } from '@ant-design/colors';
import { Typography } from 'antd';
import cx from 'classnames';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
import { FontSize } from 'container/OptionsMenu/types';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
import { useIsDarkMode } from 'hooks/useDarkMode';
// utils
@@ -102,17 +104,12 @@ function LogSelectedField({
type ListLogViewProps = {
logData: ILog;
selectedFields: IField[];
onSetActiveLog: (
log: ILog,
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
) => void;
onSetActiveLog: (log: ILog) => void;
onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
activeLog?: ILog | null;
linesPerRow: number;
fontSize: FontSize;
handleChangeSelectedView?: ChangeViewFunctionType;
isActiveLog?: boolean;
onClearActiveLog?: () => void;
};
function ListLogView({
@@ -123,8 +120,7 @@ function ListLogView({
activeLog,
linesPerRow,
fontSize,
isActiveLog,
onClearActiveLog,
handleChangeSelectedView,
}: ListLogViewProps): JSX.Element {
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
@@ -133,24 +129,35 @@ function ListLogView({
);
const isReadOnlyLog = !isLogsExplorerPage;
const {
activeLog: activeContextLog,
onAddToQuery: handleAddToQuery,
onSetActiveLog: handleSetActiveContextLog,
onClearActiveLog: handleClearActiveContextLog,
} = useActiveLog();
const isDarkMode = useIsDarkMode();
const handleDetailedView = useCallback(() => {
if (isActiveLog) {
onClearActiveLog?.();
return;
}
const handlerClearActiveContextLog = useCallback(
(event: React.MouseEvent | React.KeyboardEvent) => {
event.preventDefault();
event.stopPropagation();
handleClearActiveContextLog();
},
[handleClearActiveContextLog],
);
const handleDetailedView = useCallback(() => {
onSetActiveLog(logData);
}, [logData, onSetActiveLog, isActiveLog, onClearActiveLog]);
}, [logData, onSetActiveLog]);
const handleShowContext = useCallback(
(event: React.MouseEvent) => {
event.preventDefault();
event.stopPropagation();
onSetActiveLog(logData, VIEW_TYPES.CONTEXT);
handleSetActiveContextLog(logData);
},
[logData, onSetActiveLog],
[logData, handleSetActiveContextLog],
);
const updatedSelecedFields = useMemo(
@@ -179,7 +186,11 @@ function ListLogView({
return (
<>
<Container
$isActiveLog={isHighlighted || activeLog?.id === logData.id}
$isActiveLog={
isHighlighted ||
activeLog?.id === logData.id ||
activeContextLog?.id === logData.id
}
$isDarkMode={isDarkMode}
$logType={logType}
onClick={handleDetailedView}
@@ -240,6 +251,15 @@ function ListLogView({
/>
)}
</Container>
{activeContextLog && (
<LogDetail
log={activeContextLog}
onAddToQuery={handleAddToQuery}
selectedTab={VIEW_TYPES.CONTEXT}
onClose={handlerClearActiveContextLog}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
</>
);
}

View File

@@ -1,15 +1,19 @@
import {
KeyboardEvent,
memo,
MouseEvent,
MouseEventHandler,
useCallback,
useMemo,
useState,
} from 'react';
import { Color } from '@signozhq/design-tokens';
import { Tooltip } from 'antd';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { DrawerProps, Tooltip } from 'antd';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
// hooks
import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -35,8 +39,7 @@ function RawLogView({
selectedFields = [],
fontSize,
onLogClick,
onSetActiveLog,
onClearActiveLog,
handleChangeSelectedView,
}: RawLogViewProps): JSX.Element {
const {
isHighlighted: isUrlHighlighted,
@@ -45,6 +48,15 @@ function RawLogView({
} = useCopyLogLink(data.id);
const flattenLogData = useMemo(() => FlatLogData(data), [data]);
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
} = useActiveLog();
const [selectedTab, setSelectedTab] = useState<VIEWS | undefined>();
const isDarkMode = useIsDarkMode();
const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;
@@ -122,24 +134,34 @@ function RawLogView({
// Use custom click handler if provided, otherwise use default behavior
if (onLogClick) {
onLogClick(data, event);
return;
} else {
onSetActiveLog(data);
setSelectedTab(VIEW_TYPES.OVERVIEW);
}
if (isActiveLog) {
onClearActiveLog?.();
return;
}
onSetActiveLog?.(data);
},
[isReadOnly, onLogClick, isActiveLog, onSetActiveLog, data, onClearActiveLog],
[isReadOnly, data, onSetActiveLog, onLogClick],
);
const handleCloseLogDetail: DrawerProps['onClose'] = useCallback(
(
event: MouseEvent<Element, globalThis.MouseEvent> | KeyboardEvent<Element>,
) => {
event.preventDefault();
event.stopPropagation();
onClearActiveLog();
setSelectedTab(undefined);
},
[onClearActiveLog],
);
const handleShowContext: MouseEventHandler<HTMLElement> = useCallback(
(event) => {
event.preventDefault();
event.stopPropagation();
onSetActiveLog?.(data, VIEW_TYPES.CONTEXT);
// handleSetActiveContextLog(data);
setSelectedTab(VIEW_TYPES.CONTEXT);
onSetActiveLog(data);
},
[data, onSetActiveLog],
);
@@ -159,7 +181,7 @@ function RawLogView({
$isDarkMode={isDarkMode}
$isReadOnly={isReadOnly}
$isHightlightedLog={isUrlHighlighted}
$isActiveLog={isActiveLog}
$isActiveLog={activeLog?.id === data.id || isActiveLog}
$isCustomHighlighted={isHighlighted}
$logType={logType}
fontSize={fontSize}
@@ -196,6 +218,17 @@ function RawLogView({
onLogCopy={onLogCopy}
/>
)}
{selectedTab && (
<LogDetail
selectedTab={selectedTab}
log={activeLog}
onClose={handleCloseLogDetail}
onAddToQuery={onAddToQuery}
onClickActionItem={onAddToQuery}
handleChangeSelectedView={handleChangeSelectedView}
/>
)}
</RawLogViewContainer>
);
}

View File

@@ -45,6 +45,9 @@ export const RawLogViewContainer = styled(Row)<{
: `margin: 2px 0;`}
}
${({ $isActiveLog, $logType }): string =>
getActiveLogBackground($isActiveLog, true, $logType)}
${({ $isReadOnly, $isActiveLog, $isDarkMode, $logType }): string =>
$isActiveLog
? getActiveLogBackground($isActiveLog, $isDarkMode, $logType)

View File

@@ -1,5 +1,4 @@
import { MouseEvent } from 'react';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
import { FontSize } from 'container/OptionsMenu/types';
import { IField } from 'types/api/logs/fields';
@@ -17,11 +16,6 @@ export interface RawLogViewProps {
selectedFields?: IField[];
onLogClick?: (log: ILog, event: MouseEvent) => void;
handleChangeSelectedView?: ChangeViewFunctionType;
onSetActiveLog?: (
log: ILog,
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
) => void;
onClearActiveLog?: () => void;
}
export interface RawLogContentProps {

View File

@@ -21,7 +21,7 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties {
export const defaultTableStyle: CSSProperties = {
minWidth: '40rem',
maxWidth: '90rem',
maxWidth: '60rem',
};
export const defaultListViewPanelStyle: CSSProperties = {

View File

@@ -78,10 +78,12 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
let user: ReturnType<typeof userEvent.setup>;
let mockOnValueUpdate: jest.Mock;
let mockSetVariablesToGetUpdated: jest.Mock;
beforeEach(() => {
user = userEvent.setup();
mockOnValueUpdate = jest.fn();
mockSetVariablesToGetUpdated = jest.fn();
jest.clearAllMocks();
});
@@ -100,6 +102,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -145,6 +150,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -187,6 +195,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -236,6 +247,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -258,6 +272,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -291,6 +308,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -324,6 +344,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -346,6 +369,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -379,6 +405,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -432,6 +461,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -476,6 +508,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -513,6 +548,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -544,6 +582,9 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);

View File

@@ -282,11 +282,11 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
size="small"
style={{ marginLeft: 'auto' }}
checked={showIP ?? true}
onChange={(checked): void => {
onClick={(): void => {
logEvent('API Monitoring: Show IP addresses clicked', {
showIP: checked,
showIP: !(showIP ?? true),
});
setParams({ showIP: checked });
setParams({ showIP });
}}
/>
</div>

View File

@@ -1,8 +1,4 @@
import { ENVIRONMENT } from 'constants/env';
import {
ApiMonitoringParams,
useApiMonitoringParams,
} from 'container/ApiMonitoring/queryParams';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import {
otherFiltersResponse,
@@ -22,15 +18,10 @@ import { QuickFiltersConfig } from './constants';
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: jest.fn(),
}));
jest.mock('container/ApiMonitoring/queryParams');
const handleFilterVisibilityChange = jest.fn();
const redirectWithQueryBuilderData = jest.fn();
const putHandler = jest.fn();
const mockSetApiMonitoringParams = jest.fn() as jest.MockedFunction<
(newParams: Partial<ApiMonitoringParams>, replace?: boolean) => void
>;
const mockUseApiMonitoringParams = jest.mocked(useApiMonitoringParams);
const BASE_URL = ENVIRONMENT.baseURL;
const SIGNAL = SignalType.LOGS;
@@ -93,28 +84,6 @@ TestQuickFilters.defaultProps = {
config: QuickFiltersConfig,
};
function TestQuickFiltersApiMonitoring({
signal = SignalType.LOGS,
config = QuickFiltersConfig,
}: {
signal?: SignalType;
config?: IQuickFiltersConfig[];
}): JSX.Element {
return (
<QuickFilters
source={QuickFiltersSource.API_MONITORING}
config={config}
handleFilterVisibilityChange={handleFilterVisibilityChange}
signal={signal}
/>
);
}
TestQuickFiltersApiMonitoring.defaultProps = {
signal: '',
config: QuickFiltersConfig,
};
beforeAll(() => {
server.listen();
});
@@ -143,10 +112,6 @@ beforeEach(() => {
lastUsedQuery: 0,
redirectWithQueryBuilderData,
});
mockUseApiMonitoringParams.mockReturnValue([
{ showIP: true } as ApiMonitoringParams,
mockSetApiMonitoringParams,
]);
setupServer();
});
@@ -286,24 +251,6 @@ describe('Quick Filters', () => {
);
});
});
it('toggles Show IP addresses and updates API Monitoring params', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<TestQuickFiltersApiMonitoring />);
// Switch should be rendered and initially checked
expect(screen.getByText('Show IP addresses')).toBeInTheDocument();
const toggle = screen.getByRole('switch');
expect(toggle).toHaveAttribute('aria-checked', 'true');
await user.click(toggle);
await waitFor(() => {
expect(mockSetApiMonitoringParams).toHaveBeenCalledWith(
expect.objectContaining({ showIP: false }),
);
});
});
});
describe('Quick Filters with custom filters', () => {

View File

@@ -55,7 +55,6 @@ const ROUTES = {
LOGS_INDEX_FIELDS: '/logs-explorer/index-fields',
TRACE_EXPLORER: '/trace-explorer',
BILLING: '/settings/billing',
ROLES_SETTINGS: '/settings/roles',
SUPPORT: '/support',
LOGS_SAVE_VIEWS: '/logs/saved-views',
TRACES_SAVE_VIEWS: '/traces/saved-views',

View File

@@ -202,7 +202,7 @@ function AllEndPoints({
const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
op: 'AND',
});
setParams({
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
selectedView: VIEWS.ENDPOINT_STATS,
endPointDetailsLocalFilters: {
items: initialItems,

View File

@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
const httpUrlKey = {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
};
@@ -93,7 +93,7 @@ function EndPointDetails({
return currentFilters; // No change needed, prevents loop
}
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
const otherFilters = currentFilters?.items?.filter(
(item) => item.key?.key !== httpUrlKey.key,
);
@@ -125,7 +125,7 @@ function EndPointDetails({
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
// Filter out http_url filter before saving to params
// Filter out http.url filter before saving to params
const filteredNewFilters = {
op: 'AND',
items:
@@ -299,6 +299,7 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
filters={filters}
timeRange={timeRange}
onDragSelect={onDragSelect}

View File

@@ -56,15 +56,15 @@ function TopErrors({
{
items: endPointName
? [
// Remove any existing http_url filters from initialFilters to avoid duplicates
// Remove any existing http.url filters from initialFilters to avoid duplicates
...(initialFilters?.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
) || []),
{
id: '92b8a1c1',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',

View File

@@ -9,7 +9,6 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../constants';
import DomainMetrics from './DomainMetrics';
// Mock the API call
@@ -127,9 +126,11 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'count()',
);
// Verify exact domain filter expression structure
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryA.filter.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryA.filter.expression).toContain(
'url.full EXISTS OR http.url EXISTS',
);
// Verify Query B - p99 latency
@@ -141,13 +142,17 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'p99(duration_nano)',
);
// Verify exact domain filter expression structure
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryB.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
// Verify Query C - error count (disabled)
const queryC = queryData.find((q: any) => q.queryName === 'C');
expect(queryC).toBeDefined();
expect(queryC.disabled).toBe(true);
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryC.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryC.aggregations?.[0]).toBeDefined();
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count()',
@@ -164,7 +169,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'max(timestamp)',
);
// Verify exact domain filter expression structure
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryD.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
// Verify Formula F1 - error rate calculation
const formulas = payload.query.builder.queryFormulas;

View File

@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryA.filter) {
expect(queryA.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryB.filter) {
expect(queryB.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(queryC.aggregateOperator).toBe('count');
if (queryC.filter) {
expect(queryC.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryD.filter) {
expect(queryD.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
}
if (queryE.filter) {
expect(queryE.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(query.filter.expression).toContain('staging');
// Also verify domain filter is still present
expect(query.filter.expression).toContain(
"http_host = 'api.internal.com'",
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
);
// Verify client kind filter is present
expect(query.filter.expression).toContain("kind_string = 'Client'");

View File

@@ -34,6 +34,7 @@ function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
filters,
timeRange,
onDragSelect,
@@ -47,6 +48,7 @@ function StatusCodeBarCharts({
unknown
>;
domainName: string;
endPointName: string;
filters: IBuilderQuery['filters'];
timeRange: {
startTime: number;
@@ -142,11 +144,11 @@ function StatusCodeBarCharts({
const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, {
getStatusCodeBarChartWidgetData(domainName, endPointName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, filters],
[domainName, endPointName, filters],
);
const graphClickHandler = useCallback(
@@ -164,7 +166,6 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);
handleGraphClick({
xValue,
yValue,

View File

@@ -12,8 +12,8 @@ export const VIEW_TYPES = {
// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
HTTP_URL: 'http_url',
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_NAME: 'net.peer.name',
SERVER_PORT: 'net.peer.port',
} as const;
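
Note: with `SERVER_NAME` now pointing at `net.peer.name`, the single `http_host` equality used before is replaced everywhere by an OR over the two client-side server attributes. A tiny illustrative sketch of the expression the tests below assert against; the canonical source is `getDomainNameFilterExpression` in `utils.tsx` further down in this diff.

```typescript
// Sketch of the OR-based domain filter expression used throughout the V5 tests.
export const buildDomainFilterExpression = (domainName: string): string =>
	`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;

// buildDomainFilterExpression('api.example.com')
// => "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')"
```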

View File

@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
const endpointFilter = result?.items?.find(
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
item.value === endPointName,
);
expect(endpointFilter).toBeDefined();
@@ -344,12 +344,13 @@ describe('API Monitoring Utils', () => {
describe('getFormattedEndPointDropDownData', () => {
it('should format endpoint dropdown data correctly', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const mockData = [
{
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/users',
'url.full': 'http://example.com/api/users',
A: 150, // count or other metric
},
},
@@ -357,6 +358,7 @@ describe('API Monitoring Utils', () => {
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/orders',
'url.full': 'http://example.com/api/orders',
A: 75,
},
},
@@ -404,7 +406,7 @@ describe('API Monitoring Utils', () => {
it('should handle items without URL path', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
type MockDataType = {
data: {
[key: string]: string | number;
@@ -710,11 +712,13 @@ describe('API Monitoring Utils', () => {
it('should generate widget configuration for status code bar chart', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const filters = { items: [], op: 'AND' };
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);
@@ -737,11 +741,21 @@ describe('API Monitoring Utils', () => {
if (domainFilter) {
expect(domainFilter.value).toBe(domainName);
}
// Should have endpoint filter if provided
const endpointFilter = queryData.filters?.items?.find(
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
);
expect(endpointFilter).toBeDefined();
if (endpointFilter) {
expect(endpointFilter.value).toBe(endPointName);
}
});
it('should include custom filters in the widget configuration', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const customFilter = {
id: 'custom-filter',
key: {
@@ -757,6 +771,7 @@ describe('API Monitoring Utils', () => {
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);

View File

@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
}
>
Click Row

View File

@@ -6,10 +6,10 @@
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: http_host = '${domainName}'
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,8 +18,6 @@ import {
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('AllEndpointsWidget - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const emptyFilters: IBuilderQuery['filters'] = {
@@ -94,28 +92,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
// Queries A, B, C have identical base filter
expect(queryA.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryB.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryC.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
// Query D has additional has_error filter
expect(queryD.filter?.expression).toBe(
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
);
});
});
describe('2. GroupBy Structure', () => {
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
it('default groupBy includes both http.url and url.full with type attribute', () => {
const widget = getAllEndpointsWidgetData(
emptyGroupBy,
mockDomainName,
@@ -126,13 +124,23 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have the same default groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(1);
expect(query.groupBy).toHaveLength(2);
// http.url
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
type: 'attribute',
});
// url.full
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
});
});
@@ -162,18 +170,19 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have defaults + custom groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
// First two should be defaults (http_url)
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
// First two should be defaults (http.url, url.full)
expect(query.groupBy[0].key).toBe('http.url');
expect(query.groupBy[1].key).toBe('url.full');
// Last two should be custom (matching subset of properties)
expect(query.groupBy[1]).toMatchObject({
expect(query.groupBy[2]).toMatchObject({
dataType: DataTypes.String,
key: 'service.name',
type: 'resource',
});
expect(query.groupBy[2]).toMatchObject({
expect(query.groupBy[3]).toMatchObject({
dataType: DataTypes.String,
key: 'deployment.environment',
type: 'resource',
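
Note: because every query now groups by both `http.url` and `url.full`, a result row can carry the endpoint under either key. A minimal sketch of the fallback used when choosing a display value, assuming a generic row shape; in the widget itself this lives in `renderColumnCell` (see the `utils.tsx` diff below).

```typescript
type EndpointRow = Record<string, string | number | undefined>;

// Prefer the http.url attribute, fall back to url.full, and render '-' when
// neither key has a usable value.
const getEndpointDisplayValue = (row: EndpointRow): string => {
	const value = row['http.url'] ?? row['url.full'];
	return value === undefined || value === '' || value === 'n/a'
		? '-'
		: String(value);
};
```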

View File

@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),

View File

@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
it('formats data using the utility function', () => {
const mockRows = [
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
];
const dataProps = {

View File

@@ -6,18 +6,15 @@
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: http_host = '${domainName}'
* - Domain handling: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Existence check: http_url EXISTS
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Aggregation: count() expression
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('EndpointDropdown - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
@@ -46,9 +43,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters');
// Base filter 1: Domain http_host = '${domainName}'
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -56,7 +53,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Base filter 3: Existence check
expect(queryA.filter?.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
'(http.url EXISTS OR url.full EXISTS)',
);
// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -67,11 +64,16 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
});
expect(queryA).not.toHaveProperty('aggregateAttribute');
// GroupBy: http_url
expect(queryA.groupBy).toHaveLength(1);
// GroupBy: Both http.url and url.full
expect(queryA.groupBy).toHaveLength(2);
expect(queryA.groupBy).toContainEqual({
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
key: 'http.url',
dataType: 'string',
type: 'attribute',
});
expect(queryA.groupBy).toContainEqual({
key: 'url.full',
dataType: 'string',
type: 'attribute',
});
});
@@ -118,7 +120,53 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Exact filter expression with custom filters merged
expect(expression).toBe(
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
);
});
});
describe('3. HTTP URL Filter Special Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const dropdownQuery = payload[2];
const expression =
dropdownQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: Exact filter expression with http.url converted to OR logic
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
);
});
});

View File

@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getRateOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getLatencyOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData.filter).toBeDefined();
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}' service.name = 'user-service'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
);
});
});

View File

@@ -142,6 +142,7 @@ describe('StatusCodeBarCharts', () => {
endTime: 1609545600000,
};
const mockDomainName = 'test-domain';
const mockEndPointName = '/api/test';
const onDragSelectMock = jest.fn();
const refetchFn = jest.fn();
@@ -231,6 +232,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -266,6 +268,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -308,6 +311,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -352,6 +356,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -399,6 +404,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -413,6 +419,7 @@ describe('StatusCodeBarCharts', () => {
// but we've confirmed the function is mocked and ready to be tested
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: [],
op: 'AND',
@@ -460,6 +467,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockCustomFilters as IBuilderQuery['filters']}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -469,6 +477,7 @@ describe('StatusCodeBarCharts', () => {
// Assert widget creation was called with the correct parameters
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({ id: 'custom-filter' }),

View File

@@ -10,7 +10,7 @@
*
* V5 Changes:
* - Filter format change: filters.items[] → filter.expression
* - Domain filter: (http_host)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - stepInterval: 60 → null
* - Grouped by response_status_code
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toBe(latencyExpression);
// Verify base filters
expect(callsExpression).toContain('http_host');
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
// Verify custom filters are merged
@@ -187,4 +187,51 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toContain('production');
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/metrics',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const callsChartQuery = payload[4];
const latencyChartQuery = payload[5];
const callsExpression =
callsChartQuery.query.builder.queryData[0].filter?.expression;
const latencyExpression =
latencyChartQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(callsExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
expect(latencyExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
// Base filters still present
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
});
});
});

View File

@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Kind filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// Base filters present
expect(expression).toContain('http_host');
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
@@ -165,4 +165,62 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(expression).toContain(
"(http.url = '/api/users' OR url.full = '/api/users')",
);
// Other filters still present
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
});
});
});

View File

@@ -4,7 +4,6 @@ import { rest, server } from 'mocks-server/server';
import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
import { DataSource } from 'types/common/queryBuilder';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
import { getTopErrorsQueryPayload } from '../utils';
@@ -84,7 +83,7 @@ describe('TopErrors', () => {
{
columns: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -124,7 +123,7 @@ describe('TopErrors', () => {
table: {
rows: [
{
http_url: '/api/test',
'http.url': '/api/test',
A: 100,
},
],
@@ -206,7 +205,7 @@ describe('TopErrors', () => {
expect(navigateMock).toHaveBeenCalledWith({
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: 'http.url' }),
op: '=',
value: '/api/test',
}),
@@ -216,7 +215,7 @@ describe('TopErrors', () => {
value: 'true',
}),
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.SERVER_NAME }),
key: expect.objectContaining({ key: 'net.peer.name' }),
op: '=',
value: 'test-domain',
}),
@@ -335,7 +334,7 @@ describe('TopErrors', () => {
// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
);
});
});

View File

@@ -15,6 +15,7 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
@@ -56,12 +57,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);
export const getDomainNameFilterExpression = (domainName: string): string =>
`http_host = '${domainName}'`;
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
export const clientKindExpression = `kind_string = 'Client'`;
/**
* Converts filters to expression
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
@@ -74,6 +75,34 @@ export const convertFiltersWithUrlHandling = (
return baseExpression;
}
// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
const httpUrlFilter = filters.items?.find(
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
);
// If http.url filter exists, create modified filters with (http.url OR url.full)
if (httpUrlFilter && httpUrlFilter.value) {
// Remove ALL http.url filters from items (guards against duplicates)
const otherFilters = filters.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
);
// Convert to expression first with other filters
const {
filter: intermediateFilter,
} = convertFiltersToExpressionWithExistingQuery(
{ ...filters, items: otherFilters || [] },
baseExpression,
);
// Add the OR condition for http.url and url.full
const urlValue = httpUrlFilter.value;
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
return intermediateFilter.expression.trim()
? `${intermediateFilter.expression} AND ${urlCondition}`
: urlCondition;
}
const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
baseExpression,
@@ -342,7 +371,7 @@ export const formatDataForTable = (
});
};
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
export const getDomainMetricsQueryPayload = (
domainName: string,
@@ -559,7 +588,14 @@ const defaultGroupBy = [
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'attribute',
},
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
},
// {
@@ -602,7 +638,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -649,7 +685,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -697,7 +733,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -744,7 +780,7 @@ export const getEndPointsQueryPayload = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -831,8 +867,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`has_error = true`,
];
if (showStatusCodeErrors) {
@@ -874,7 +910,12 @@ export const getTopErrorsQueryPayload = (
filter: { expression: filterExpression },
groupBy: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -1093,11 +1134,11 @@ export const formatEndPointsDataForTable = (
if (!isGroupedByAttribute) {
formattedData = data?.map((endpoint) => {
const { port } = extractPortAndEndpoint(
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
);
return {
key: v4(),
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
port,
callCount:
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
@@ -1221,7 +1262,9 @@ export const formatTopErrorsDataForTable = (
return {
key: v4(),
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
endpointName: getDisplayValue(
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
),
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
statusMessage: getDisplayValue(rowObj.status_message),
count: getDisplayValue(rowObj.__result_0),
@@ -1238,10 +1281,10 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'ea16470b',
key: {
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
dataType: DataTypes.String,
type: 'tag',
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
id: 'http.url--string--tag--false',
},
op: '=',
value: endPointName,
@@ -1259,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'e8a043b7',
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
key: 'net.peer.name',
dataType: DataTypes.String,
type: '',
},
@@ -1738,7 +1781,7 @@ export const getEndPointDetailsQueryPayload = (
filters || { items: [], op: 'AND' },
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
expression: 'A',
@@ -1750,7 +1793,12 @@ export const getEndPointDetailsQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'attribute',
},
{
key: 'url.full',
dataType: DataTypes.String,
type: 'attribute',
},
@@ -2150,7 +2198,7 @@ export const getEndPointZeroStateQueryPayload = (
key: {
key: SPAN_ATTRIBUTES.SERVER_NAME,
dataType: DataTypes.String,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
@@ -2177,7 +2225,7 @@ export const getEndPointZeroStateQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'tag',
},
@@ -2371,7 +2419,8 @@ export const statusCodeWidgetInfo = [
interface EndPointDropDownResponseRow {
data: {
[SPAN_ATTRIBUTES.HTTP_URL]: string;
[SPAN_ATTRIBUTES.URL_PATH]: string;
'url.full': string;
A: number;
};
}
@@ -2390,8 +2439,8 @@ export const getFormattedEndPointDropDownData = (
}
return data.map((row) => ({
key: v4(),
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
}));
};
@@ -2720,6 +2769,7 @@ export const groupStatusCodes = (
export const getStatusCodeBarChartWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => ({
query: {
@@ -2743,11 +2793,25 @@ export const getStatusCodeBarChartWidgetData = (
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.SERVER_NAME,
type: '',
type: 'tag',
},
op: '=',
value: domainName,
},
...(endPointName
? [
{
id: '8b1be6f0',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',
value: endPointName,
},
]
: []),
...(filters?.items || []),
],
op: 'AND',
@@ -2869,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2901,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2933,7 +2997,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2965,7 +3029,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2996,12 +3060,24 @@ export const getAllEndpointsWidgetData = (
);
widget.renderColumnCell = {
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
[SPAN_ATTRIBUTES.URL_PATH]: (
url: string | number,
record?: RowData,
): ReactNode => {
// First try to use the url from the column value
let urlValue = url;
// If url is empty/null and we have the record, fallback to url.full
if (isEmptyFilterValue(url) && record) {
const { 'url.full': urlFull } = record;
urlValue = urlFull;
}
if (!urlValue || urlValue === 'n/a') {
return <span>-</span>;
}
const { endpoint } = extractPortAndEndpoint(String(url));
const { endpoint } = extractPortAndEndpoint(String(urlValue));
return <span>{getDisplayValue(endpoint)}</span>;
},
A: (numOfCalls: any): ReactNode => (
@@ -3056,8 +3132,8 @@ export const getAllEndpointsWidgetData = (
};
widget.customColTitles = {
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
'net.peer.port': 'Port',
};
widget.title = (
@@ -3082,10 +3158,12 @@ export const getAllEndpointsWidgetData = (
</div>
);
widget.hiddenColumns = ['url.full'];
return widget;
};
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
export const getGroupByFiltersFromGroupByValues = (
rowData: any,
@@ -3143,7 +3221,7 @@ export const getRateOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`http_host = '${domainName}'`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
),
},
functions: [],
@@ -3194,7 +3272,7 @@ export const getLatencyOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`http_host = '${domainName}'`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
),
},
functions: [],
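
Note: the core of this file's change is `convertFiltersWithUrlHandling`, which rewrites a user-supplied `http.url` filter into an `(http.url OR url.full)` condition before appending it to the base expression. A self-contained sketch of that idea, with a naive equality join standing in for the project's `convertFiltersToExpressionWithExistingQuery`; filter shapes are simplified for illustration.

```typescript
interface FilterItem {
	key?: { key: string };
	op: string;
	value?: string | number;
}

interface Filters {
	op: string;
	items?: FilterItem[];
}

const URL_PATH = 'http.url';

// Naive stand-in for convertFiltersToExpressionWithExistingQuery: AND the
// remaining simple equality filters onto the base expression.
const naiveJoin = (base: string, items: FilterItem[]): string =>
	[base, ...items.map((item) => `${item.key?.key} ${item.op} '${item.value}'`)]
		.filter(Boolean)
		.join(' AND ');

// Pull out the http.url item, convert everything else, then append an
// (http.url OR url.full) condition carrying the URL value.
export const sketchConvertFiltersWithUrlHandling = (
	filters: Filters,
	baseExpression: string,
): string => {
	const items = filters.items ?? [];
	const urlFilter = items.find((item) => item.key?.key === URL_PATH);
	const others = items.filter((item) => item.key?.key !== URL_PATH);
	const expression = naiveJoin(baseExpression, others);

	if (!urlFilter || urlFilter.value === undefined) {
		return expression;
	}
	const urlCondition = `(http.url = '${urlFilter.value}' OR url.full = '${urlFilter.value}')`;
	return expression ? `${expression} AND ${urlCondition}` : urlCondition;
};

// sketchConvertFiltersWithUrlHandling(
//   { op: 'AND', items: [{ key: { key: 'http.url' }, op: '=', value: '/api/users' }] },
//   "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
// )
// => "(net.peer.name = ... OR server.address = ...) AND (http.url = '/api/users' OR url.full = '/api/users')"
```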

View File

@@ -1,6 +1,7 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useCopyToClipboard } from 'react-use';
import { Color } from '@signozhq/design-tokens';
import {
@@ -14,16 +15,14 @@ import {
Tag,
Typography,
} from 'antd';
import {
RenderErrorResponseDTO,
ZeustypesHostDTO,
} from 'api/generated/services/sigNoz.schemas';
import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
import { AxiosError } from 'axios';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useNotifications } from 'hooks/useNotifications';
import { InfoIcon, Link2, Pencil } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { HostsProps } from 'types/api/customDomain/types';
import './CustomDomainSettings.styles.scss';
@@ -36,7 +35,7 @@ export default function CustomDomainSettings(): JSX.Element {
const { notifications } = useNotifications();
const [isEditModalOpen, setIsEditModalOpen] = useState(false);
const [isPollingEnabled, setIsPollingEnabled] = useState(false);
const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);
const [hosts, setHosts] = useState<HostsProps[] | null>(null);
const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
null,
@@ -58,37 +57,36 @@ export default function CustomDomainSettings(): JSX.Element {
};
const {
data: hostsData,
isLoading: isLoadingHosts,
isFetching: isFetchingHosts,
refetch: refetchHosts,
} = useGetHosts();
data: deploymentsData,
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
refetch: refetchDeploymentsData,
} = useGetDeploymentsData(true);
const {
mutate: updateSubDomain,
isLoading: isLoadingUpdateCustomDomain,
} = usePutHost<AxiosError<RenderErrorResponseDTO>>();
const stripProtocol = (url: string): string => {
return url?.split('://')[1] ?? url;
};
const dnsSuffix = useMemo(() => {
const defaultHost = hosts?.find((h) => h.is_default);
return defaultHost?.url && defaultHost?.name
? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
: '';
}, [hosts]);
} = useMutation(updateSubDomainAPI, {
onSuccess: () => {
setIsPollingEnabled(true);
refetchDeploymentsData();
setIsEditModalOpen(false);
},
onError: (error: AxiosError) => {
setUpdateDomainError(error);
setIsPollingEnabled(false);
},
});
useEffect(() => {
if (isFetchingHosts) {
if (isFetchingDeploymentsData) {
return;
}
if (hostsData?.data?.status === 'success') {
setHosts(hostsData?.data?.data?.hosts ?? null);
if (deploymentsData?.data?.status === 'success') {
setHosts(deploymentsData.data.data.hosts);
const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
const activeCustomDomain = deploymentsData.data.data.hosts.find(
(host) => !host.is_default,
);
@@ -99,36 +97,32 @@ export default function CustomDomainSettings(): JSX.Element {
}
}
if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
setTimeout(() => {
refetchHosts();
refetchDeploymentsData();
}, 3000);
}
if (hostsData?.data?.data?.state === 'HEALTHY') {
if (deploymentsData?.data?.data.state === 'HEALTHY') {
setIsPollingEnabled(false);
}
}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);
}, [
deploymentsData,
refetchDeploymentsData,
isPollingEnabled,
isFetchingDeploymentsData,
]);
const onUpdateCustomDomainSettings = (): void => {
editForm
.validateFields()
.then((values) => {
if (values.subdomain) {
updateSubDomain(
{ data: { name: values.subdomain } },
{
onSuccess: () => {
setIsPollingEnabled(true);
refetchHosts();
setIsEditModalOpen(false);
},
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
setUpdateDomainError(error as AxiosError);
setIsPollingEnabled(false);
},
updateSubDomain({
data: {
name: values.subdomain,
},
);
});
setCustomDomainDetails({
subdomain: values.subdomain,
@@ -140,8 +134,10 @@ export default function CustomDomainSettings(): JSX.Element {
});
};
const onCopyUrlHandler = (url: string): void => {
setCopyUrl(stripProtocol(url));
const onCopyUrlHandler = (host: string): void => {
const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
setCopyUrl(url);
notifications.success({
message: 'Copied to clipboard',
});
@@ -161,7 +157,7 @@ export default function CustomDomainSettings(): JSX.Element {
</div>
<div className="custom-domain-settings-content">
{!isLoadingHosts && (
{!isLoadingDeploymentsData && (
<Card className="custom-domain-settings-card">
<div className="custom-domain-settings-content-header">
Team {org?.[0]?.displayName} Information
@@ -173,9 +169,10 @@ export default function CustomDomainSettings(): JSX.Element {
<div
className="custom-domain-url"
key={host.name}
onClick={(): void => onCopyUrlHandler(host.url || '')}
onClick={(): void => onCopyUrlHandler(host.name)}
>
<Link2 size={12} /> {stripProtocol(host.url || '')}
<Link2 size={12} /> {host.name}.
{deploymentsData?.data.data.cluster.region.dns}
{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
</div>
))}
@@ -184,7 +181,11 @@ export default function CustomDomainSettings(): JSX.Element {
<div className="custom-domain-url-edit-btn">
<Button
className="periscope-btn"
disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
disabled={
isLoadingDeploymentsData ||
isFetchingDeploymentsData ||
isPollingEnabled
}
type="default"
icon={<Pencil size={10} />}
onClick={(): void => setIsEditModalOpen(true)}
@@ -197,7 +198,7 @@ export default function CustomDomainSettings(): JSX.Element {
{isPollingEnabled && (
<Alert
className="custom-domain-update-status"
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
type="info"
icon={<InfoIcon size={12} />}
/>
@@ -205,7 +206,7 @@ export default function CustomDomainSettings(): JSX.Element {
</Card>
)}
{isLoadingHosts && (
{isLoadingDeploymentsData && (
<Card className="custom-domain-settings-card">
<Skeleton
className="custom-domain-settings-skeleton"
@@ -254,7 +255,7 @@ export default function CustomDomainSettings(): JSX.Element {
addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
placeholder="Enter Domain"
onChange={(): void => setUpdateDomainError(null)}
addonAfter={dnsSuffix}
addonAfter={deploymentsData?.data.data.cluster.region.dns}
autoFocus
/>
</Form.Item>
@@ -266,8 +267,7 @@ export default function CustomDomainSettings(): JSX.Element {
{updateDomainError.status === 409 ? (
<Alert
message={
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
?.message ||
(updateDomainError?.response?.data as { error?: string })?.error ||
'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
}
type="warning"
@@ -275,10 +275,7 @@ export default function CustomDomainSettings(): JSX.Element {
/>
) : (
<Typography.Text type="danger">
{
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
?.message
}
{(updateDomainError.response?.data as { error: string })?.error}
</Typography.Text>
)}
</div>
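
Note: this component now polls `refetchDeploymentsData` every 3 seconds until the deployment state reaches `HEALTHY`. A stripped-down sketch of that polling pattern as a custom hook, assuming a hypothetical `fetchState` callback that resolves to the deployment state; the real component drives this through react-query's refetch instead.

```typescript
import { useEffect, useRef, useState } from 'react';

type DeploymentState = 'HEALTHY' | 'UPDATING';

// Poll fetchState every intervalMs while enabled, stopping once the
// deployment reports HEALTHY — mirroring the setTimeout/refetch loop above.
export function useDeploymentPolling(
	fetchState: () => Promise<DeploymentState>,
	enabled: boolean,
	intervalMs = 3000,
): DeploymentState | 'IDLE' {
	const [state, setState] = useState<DeploymentState | 'IDLE'>('IDLE');
	const timer = useRef<ReturnType<typeof setTimeout> | undefined>(undefined);

	useEffect(() => {
		if (!enabled) return undefined;

		let cancelled = false;
		const tick = async (): Promise<void> => {
			const next = await fetchState();
			if (cancelled) return;
			setState(next);
			if (next !== 'HEALTHY') {
				timer.current = setTimeout(tick, intervalMs);
			}
		};
		void tick();

		return (): void => {
			cancelled = true;
			if (timer.current) clearTimeout(timer.current);
		};
	}, [enabled, fetchState, intervalMs]);

	return state;
}
```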

View File

@@ -1,128 +0,0 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import CustomDomainSettings from '../CustomDomainSettings';
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
const mockHostsResponse: GetHosts200 = {
status: 'success',
data: {
name: 'accepted-starfish',
state: 'HEALTHY',
tier: 'PREMIUM',
hosts: [
{
name: 'accepted-starfish',
is_default: true,
url: 'https://accepted-starfish.test.cloud',
},
{
name: 'custom-host',
is_default: false,
url: 'https://custom-host.test.cloud',
},
],
},
};
describe('CustomDomainSettings', () => {
afterEach(() => server.resetHandlers());
it('renders host URLs with protocol stripped and marks the default host', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await screen.findByText(/custom-host\.test\.cloud/i);
expect(screen.getByText('Default')).toBeInTheDocument();
});
it('opens edit modal with DNS suffix derived from the default host', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
expect(
screen.getByRole('dialog', { name: /customize your team[']s url/i }),
).toBeInTheDocument();
// DNS suffix is the part of the default host URL after the name prefix
expect(screen.getByText('test.cloud')).toBeInTheDocument();
});
it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
let capturedBody: Record<string, unknown> = {};
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
capturedBody = await req.json<Record<string, unknown>>();
return res(ctx.status(200), ctx.json({}));
}),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
const input = screen.getByPlaceholderText(/enter domain/i);
await user.clear(input);
await user.type(input, 'myteam');
await user.click(screen.getByRole('button', { name: /apply changes/i }));
await waitFor(() => {
expect(capturedBody).toEqual({ name: 'myteam' });
});
});
it('shows contact support option when domain update returns 409', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(409),
ctx.json({ error: { message: 'Already updated today' } }),
),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
await user.click(screen.getByRole('button', { name: /apply changes/i }));
expect(
await screen.findByRole('button', { name: /contact support/i }),
).toBeInTheDocument();
});
});

View File

@@ -9,6 +9,74 @@
padding: 0px;
}
.dashboard-header {
border-bottom: 1px solid var(--bg-slate-400);
display: flex;
justify-content: space-between;
gap: 16px;
align-items: center;
padding: 0 8px;
box-sizing: border-box;
.dashboard-breadcrumbs {
width: 100%;
height: 48px;
display: flex;
gap: 6px;
align-items: center;
max-width: 80%;
.dashboard-btn {
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: -0.07px;
padding: 0px;
height: 20px;
}
.dashboard-btn:hover {
background-color: unset;
}
.id-btn {
display: flex;
align-items: center;
padding: 0px 2px;
border-radius: 2px;
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
height: 20px;
max-width: calc(100% - 120px);
span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.ant-btn-icon {
margin-inline-end: 4px;
}
}
.id-btn:hover {
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-300);
}
}
}
.dashboard-details {
display: flex;
justify-content: space-between;
@@ -467,6 +535,15 @@
.dashboard-description-container {
color: var(--bg-ink-400);
.dashboard-header {
border-bottom: 1px solid var(--bg-vanilla-300);
.dashboard-breadcrumbs {
.dashboard-btn {
color: var(--bg-ink-400);
}
}
}
.dashboard-details {
.left-section {
.dashboard-title {

View File

@@ -16,7 +16,9 @@ import {
} from 'antd';
import logEvent from 'api/common/logEvent';
import ConfigureIcon from 'assets/Integrations/ConfigureIcon';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
@@ -25,6 +27,7 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import {
Check,
@@ -34,6 +37,7 @@ import {
FolderKanban,
Fullscreen,
Globe,
LayoutGrid,
LockKeyhole,
PenLine,
X,
@@ -47,7 +51,6 @@ import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
import { v4 as uuid } from 'uuid';
import DashboardHeader from '../components/DashboardHeader/DashboardHeader';
import DashboardGraphSlider from '../ComponentsSlider';
import DashboardSettings from '../DashboardSettings';
import { Base64Icons } from '../DashboardSettings/General/utils';
@@ -68,6 +71,7 @@ interface DashboardDescriptionProps {
// eslint-disable-next-line sonarjs/cognitive-complexity
function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
const { safeNavigate } = useSafeNavigate();
const { handle } = props;
const {
selectedDashboard,
@@ -76,6 +80,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
layouts,
setLayouts,
isDashboardLocked,
listSortOrder,
setSelectedDashboard,
handleToggleDashboardSlider,
setSelectedRowWidgetId,
@@ -287,6 +292,17 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
});
}
function goToListPage(): void {
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);
const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}
const {
data: publicDashboardResponse,
isLoading: isLoadingPublicDashboardData,
@@ -335,7 +351,32 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
return (
<Card className="dashboard-description-container">
<DashboardHeader />
<div className="dashboard-header">
<section className="dashboard-breadcrumbs">
<Button
type="text"
icon={<LayoutGrid size={14} />}
className="dashboard-btn"
onClick={(): void => goToListPage()}
>
Dashboard /
</Button>
<Button type="text" className="id-btn dashboard-name-btn">
<img
src={image}
alt="dashboard-icon"
style={{ height: '14px', width: '14px' }}
/>
{title}
</Button>
</section>
<HeaderRightSection
enableAnnouncements={false}
enableShare
enableFeedback
/>
</div>
<section className="dashboard-details">
<div className="left-section">
<img src={image} alt="dashboard-img" className="dashboard-img" />

View File

@@ -50,10 +50,6 @@
}
.variable-select {
.ant-select-selector {
overflow-y: hidden !important;
}
.ant-select-item {
display: flex;
align-items: center;

View File

@@ -9,15 +9,11 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
import './DashboardVariableSelection.styles.scss';
@@ -26,6 +22,8 @@ function DashboardVariableSelection(): JSX.Element | null {
const {
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
} = useDashboard();
const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -34,9 +32,6 @@ function DashboardVariableSelection(): JSX.Element | null {
const sortedVariablesArray = useDashboardVariablesSelector(
(state) => state.sortedVariablesArray,
);
const dynamicVariableOrder = useDashboardVariablesSelector(
(state) => state.dynamicVariableOrder,
);
const dependencyData = useDashboardVariablesSelector(
(state) => state.dependencyData,
);
@@ -55,22 +50,18 @@ function DashboardVariableSelection(): JSX.Element | null {
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
// Memoize the order key to avoid unnecessary triggers
const variableOrderKey = useMemo(() => {
const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
const dynamicVariableOrderKey = dynamicVariableOrder?.join(',') ?? '';
return `${queryVariableOrderKey}|${dynamicVariableOrderKey}`;
}, [dependencyData?.order, dynamicVariableOrder]);
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
// Trigger refetch when dependency order changes or global time changes
useEffect(() => {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableOrderKey, minTime, maxTime]);
}, [dependencyOrderKey, minTime, maxTime]);
// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.
@@ -130,14 +121,29 @@ function DashboardVariableSelection(): JSX.Element | null {
return prev;
});
// Cascade: enqueue query-type descendants for refetching
enqueueDescendantsOfVariable(name);
if (dependencyData) {
const updatedVariables: string[] = [];
onUpdateVariableNode(
name,
dependencyData.graph,
dependencyData.order,
(node) => updatedVariables.push(node),
);
setVariablesToGetUpdated((prev) => [
...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
]);
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
@@ -152,6 +158,9 @@ function DashboardVariableSelection(): JSX.Element | null {
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
);
})}
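
Note: the diff restores `onUpdateVariableNode`, which walks the variable dependency graph and collects every descendant of the variable that just changed so those variables can be re-fetched in order. A small sketch of that cascade, assuming the graph is an adjacency map from a variable to its direct dependents and `order` is a topological order; the project's actual traversal lives in `./util`.

```typescript
type DependencyGraph = Record<string, string[]>;

// Collect every variable downstream of `changed` and return them in the given
// topological order, so dependents are re-fetched parents-first.
export function collectDescendantsInOrder(
	changed: string,
	graph: DependencyGraph,
	order: string[],
): string[] {
	const visited = new Set<string>();
	const stack = [changed];

	while (stack.length > 0) {
		const current = stack.pop() as string;
		(graph[current] ?? []).forEach((dependent) => {
			if (!visited.has(dependent)) {
				visited.add(dependent);
				stack.push(dependent);
			}
		});
	}

	return order.filter((name) => visited.has(name) && name !== changed);
}

// With graph { env: ['service'], service: ['endpoint'] } and order
// ['env', 'service', 'endpoint'], changing 'env' yields ['service', 'endpoint'].
```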

Some files were not shown because too many files have changed in this diff