Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-20 15:52:41 +00:00)

Compare commits: SIG-3496 ... fix/chart-

18 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 41f5f6e421 | |
| | da2e050a23 | |
| | 5a69f16410 | |
| | 07afef5c5e | |
| | dcae722b53 | |
| | 92b07d15ea | |
| | a0dad1602e | |
| | 5cf5b70aca | |
| | db51b23e3d | |
| | 80c46b3414 | |
| | 2b929421a1 | |
| | 2792e20aa2 | |
| | 473be1b174 | |
| | 6d0c13f9a7 | |
| | 5cc562ba35 | |
| | 22099962be | |
| | 2559b52bb1 | |
| | 7523596043 | |
.github/workflows/integrationci.yaml (vendored), 4 changes

```yaml
@@ -53,9 +53,9 @@ jobs:
  - sqlite
clickhouse-version:
  - 25.5.6
  - 25.10.1
  - 25.10.5
schema-migrator-version:
  - v0.129.7
  - v0.142.0
postgres-version:
  - 15
if: |
```
```go
@@ -85,6 +85,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
		func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
			return noopgateway.NewProviderFactory()
		},
		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
			return querier.NewHandler(ps, q, a)
		},
	)
	if err != nil {
		logger.ErrorContext(ctx, "failed to create signoz", "error", err)
```

```go
@@ -9,6 +9,7 @@ import (
	"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
	"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
	"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
	eequerier "github.com/SigNoz/signoz/ee/querier"
	"github.com/SigNoz/signoz/ee/authz/openfgaschema"
	"github.com/SigNoz/signoz/ee/gateway/httpgateway"
	enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"

@@ -124,6 +125,10 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
		func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
			return httpgateway.NewProviderFactory(licensing)
		},
		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
			communityHandler := querier.NewHandler(ps, q, a)
			return eequerier.NewHandler(ps, q, communityHandler)
		},
	)

	if err != nil {
```
```yaml
@@ -307,11 +307,16 @@ components:
          type: string
        value:
          type: string
      required:
        - id
        - value
      type: object
    GatewaytypesGettableCreatedIngestionKeyLimit:
      properties:
        id:
          type: string
      required:
        - id
      type: object
    GatewaytypesGettableIngestionKeys:
      properties:
@@ -432,6 +437,8 @@ components:
            type: string
          nullable: true
          type: array
      required:
        - name
      type: object
    GatewaytypesPostableIngestionKeyLimit:
      properties:
@@ -454,6 +461,8 @@ components:
            type: string
          nullable: true
          type: array
      required:
        - config
      type: object
    MetricsexplorertypesListMetric:
      properties:
```
```yaml
@@ -1920,6 +1929,82 @@ components:
          format: date-time
          type: string
      type: object
    ZeustypesGettableHost:
      properties:
        hosts:
          items:
            $ref: '#/components/schemas/ZeustypesHost'
          nullable: true
          type: array
        name:
          type: string
        state:
          type: string
        tier:
          type: string
      required:
        - name
        - state
        - tier
        - hosts
      type: object
    ZeustypesHost:
      properties:
        is_default:
          type: boolean
        name:
          type: string
        url:
          type: string
      required:
        - name
        - is_default
        - url
      type: object
    ZeustypesPostableHost:
      properties:
        name:
          type: string
      required:
        - name
      type: object
    ZeustypesPostableProfile:
      properties:
        existing_observability_tool:
          type: string
        has_existing_observability_tool:
          type: boolean
        logs_scale_per_day_in_gb:
          format: int64
          type: integer
        number_of_hosts:
          format: int64
          type: integer
        number_of_services:
          format: int64
          type: integer
        reasons_for_interest_in_signoz:
          items:
            type: string
          nullable: true
          type: array
        timeline_for_migrating_to_signoz:
          type: string
        uses_otel:
          type: boolean
        where_did_you_discover_signoz:
          type: string
      required:
        - uses_otel
        - has_existing_observability_tool
        - existing_observability_tool
        - reasons_for_interest_in_signoz
        - logs_scale_per_day_in_gb
        - number_of_services
        - number_of_hosts
        - where_did_you_discover_signoz
        - timeline_for_migrating_to_signoz
      type: object
  securitySchemes:
    api_key:
      description: API Keys
```
```yaml
@@ -3138,12 +3223,29 @@ paths:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - VIEWER
        - tokenizer:
            - VIEWER
      summary: Promote and index paths
      tags:
        - logs
@@ -3168,12 +3270,29 @@ paths:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - EDITOR
        - tokenizer:
            - EDITOR
      summary: Promote and index paths
      tags:
        - logs
```
```yaml
@@ -4720,6 +4839,7 @@ paths:
      parameters:
        - in: query
          name: name
          required: true
          schema:
            type: string
        - in: query
```
```yaml
@@ -5538,6 +5658,174 @@ paths:
      summary: Rotate session
      tags:
        - sessions
  /api/v2/zeus/hosts:
    get:
      deprecated: false
      description: This endpoint gets the host info from zeus.
      operationId: GetHosts
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    $ref: '#/components/schemas/ZeustypesGettableHost'
                  status:
                    type: string
                type: object
          description: OK
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "404":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Not Found
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - ADMIN
        - tokenizer:
            - ADMIN
      summary: Get host info from Zeus.
      tags:
        - zeus
    put:
      deprecated: false
      description: This endpoint saves the host of a deployment to zeus.
      operationId: PutHost
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/ZeustypesPostableHost'
      responses:
        "204":
          description: No Content
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "404":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Not Found
        "409":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Conflict
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - ADMIN
        - tokenizer:
            - ADMIN
      summary: Put host in Zeus for a deployment.
      tags:
        - zeus
  /api/v2/zeus/profiles:
    put:
      deprecated: false
      description: This endpoint saves the profile of a deployment to zeus.
      operationId: PutProfile
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/ZeustypesPostableProfile'
      responses:
        "204":
          description: No Content
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "404":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Not Found
        "409":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Conflict
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - ADMIN
        - tokenizer:
            - ADMIN
      summary: Put profile in Zeus for a deployment.
      tags:
        - zeus
  /api/v5/query_range:
    post:
      deprecated: false
```
```yaml
@@ -5932,4 +6220,58 @@ paths:
            - VIEWER
      summary: Query range
      tags:
        - query
        - querier
  /api/v5/substitute_vars:
    post:
      deprecated: false
      description: Replace variables in a query
      operationId: ReplaceVariables
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
                  status:
                    type: string
                type: object
          description: OK
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - VIEWER
        - tokenizer:
            - VIEWER
      summary: Replace variables
      tags:
        - querier
```
@@ -155,6 +155,7 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP

- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
  - `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.

@@ -172,8 +173,170 @@ See existing examples in:

- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)

## OpenAPI schema details for request/response types

The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.

### Adding request examples

Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:

```go
type OpenAPIExample struct {
	Name        string // unique key for the example (e.g. "traces_time_series")
	Summary     string // short description shown in docs (e.g. "Time series: count spans grouped by service")
	Description string // optional longer description
	Value       any    // the example payload, typically map[string]any
}
```

For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:

```go
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
	ID:                 "QueryRangeV5",
	Tags:               []string{"querier"},
	Summary:            "Query range",
	Description:        "Execute a composite query over a time range.",
	Request:            new(qbtypes.QueryRangeRequest),
	RequestContentType: "application/json",
	RequestExamples: []handler.OpenAPIExample{
		{
			Name:    "traces_time_series",
			Summary: "Time series: count spans grouped by service",
			Value: map[string]any{
				"schemaVersion": "v1",
				"start":         1640995200000,
				"end":           1640998800000,
				"requestType":   "time_series",
				"compositeQuery": map[string]any{
					"queries": []any{
						map[string]any{
							"type": "builder_query",
							"spec": map[string]any{
								"name":   "A",
								"signal": "traces",
								// ...
							},
						},
					},
				},
			},
		},
		// ... more examples
	},
	// ...
})).Methods(http.MethodPost).GetError(); err != nil {
	return err
}
```
### `required` tag

Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value: a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.

```go
type ListItem struct {
	...
}

type ListResponse struct {
	List  []ListItem `json:"list" required:"true" nullable:"true"`
	Total uint64     `json:"total" required:"true"`
}
```

In this example, a response like `{"list": null, "total": 0}` is valid. Both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.

### `nullable` tag

Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go, the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).

Be explicit about the distinction:

- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice. A sketch of this case follows the non-nullable example below.
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.

```go
// Non-nullable: Go code must ensure this is always an initialized slice.
type NonNullableExample struct {
	Items []Item `json:"items" required:"true"`
}
```
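For the nullable case, the counterpart is a minimal sketch like the one below; `NullableExample` is a hypothetical type added here for illustration and simply mirrors the non-nullable example with the `nullable` tag added:

```go
// Nullable: the Go code may return a nil slice, which serializes to null.
type NullableExample struct {
	Items []Item `json:"items" required:"true" nullable:"true"`
}
```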
When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.

### `Enum()` method

For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.

```go
type Signal struct {
	valuer.String
}

var (
	SignalTraces  = Signal{valuer.NewString("traces")}
	SignalLogs    = Signal{valuer.NewString("logs")}
	SignalMetrics = Signal{valuer.NewString("metrics")}
)

func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}
```

This produces the following in the generated OpenAPI spec:

```yaml
Signal:
  enum:
    - traces
    - logs
    - metrics
  type: string
```

Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.

### `JSONSchema()` method (custom schema)

For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:

```go
var _ jsonschema.Exposer = Step{}

func (Step) JSONSchema() (jsonschema.Schema, error) {
	s := jsonschema.Schema{}
	s.WithDescription("Step interval. Accepts a duration string or seconds.")

	strSchema := jsonschema.Schema{}
	strSchema.WithType(jsonschema.String.Type())
	strSchema.WithExamples("60s", "5m", "1h")

	numSchema := jsonschema.Schema{}
	numSchema.WithType(jsonschema.Number.Type())
	numSchema.WithExamples(60, 300, 3600)

	s.OneOf = []jsonschema.SchemaOrBool{
		strSchema.ToSchemaOrBool(),
		numSchema.ToSchemaOrBool(),
	}
	return s, nil
}
```
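The generated output for this custom schema is not shown in the diff above; as a rough illustration only (exact keys and placement depend on the generator), a `oneOf` of a string and a number would surface in the spec in approximately this shape:

```yaml
Step:
  description: Step interval. Accepts a duration string or seconds.
  oneOf:
    - type: string
    - type: number
```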
## What should I remember?

- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil`, which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.
ee/querier/handler.go (new file, 178 lines)

```go
@@ -0,0 +1,178 @@
package querier

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"runtime/debug"

	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type handler struct {
	set       factory.ProviderSettings
	querier   querier.Querier
	community querier.Handler
}

func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
	return &handler{
		set:       set,
		querier:   querier,
		community: community,
	}
}

func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	bodyBytes, err := io.ReadAll(req.Body)
	if err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
		return
	}
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
		return
	}

	defer func() {
		if r := recover(); r != nil {
			stackTrace := string(debug.Stack())

			queryJSON, _ := json.Marshal(queryRangeRequest)

			h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
				"stacktrace", stackTrace,
			)

			render.Error(rw, errors.NewInternalf(
				errors.CodeInternal,
				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
			))
		}
	}()

	if err := queryRangeRequest.Validate(); err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
		anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
		if err != nil {
			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
			return
		}

		results := []any{}
		for _, item := range anomalies.Results {
			results = append(results, item)
		}

		// Build step intervals from the anomaly query
		stepIntervals := make(map[string]uint64)
		if anomalyQuery.StepInterval.Duration > 0 {
			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
		}

		finalResp := &qbtypes.QueryRangeResponse{
			Type: queryRangeRequest.RequestType,
			Data: qbtypes.QueryData{
				Results: results,
			},
			Meta: qbtypes.ExecStats{
				StepIntervals: stepIntervals,
			},
		}

		render.Success(rw, http.StatusOK, finalResp)
		return
	}

	// regular query range request, delegate to community handler
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
	h.community.QueryRange(rw, req)
}

func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	h.community.QueryRawStream(rw, req)
}

func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	h.community.ReplaceVariables(rw, req)
}

func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
	for _, fn := range anomalyQuery.Functions {
		if fn.Name == qbtypes.FunctionNameAnomaly {
			for _, arg := range fn.Args {
				if arg.Name == "seasonality" {
					if seasonalityStr, ok := arg.Value.(string); ok {
						switch seasonalityStr {
						case "weekly":
							return anomalyV2.SeasonalityWeekly
						case "hourly":
							return anomalyV2.SeasonalityHourly
						}
					}
				}
			}
		}
	}
	return anomalyV2.SeasonalityDaily // default
}

func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
	switch seasonality {
	case anomalyV2.SeasonalityWeekly:
		return anomalyV2.NewWeeklyProvider(
			anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
		)
	case anomalyV2.SeasonalityHourly:
		return anomalyV2.NewHourlyProvider(
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
		)
	default:
		return anomalyV2.NewDailyProvider(
			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
		)
	}
}

func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
	seasonality := extractSeasonality(anomalyQuery)
	provider := h.createAnomalyProvider(seasonality)

	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}
```
```go
@@ -2,17 +2,13 @@ package api

import (
	"net/http"
	"net/http/httputil"
	"time"

	"github.com/SigNoz/signoz/ee/licensing/httplicensing"
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/global"
	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/http/middleware"
	querierAPI "github.com/SigNoz/signoz/pkg/querier"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"

@@ -33,7 +29,6 @@ type APIHandlerOptions struct {
	IntegrationsController       *integrations.Controller
	CloudIntegrationsController  *cloudintegrations.Controller
	LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
	Gateway                      *httputil.ReverseProxy
	GatewayUrl                   string
	// Querier Influx Interval
	FluxInterval time.Duration

@@ -57,7 +52,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
		AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
		LicensingAPI:    httplicensing.NewLicensingAPI(signoz.Licensing),
		Signoz:          signoz,
		QuerierAPI:      querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI:  queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	}, config)

@@ -80,10 +74,6 @@ func (ah *APIHandler) UM() *usage.Manager {
	return ah.opts.UsageManager
}

func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
	return ah.opts.Gateway
}

// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	// note: add ee override methods first

@@ -106,17 +96,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	// v4
	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

	// v5
	router.Handle("/api/v5/query_range", handler.New(
		am.ViewAccess(ah.queryRangeV5),
		querierAPI.QueryRangeV5OpenAPIDef,
	)).Methods(http.MethodPost)

	router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

	// Gateway
	router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))

	ah.APIHandler.RegisterRoutes(router, am)

}
```
```go
@@ -1,58 +0,0 @@
package api

import (
	"net/http"
	"strings"

	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
		return
	}

	validPath := false
	for _, allowedPrefix := range gateway.AllowedPrefix {
		if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
			validPath = true
			break
		}
	}

	if !validPath {
		rw.WriteHeader(http.StatusNotFound)
		return
	}

	license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	//Create headers
	var licenseKey string
	if license != nil {
		licenseKey = license.Key
	}

	req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
	req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
	req.Header.Set("X-Consumer-Groups", "ns:default")

	ah.Gateway().ServeHTTP(rw, req)
}
```
```go
@@ -2,16 +2,11 @@ package api

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"runtime/debug"

	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
	"github.com/SigNoz/signoz/ee/query-service/anomaly"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"

@@ -20,8 +15,6 @@ import (
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {

@@ -144,140 +137,3 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
	}
}

func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
	for _, fn := range anomalyQuery.Functions {
		if fn.Name == qbtypes.FunctionNameAnomaly {
			for _, arg := range fn.Args {
				if arg.Name == "seasonality" {
					if seasonalityStr, ok := arg.Value.(string); ok {
						switch seasonalityStr {
						case "weekly":
							return anomalyV2.SeasonalityWeekly
						case "hourly":
							return anomalyV2.SeasonalityHourly
						}
					}
				}
			}
		}
	}
	return anomalyV2.SeasonalityDaily // default
}

func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
	switch seasonality {
	case anomalyV2.SeasonalityWeekly:
		return anomalyV2.NewWeeklyProvider(
			anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	case anomalyV2.SeasonalityHourly:
		return anomalyV2.NewHourlyProvider(
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	default:
		return anomalyV2.NewDailyProvider(
			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	}
}

func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
	seasonality := extractSeasonality(anomalyQuery)
	provider := createAnomalyProvider(aH, seasonality)

	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}

func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {

	bodyBytes, err := io.ReadAll(req.Body)
	if err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
		return
	}
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
		return
	}

	defer func() {
		if r := recover(); r != nil {
			stackTrace := string(debug.Stack())

			queryJSON, _ := json.Marshal(queryRangeRequest)

			aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
				"stacktrace", stackTrace,
			)

			render.Error(rw, errors.NewInternalf(
				errors.CodeInternal,
				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
			))
		}
	}()

	if err := queryRangeRequest.Validate(); err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
		anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
		if err != nil {
			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
			return
		}

		results := []any{}
		for _, item := range anomalies.Results {
			results = append(results, item)
		}

		// Build step intervals from the anomaly query
		stepIntervals := make(map[string]uint64)
		if anomalyQuery.StepInterval.Duration > 0 {
			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
		}

		finalResp := &qbtypes.QueryRangeResponse{
			Type: queryRangeRequest.RequestType,
			Data: qbtypes.QueryData{
				Results: results,
			},
			Meta: qbtypes.ExecStats{
				StepIntervals: stepIntervals,
			},
		}

		render.Success(rw, http.StatusOK, finalResp)
		return
	} else {
		// regular query range request, let the querier handle it
		req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
		aH.QuerierAPI.QueryRange(rw, req)
	}
}
```
```go
@@ -19,7 +19,6 @@ import (
	"github.com/gorilla/handlers"

	"github.com/SigNoz/signoz/ee/query-service/app/api"
	"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
	"github.com/SigNoz/signoz/ee/query-service/rules"
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"

@@ -72,11 +71,6 @@ type Server struct {

// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
	gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
	if err != nil {
		return nil, err
	}

	cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
		Provider: "memory",
		Memory: cache.Memory{

@@ -170,7 +164,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		CloudIntegrationsController:  cloudIntegrationsController,
		LogsParsingPipelineController: logParsingPipelineController,
		FluxInterval:                 config.Querier.FluxInterval,
		Gateway:                      gatewayProxy,
		GatewayUrl:                   config.Gateway.URL.String(),
		GlobalConfig:                 config.Global,
	}

@@ -240,7 +233,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.RegisterQueryRangeV3Routes(r, am)
	apiHandler.RegisterInfraMetricsRoutes(r, am)
	apiHandler.RegisterQueryRangeV4Routes(r, am)
	apiHandler.RegisterQueryRangeV5Routes(r, am)
	apiHandler.RegisterWebSocketPaths(r, am)
	apiHandler.RegisterMessagingQueuesRoutes(r, am)
	apiHandler.RegisterThirdPartyApiRoutes(r, am)
```
```go
@@ -1,9 +0,0 @@
package gateway

import (
	"net/http/httputil"
)

func NewNoopProxy() (*httputil.ReverseProxy, error) {
	return &httputil.ReverseProxy{}, nil
}
```
```go
@@ -1,66 +0,0 @@
package gateway

import (
	"net/http"
	"net/http/httputil"
	"net/url"
	"path"
	"strings"
)

var (
	RoutePrefix   string   = "/api/gateway"
	AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
)

type proxy struct {
	url       *url.URL
	stripPath string
}

func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
	url, err := url.Parse(u)
	if err != nil {
		return nil, err
	}

	proxy := &proxy{url: url, stripPath: stripPath}

	return &httputil.ReverseProxy{
		Rewrite:        proxy.rewrite,
		ModifyResponse: proxy.modifyResponse,
		ErrorHandler:   proxy.errorHandler,
	}, nil
}

func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
	pr.SetURL(p.url)
	pr.SetXForwarded()
	pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}

func (p *proxy) modifyResponse(res *http.Response) error {
	return nil
}

func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
	rw.WriteHeader(http.StatusBadGateway)
}

func cleanPath(p string) string {
	if p == "" {
		return "/"
	}
	if p[0] != '/' {
		p = "/" + p
	}
	np := path.Clean(p)
	if p[len(p)-1] == '/' && np != "/" {
		if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
			np = p
		} else {
			np += "/"
		}
	}
	return np
}
```
```go
@@ -1,61 +0,0 @@
package gateway

import (
	"context"
	"net/http"
	"net/http/httputil"
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestProxyRewrite(t *testing.T) {
	testCases := []struct {
		name      string
		url       *url.URL
		stripPath string
		in        *url.URL
		expected  *url.URL
	}{
		{
			name:      "SamePathAdded",
			url:       &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
			stripPath: "/strip",
			in:        &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
			expected:  &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
		},
		{
			name:      "NoStripPathInput",
			url:       &url.URL{Scheme: "http", Host: "backend"},
			stripPath: "",
			in:        &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
			expected:  &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
		},
		{
			name:      "NoStripPathPresentInReq",
			url:       &url.URL{Scheme: "http", Host: "backend"},
			stripPath: "/not-found",
			in:        &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
			expected:  &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
		},
	}

	for _, tc := range testCases {
		proxy, err := NewProxy(tc.url.String(), tc.stripPath)
		require.NoError(t, err)
		inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
		require.NoError(t, err)
		proxyReq := &httputil.ProxyRequest{
			In:  inReq,
			Out: inReq.Clone(context.Background()),
		}
		proxy.Rewrite(proxyReq)

		assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
		assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
		assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
		assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
	}
}
```
```go
@@ -3,6 +3,7 @@ package httpzeus
import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"

@@ -10,6 +11,7 @@ import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/SigNoz/signoz/pkg/types/zeustypes"
	"github.com/SigNoz/signoz/pkg/zeus"
	"github.com/tidwall/gjson"
)

@@ -119,8 +121,13 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
	return err
}

func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
	_, err := provider.do(
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
	body, err := json.Marshal(profile)
	if err != nil {
		return err
	}

	_, err = provider.do(
		ctx,
		provider.config.URL.JoinPath("/v2/profiles/me"),
		http.MethodPut,

@@ -131,10 +138,15 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, body []byt
	return err
}

func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
	_, err := provider.do(
func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
	body, err := json.Marshal(host)
	if err != nil {
		return err
	}

	_, err = provider.do(
		ctx,
		provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
		provider.config.URL.JoinPath("/v2/deployments/me/host"),
		http.MethodPut,
		key,
		body,

@@ -169,21 +181,28 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
		return body, nil
	}

	return nil, provider.errFromStatusCode(response.StatusCode)
	errorMessage := gjson.GetBytes(body, "error").String()
	if errorMessage == "" {
		errorMessage = "an unknown error occurred"
	}

	return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
}

// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int) error {
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
	switch statusCode {
	case http.StatusBadRequest:
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
	case http.StatusUnauthorized:
		return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
		return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
	case http.StatusForbidden:
		return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
	case http.StatusNotFound:
		return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
		return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
	case http.StatusConflict:
		return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
	}

	return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
	return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
}
```
```json
@@ -58,6 +58,7 @@
	"@signozhq/radio-group": "0.0.2",
	"@signozhq/resizable": "0.0.0",
	"@signozhq/sonner": "0.1.0",
	"@signozhq/switch": "0.0.2",
	"@signozhq/table": "0.3.7",
	"@signozhq/tooltip": "0.0.2",
	"@tanstack/react-table": "8.20.6",
```
```json
@@ -12,5 +12,6 @@
	"pipeline": "Pipeline",
	"pipelines": "Pipelines",
	"archives": "Archives",
	"logs_to_metrics": "Logs To Metrics"
	"logs_to_metrics": "Logs To Metrics",
	"roles": "Roles"
}

@@ -12,5 +12,6 @@
	"pipeline": "Pipeline",
	"pipelines": "Pipelines",
	"archives": "Archives",
	"logs_to_metrics": "Logs To Metrics"
	"logs_to_metrics": "Logs To Metrics",
	"roles": "Roles"
}

@@ -73,5 +73,6 @@
	"API_MONITORING": "SigNoz | External APIs",
	"METER_EXPLORER": "SigNoz | Meter Explorer",
	"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
	"METER": "SigNoz | Meter"
	"METER": "SigNoz | Meter",
	"ROLES_SETTINGS": "SigNoz | Roles"
}
```
```ts
@@ -1,29 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
	CreateIngestionKeyProps,
	IngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const createIngestionKey = async (
	props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
	try {
		const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
			...props,
		});

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
	}
};

export default createIngestionKey;

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';

const deleteIngestionKey = async (
	id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
	try {
		const response = await GatewayApiV1Instance.delete(
			`/workspaces/me/keys/${id}`,
		);

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
	}
};

export default deleteIngestionKey;

@@ -1,21 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
	AllIngestionKeyProps,
	GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

export const getAllIngestionKeys = (
	props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
	// eslint-disable-next-line @typescript-eslint/naming-convention
	const { search, per_page, page } = props;

	const BASE_URL = '/workspaces/me/keys';
	const URL_QUERY_PARAMS =
		search && search.length > 0
			? `/search?name=${search}&page=1&per_page=100`
			: `?page=${page}&per_page=${per_page}`;

	return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
	AddLimitProps,
	LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';

interface SuccessResponse<T> {
	statusCode: number;
	error: null;
	message: string;
	payload: T;
}

interface ErrorResponse {
	statusCode: number;
	error: string;
	message: string;
	payload: null;
}

const createLimitForIngestionKey = async (
	props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
	try {
		const response = await GatewayApiV1Instance.post(
			`/workspaces/me/keys/${props.keyID}/limits`,
			{
				...props,
			},
		);

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
		};
	} catch (error) {
		if (axios.isAxiosError(error)) {
			// Axios error
			const errResponse: ErrorResponse = {
				statusCode: error.response?.status || 500,
				error: error.response?.data?.error,
				message: error.response?.data?.status || 'An error occurred',
				payload: null,
			};

			throw errResponse;
		} else {
			// Non-Axios error
			const errResponse: ErrorResponse = {
				statusCode: 500,
				error: 'Unknown error',
				message: 'An unknown error occurred',
				payload: null,
			};

			throw errResponse;
		}
	}
};

export default createLimitForIngestionKey;

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';

const deleteLimitsForIngestionKey = async (
	id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
	try {
		const response = await GatewayApiV1Instance.delete(
			`/workspaces/me/limits/${id}`,
		);

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
	}
};

export default deleteLimitsForIngestionKey;

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
	LimitSuccessProps,
	UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';

interface SuccessResponse<T> {
	statusCode: number;
	error: null;
	message: string;
	payload: T;
}

interface ErrorResponse {
	statusCode: number;
	error: string;
	message: string;
	payload: null;
}

const updateLimitForIngestionKey = async (
	props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
	try {
		const response = await GatewayApiV1Instance.patch(
			`/workspaces/me/limits/${props.limitID}`,
			{
				config: props.config,
			},
		);

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
		};
	} catch (error) {
		if (axios.isAxiosError(error)) {
			// Axios error
			const errResponse: ErrorResponse = {
				statusCode: error.response?.status || 500,
				error: error.response?.data?.error,
				message: error.response?.data?.status || 'An error occurred',
				payload: null,
			};

			throw errResponse;
		} else {
			// Non-Axios error
			const errResponse: ErrorResponse = {
				statusCode: 500,
				error: 'Unknown error',
				message: 'An unknown error occurred',
				payload: null,
			};

			throw errResponse;
		}
	}
};

export default updateLimitForIngestionKey;

@@ -1,32 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
	IngestionKeysPayloadProps,
	UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const updateIngestionKey = async (
	props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
	try {
		const response = await GatewayApiV1Instance.patch(
			`/workspaces/me/keys/${props.id}`,
			{
				...props.data,
			},
		);

		return {
			statusCode: 200,
			error: null,
			message: response.data.status,
			payload: response.data.data,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
	}
};

export default updateIngestionKey;
```
```ts
@@ -4,8 +4,6 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';

export default apiV1;
```
@@ -1,7 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const getDeploymentsData = (): Promise<
AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);
@@ -1,16 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
PayloadProps,
UpdateCustomDomainProps,
} from 'types/api/customDomain/types';

const updateSubDomainAPI = async (
props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
axios.put(`/deployments/me/host`, {
...props.data,
});

export default updateSubDomainAPI;
@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
* @summary Search ingestion keys for workspace
*/
export const searchIngestionKeys = (
params?: SearchIngestionKeysParams,
params: SearchIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params?: SearchIngestionKeysParams,
params: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params?: SearchIngestionKeysParams,
params: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
*/
export const invalidateSearchIngestionKeys = async (
queryClient: QueryClient,
params?: SearchIngestionKeysParams,
params: SearchIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
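The hunks above make `params` a required argument throughout the generated ingestion-key client (previously `params?:`). A minimal call-shape sketch, assuming the hook is imported from the generated services folder (the import path is illustrative, and any SearchIngestionKeysParams fields beyond `name` are omitted here):

import { useSearchIngestionKeys } from 'api/generated/services/ingestionKeys'; // illustrative path

function useIngestionKeySearch(searchText: string) {
	// `params` must now be passed explicitly; `name` is a required field
	// (see the SearchIngestionKeysParams schema hunk further down).
	return useSearchIngestionKeys({ name: searchText });
}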
@@ -16,6 +16,7 @@ import type {
|
||||
Querybuildertypesv5QueryRangeRequestDTO,
|
||||
QueryRangeV5200,
|
||||
RenderErrorResponseDTO,
|
||||
ReplaceVariables200,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
@@ -105,3 +106,86 @@ export const useQueryRangeV5 = <
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* Replace variables in a query
|
||||
* @summary Replace variables
|
||||
*/
|
||||
export const replaceVariables = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<ReplaceVariables200>({
|
||||
url: `/api/v5/substitute_vars`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getReplaceVariablesMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['replaceVariables'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return replaceVariables(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type ReplaceVariablesMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof replaceVariables>>
|
||||
>;
|
||||
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
|
||||
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Replace variables
|
||||
*/
|
||||
export const useReplaceVariables = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getReplaceVariablesMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
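The generated `replaceVariables` / `useReplaceVariables` pair above wraps `POST /api/v5/substitute_vars`. A minimal consumption sketch; `queryRangeRequest` is an assumed, valid Querybuildertypesv5QueryRangeRequestDTO built elsewhere:

const { mutate: substituteVariables } = useReplaceVariables({
	mutation: {
		onSuccess: (response) => {
			// response.data is the query-range request with variables substituted
			console.log(response.data);
		},
	},
});

substituteVariables({ data: queryRangeRequest });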
@@ -453,18 +453,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id?: string;
|
||||
id: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
value?: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
id?: string;
|
||||
id: string;
|
||||
}
|
||||
|
||||
export interface GatewaytypesGettableIngestionKeysDTO {
|
||||
@@ -616,7 +616,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name?: string;
|
||||
name: string;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
@@ -638,7 +638,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
|
||||
}
|
||||
|
||||
export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
|
||||
config?: GatewaytypesLimitConfigDTO;
|
||||
config: GatewaytypesLimitConfigDTO;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
@@ -2414,6 +2414,91 @@ export interface TypesUserDTO {
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ZeustypesGettableHostDTO {
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
hosts: ZeustypesHostDTO[] | null;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
state: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
tier: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesHostDTO {
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
is_default: boolean;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesPostableHostDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesPostableProfileDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
existing_observability_tool: string;
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
has_existing_observability_tool: boolean;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
logs_scale_per_day_in_gb: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
number_of_hosts: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
number_of_services: number;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
reasons_for_interest_in_signoz: string[] | null;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
timeline_for_migrating_to_signoz: string;
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
uses_otel: boolean;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
where_did_you_discover_signoz: string;
|
||||
}
|
||||
|
||||
export type ChangePasswordPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
@@ -2954,7 +3039,7 @@ export type SearchIngestionKeysParams = {
|
||||
* @type string
|
||||
* @description undefined
|
||||
*/
|
||||
name?: string;
|
||||
name: string;
|
||||
/**
|
||||
* @type integer
|
||||
* @description undefined
|
||||
@@ -3129,6 +3214,14 @@ export type RotateSession200 = {
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type GetHosts200 = {
|
||||
data?: ZeustypesGettableHostDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type QueryRangeV5200 = {
|
||||
data?: Querybuildertypesv5QueryRangeResponseDTO;
|
||||
/**
|
||||
@@ -3136,3 +3229,11 @@ export type QueryRangeV5200 = {
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type ReplaceVariables200 = {
|
||||
data?: Querybuildertypesv5QueryRangeRequestDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
frontend/src/api/generated/services/zeus/index.ts (new file, 269 lines)
@@ -0,0 +1,269 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
QueryClient,
|
||||
QueryFunction,
|
||||
QueryKey,
|
||||
UseMutationOptions,
|
||||
UseMutationResult,
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetHosts200,
|
||||
RenderErrorResponseDTO,
|
||||
ZeustypesPostableHostDTO,
|
||||
ZeustypesPostableProfileDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoint gets the host info from zeus.
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
export const getHosts = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<GetHosts200>({
|
||||
url: `/api/v2/zeus/hosts`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetHostsQueryKey = () => {
|
||||
return ['getHosts'] as const;
|
||||
};
|
||||
|
||||
export const getGetHostsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getHosts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
|
||||
signal,
|
||||
}) => getHosts(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getHosts>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetHostsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getHosts>>
|
||||
>;
|
||||
export type GetHostsQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
|
||||
export function useGetHosts<
|
||||
TData = Awaited<ReturnType<typeof getHosts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetHostsQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
export const invalidateGetHosts = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetHostsQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint saves the host of a deployment to zeus.
|
||||
* @summary Put host in Zeus for a deployment.
|
||||
*/
|
||||
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
|
||||
return GeneratedAPIInstance<void>({
|
||||
url: `/api/v2/zeus/hosts`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: zeustypesPostableHostDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getPutHostMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['putHost'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
{ data: ZeustypesPostableHostDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return putHost(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type PutHostMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof putHost>>
|
||||
>;
|
||||
export type PutHostMutationBody = ZeustypesPostableHostDTO;
|
||||
export type PutHostMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Put host in Zeus for a deployment.
|
||||
*/
|
||||
export const usePutHost = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getPutHostMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint saves the profile of a deployment to zeus.
|
||||
* @summary Put profile in Zeus for a deployment.
|
||||
*/
|
||||
export const putProfile = (
|
||||
zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
|
||||
) => {
|
||||
return GeneratedAPIInstance<void>({
|
||||
url: `/api/v2/zeus/profiles`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: zeustypesPostableProfileDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getPutProfileMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['putProfile'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
{ data: ZeustypesPostableProfileDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return putProfile(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type PutProfileMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof putProfile>>
|
||||
>;
|
||||
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
|
||||
export type PutProfileMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Put profile in Zeus for a deployment.
|
||||
*/
|
||||
export const usePutProfile = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getPutProfileMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
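The new Zeus service above exposes `useGetHosts`, `usePutHost`, and `usePutProfile`. A short consumption sketch inside a component; all field values are placeholders and the profile shape follows ZeustypesPostableProfileDTO from the schema hunk earlier:

const { data: hostsResponse } = useGetHosts();
const hosts = hostsResponse?.data?.hosts ?? [];

const { mutate: saveHost } = usePutHost();
saveHost({ data: { name: 'acme' } }); // ZeustypesPostableHostDTO

const { mutate: saveProfile } = usePutProfile();
saveProfile({
	data: {
		uses_otel: true,
		has_existing_observability_tool: false,
		existing_observability_tool: '',
		number_of_hosts: 10,
		number_of_services: 5,
		logs_scale_per_day_in_gb: 20,
		reasons_for_interest_in_signoz: null,
		timeline_for_migrating_to_signoz: '',
		where_did_you_discover_signoz: '',
	},
});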
@@ -15,15 +15,7 @@ import { Events } from 'constants/events';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { eventEmitter } from 'utils/getEventEmitter';
|
||||
|
||||
import apiV1, {
|
||||
apiAlertManager,
|
||||
apiV2,
|
||||
apiV3,
|
||||
apiV4,
|
||||
apiV5,
|
||||
gatewayApiV1,
|
||||
gatewayApiV2,
|
||||
} from './apiV1';
|
||||
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
|
||||
import { Logout } from './utils';
|
||||
|
||||
const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
|
||||
@@ -211,24 +203,6 @@ LogEventAxiosInstance.interceptors.response.use(
|
||||
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
|
||||
//
|
||||
|
||||
// gateway Api V1
|
||||
export const GatewayApiV1Instance = axios.create({
|
||||
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
|
||||
});
|
||||
|
||||
GatewayApiV1Instance.interceptors.response.use(
|
||||
interceptorsResponse,
|
||||
interceptorRejected,
|
||||
);
|
||||
|
||||
GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
|
||||
//
|
||||
|
||||
// gateway Api V2
|
||||
export const GatewayApiV2Instance = axios.create({
|
||||
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
|
||||
});
|
||||
|
||||
// generated API Instance
|
||||
export const GeneratedAPIInstance = axios.create({
|
||||
baseURL: ENVIRONMENT.baseURL,
|
||||
@@ -240,14 +214,6 @@ GeneratedAPIInstance.interceptors.response.use(
|
||||
interceptorRejected,
|
||||
);
|
||||
|
||||
GatewayApiV2Instance.interceptors.response.use(
|
||||
interceptorsResponse,
|
||||
interceptorRejected,
|
||||
);
|
||||
|
||||
GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
|
||||
//
|
||||
|
||||
AxiosAlertManagerInstance.interceptors.response.use(
|
||||
interceptorsResponse,
|
||||
interceptorRejected,
|
||||
|
||||
@@ -11,13 +11,10 @@ export const getMetricMetadata = async (
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
try {
const encodedMetricName = encodeURIComponent(metricName);
const response = await axios.get(
`/metrics/metadata?metricName=${encodedMetricName}`,
{
signal,
headers,
},
);
const response = await axios.get(`/metrics/${encodedMetricName}/metadata`, {
signal,
headers,
});

return {
httpStatusCode: response.status,
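The hunk above moves the metric name out of the query string and into the URL path. The equivalent request shapes, for reference (the metric name is a placeholder):

const metricName = 'signoz_calls_total'; // placeholder
const encoded = encodeURIComponent(metricName);

// before: GET /metrics/metadata?metricName=<encoded>
// after:  GET /metrics/<encoded>/metadata
const metadataUrl = `/metrics/${encoded}/metadata`;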
@@ -1,20 +0,0 @@
|
||||
import { GatewayApiV2Instance } from 'api';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { UpdateProfileProps } from 'types/api/onboarding/types';
|
||||
|
||||
const updateProfile = async (
|
||||
props: UpdateProfileProps,
|
||||
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
|
||||
const response = await GatewayApiV2Instance.put('/profiles/me', {
|
||||
...props,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
};
|
||||
|
||||
export default updateProfile;
|
||||
@@ -1,19 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
|
||||
const deleteDomain = async (id: string): Promise<SuccessResponseV2<null>> => {
|
||||
try {
|
||||
const response = await axios.delete<null>(`/domains/${id}`);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: null,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default deleteDomain;
|
||||
@@ -1,25 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
|
||||
import { UpdatableAuthDomain } from 'types/api/v1/domains/put';
|
||||
|
||||
const put = async (
|
||||
props: UpdatableAuthDomain,
|
||||
): Promise<SuccessResponseV2<null>> => {
|
||||
try {
|
||||
const response = await axios.put<RawSuccessResponse<null>>(
|
||||
`/domains/${props.id}`,
|
||||
{ config: props.config },
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default put;
|
||||
@@ -1,24 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
|
||||
import { GettableAuthDomain } from 'types/api/v1/domains/list';
|
||||
|
||||
const listAllDomain = async (): Promise<
|
||||
SuccessResponseV2<GettableAuthDomain[]>
|
||||
> => {
|
||||
try {
|
||||
const response = await axios.get<RawSuccessResponse<GettableAuthDomain[]>>(
|
||||
`/domains`,
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default listAllDomain;
|
||||
@@ -1,26 +0,0 @@
|
||||
import axios from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
|
||||
import { GettableAuthDomain } from 'types/api/v1/domains/list';
|
||||
import { PostableAuthDomain } from 'types/api/v1/domains/post';
|
||||
|
||||
const post = async (
|
||||
props: PostableAuthDomain,
|
||||
): Promise<SuccessResponseV2<GettableAuthDomain>> => {
|
||||
try {
|
||||
const response = await axios.post<RawSuccessResponse<GettableAuthDomain>>(
|
||||
`/domains`,
|
||||
props,
|
||||
);
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default post;
|
||||
frontend/src/auto-import-registry.d.ts (vendored, 1 line changed)
@@ -24,5 +24,6 @@ import '@signozhq/popover';
|
||||
import '@signozhq/radio-group';
|
||||
import '@signozhq/resizable';
|
||||
import '@signozhq/sonner';
|
||||
import '@signozhq/switch';
|
||||
import '@signozhq/table';
|
||||
import '@signozhq/tooltip';
|
||||
|
||||
@@ -42,6 +42,7 @@
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 16px;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
|
||||
.title {
|
||||
@@ -190,7 +191,7 @@
|
||||
padding: 0px;
|
||||
}
|
||||
.log-detail-drawer__footer-hint {
|
||||
position: absolute;
|
||||
position: sticky;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
@@ -366,7 +367,7 @@
|
||||
}
|
||||
|
||||
.log-detail-drawer__footer-hint {
|
||||
position: absolute;
|
||||
position: sticky;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
|
||||
@@ -55,6 +55,7 @@ const ROUTES = {
|
||||
LOGS_INDEX_FIELDS: '/logs-explorer/index-fields',
|
||||
TRACE_EXPLORER: '/trace-explorer',
|
||||
BILLING: '/settings/billing',
|
||||
ROLES_SETTINGS: '/settings/roles',
|
||||
SUPPORT: '/support',
|
||||
LOGS_SAVE_VIEWS: '/logs/saved-views',
|
||||
TRACES_SAVE_VIEWS: '/traces/saved-views',
|
||||
|
||||
@@ -202,7 +202,7 @@ function AllEndPoints({
|
||||
|
||||
const onRowClick = useCallback(
|
||||
(props: any): void => {
|
||||
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
|
||||
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
|
||||
setSelectedView(VIEWS.ENDPOINT_STATS);
|
||||
const initialItems = [
|
||||
...(filters?.items || []),
|
||||
@@ -213,7 +213,7 @@ function AllEndPoints({
|
||||
op: 'AND',
|
||||
});
|
||||
setParams({
|
||||
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
|
||||
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
|
||||
selectedView: VIEWS.ENDPOINT_STATS,
|
||||
endPointDetailsLocalFilters: {
|
||||
items: initialItems,
|
||||
|
||||
@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
|
||||
|
||||
const httpUrlKey = {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'tag',
|
||||
};
|
||||
|
||||
@@ -93,7 +93,7 @@ function EndPointDetails({
|
||||
return currentFilters; // No change needed, prevents loop
|
||||
}
|
||||
|
||||
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
|
||||
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
|
||||
const otherFilters = currentFilters?.items?.filter(
|
||||
(item) => item.key?.key !== httpUrlKey.key,
|
||||
);
|
||||
@@ -125,7 +125,7 @@ function EndPointDetails({
|
||||
(newFilters: IBuilderQuery['filters']): void => {
|
||||
// 1. Update local filters state immediately
|
||||
setFilters(newFilters);
|
||||
// Filter out http.url filter before saving to params
|
||||
// Filter out http_url filter before saving to params
|
||||
const filteredNewFilters = {
|
||||
op: 'AND',
|
||||
items:
|
||||
@@ -299,7 +299,6 @@ function EndPointDetails({
|
||||
endPointStatusCodeLatencyBarChartsDataQuery
|
||||
}
|
||||
domainName={domainName}
|
||||
endPointName={endPointName}
|
||||
filters={filters}
|
||||
timeRange={timeRange}
|
||||
onDragSelect={onDragSelect}
|
||||
|
||||
@@ -56,15 +56,15 @@ function TopErrors({
|
||||
{
|
||||
items: endPointName
|
||||
? [
|
||||
// Remove any existing http.url filters from initialFilters to avoid duplicates
|
||||
// Remove any existing http_url filters from initialFilters to avoid duplicates
|
||||
...(initialFilters?.items?.filter(
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
|
||||
) || []),
|
||||
{
|
||||
id: '92b8a1c1',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
|
||||
@@ -9,6 +9,7 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../constants';
|
||||
import DomainMetrics from './DomainMetrics';
|
||||
|
||||
// Mock the API call
|
||||
@@ -126,11 +127,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
'count()',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryA.filter.expression).toContain(
|
||||
'url.full EXISTS OR http.url EXISTS',
|
||||
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
|
||||
// Verify Query B - p99 latency
|
||||
@@ -142,17 +141,13 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
'p99(duration_nano)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
|
||||
// Verify Query C - error count (disabled)
|
||||
const queryC = queryData.find((q: any) => q.queryName === 'C');
|
||||
expect(queryC).toBeDefined();
|
||||
expect(queryC.disabled).toBe(true);
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
expect(queryC.aggregations?.[0]).toBeDefined();
|
||||
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
|
||||
'count()',
|
||||
@@ -169,9 +164,7 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
|
||||
'max(timestamp)',
|
||||
);
|
||||
// Verify exact domain filter expression structure
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
|
||||
|
||||
// Verify Formula F1 - error rate calculation
|
||||
const formulas = payload.query.builder.queryFormulas;
|
||||
|
||||
@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryA.filter) {
|
||||
expect(queryA.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryB.filter) {
|
||||
expect(queryB.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
expect(queryC.aggregateOperator).toBe('count');
|
||||
if (queryC.filter) {
|
||||
expect(queryC.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
|
||||
expect(queryC.filter.expression).toContain('has_error = true');
|
||||
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
// Verify exact domain filter expression structure
|
||||
if (queryD.filter) {
|
||||
expect(queryD.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
}
|
||||
if (queryE.filter) {
|
||||
expect(queryE.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
|
||||
`http_host = 'api.example.com'`,
|
||||
);
|
||||
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
|
||||
}
|
||||
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
|
||||
expect(query.filter.expression).toContain('staging');
|
||||
// Also verify domain filter is still present
|
||||
expect(query.filter.expression).toContain(
|
||||
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
|
||||
"http_host = 'api.internal.com'",
|
||||
);
|
||||
// Verify client kind filter is present
|
||||
expect(query.filter.expression).toContain("kind_string = 'Client'");
|
||||
|
||||
@@ -34,7 +34,6 @@ function StatusCodeBarCharts({
|
||||
endPointStatusCodeBarChartsDataQuery,
|
||||
endPointStatusCodeLatencyBarChartsDataQuery,
|
||||
domainName,
|
||||
endPointName,
|
||||
filters,
|
||||
timeRange,
|
||||
onDragSelect,
|
||||
@@ -48,7 +47,6 @@ function StatusCodeBarCharts({
|
||||
unknown
|
||||
>;
|
||||
domainName: string;
|
||||
endPointName: string;
|
||||
filters: IBuilderQuery['filters'];
|
||||
timeRange: {
|
||||
startTime: number;
|
||||
@@ -144,11 +142,11 @@ function StatusCodeBarCharts({

const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, endPointName, {
getStatusCodeBarChartWidgetData(domainName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, endPointName, filters],
[domainName, filters],
);

const graphClickHandler = useCallback(
@@ -166,6 +164,7 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);

handleGraphClick({
xValue,
yValue,
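As the hunk above shows, `getStatusCodeBarChartWidgetData` no longer takes `endPointName`; endpoint scoping now appears to travel in through the filters instead. A sketch of the updated call, reusing the filter-item shape seen in the AllEndPoints/TopErrors hunks earlier (the filter id is illustrative; SPAN_ATTRIBUTES and DataTypes imports are assumed):

const widget = getStatusCodeBarChartWidgetData(domainName, {
	items: [
		...(filters?.items || []),
		{
			id: 'endpoint-filter', // illustrative id
			key: { key: SPAN_ATTRIBUTES.HTTP_URL, dataType: DataTypes.String, type: 'tag' },
			op: '=',
			value: endPointName,
		},
	],
	op: filters?.op || 'AND',
});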
@@ -12,7 +12,7 @@ export const VIEW_TYPES = {

// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
URL_PATH: 'http.url',
HTTP_URL: 'http_url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
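With the renamed constants above, the domain and endpoint filters asserted throughout the test hunks below can be built from SPAN_ATTRIBUTES rather than hard-coded strings. A small sketch mirroring those assertions (the domain is a placeholder):

const domainName = 'api.example.com'; // placeholder
const baseExpression = `${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}' AND kind_string = 'Client'`;
const withEndpointExists = `${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
// e.g. "http_host = 'api.example.com' AND kind_string = 'Client' AND http_url EXISTS"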
@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
|
||||
const endpointFilter = result?.items?.find(
|
||||
(item) =>
|
||||
item.key &&
|
||||
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
|
||||
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
|
||||
item.value === endPointName,
|
||||
);
|
||||
expect(endpointFilter).toBeDefined();
|
||||
@@ -344,13 +344,12 @@ describe('API Monitoring Utils', () => {
|
||||
describe('getFormattedEndPointDropDownData', () => {
|
||||
it('should format endpoint dropdown data correctly', () => {
|
||||
// Arrange
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
|
||||
const mockData = [
|
||||
{
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/users',
|
||||
'url.full': 'http://example.com/api/users',
|
||||
A: 150, // count or other metric
|
||||
},
|
||||
},
|
||||
@@ -358,7 +357,6 @@ describe('API Monitoring Utils', () => {
|
||||
data: {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
[URL_PATH_KEY]: '/api/orders',
|
||||
'url.full': 'http://example.com/api/orders',
|
||||
A: 75,
|
||||
},
|
||||
},
|
||||
@@ -406,7 +404,7 @@ describe('API Monitoring Utils', () => {
|
||||
|
||||
it('should handle items without URL path', () => {
|
||||
// Arrange
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
|
||||
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
|
||||
type MockDataType = {
|
||||
data: {
|
||||
[key: string]: string | number;
|
||||
@@ -712,13 +710,11 @@ describe('API Monitoring Utils', () => {
|
||||
it('should generate widget configuration for status code bar chart', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const filters = { items: [], op: 'AND' };
|
||||
|
||||
// Act
|
||||
const result = getStatusCodeBarChartWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
@@ -741,21 +737,11 @@ describe('API Monitoring Utils', () => {
|
||||
if (domainFilter) {
|
||||
expect(domainFilter.value).toBe(domainName);
|
||||
}
|
||||
|
||||
// Should have endpoint filter if provided
|
||||
const endpointFilter = queryData.filters?.items?.find(
|
||||
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
|
||||
);
|
||||
expect(endpointFilter).toBeDefined();
|
||||
if (endpointFilter) {
|
||||
expect(endpointFilter.value).toBe(endPointName);
|
||||
}
|
||||
});
|
||||
|
||||
it('should include custom filters in the widget configuration', () => {
|
||||
// Arrange
|
||||
const domainName = 'test-domain';
|
||||
const endPointName = '/api/test';
|
||||
const customFilter = {
|
||||
id: 'custom-filter',
|
||||
key: {
|
||||
@@ -771,7 +757,6 @@ describe('API Monitoring Utils', () => {
|
||||
// Act
|
||||
const result = getStatusCodeBarChartWidgetData(
|
||||
domainName,
|
||||
endPointName,
|
||||
filters as IBuilderQuery['filters'],
|
||||
);
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
|
||||
type="button"
|
||||
data-testid="row-click-button"
|
||||
onClick={(): void =>
|
||||
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
|
||||
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
|
||||
}
|
||||
>
|
||||
Click Row
|
||||
|
||||
@@ -6,10 +6,10 @@
|
||||
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - Aggregation format: aggregateAttribute → aggregations[] array
|
||||
* - Domain filter: (net.peer.name OR server.address)
|
||||
* - Domain filter: http_host = '${domainName}'
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
|
||||
* - GroupBy: Both http.url AND url.full with type 'attribute'
|
||||
* - GroupBy: http_url with type 'attribute'
|
||||
*/
|
||||
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
|
||||
import {
|
||||
@@ -18,6 +18,8 @@ import {
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
const mockDomainName = 'api.example.com';
|
||||
const emptyFilters: IBuilderQuery['filters'] = {
|
||||
@@ -92,28 +94,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
|
||||
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
|
||||
|
||||
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
|
||||
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
|
||||
|
||||
// Queries A, B, C have identical base filter
|
||||
expect(queryA.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
expect(queryB.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
expect(queryC.filter?.expression).toBe(
|
||||
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
|
||||
// Query D has additional has_error filter
|
||||
expect(queryD.filter?.expression).toBe(
|
||||
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('2. GroupBy Structure', () => {
|
||||
it('default groupBy includes both http.url and url.full with type attribute', () => {
|
||||
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
|
||||
const widget = getAllEndpointsWidgetData(
|
||||
emptyGroupBy,
|
||||
mockDomainName,
|
||||
@@ -124,23 +126,13 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
|
||||
// All queries should have the same default groupBy
|
||||
queryData.forEach((query) => {
|
||||
expect(query.groupBy).toHaveLength(2);
|
||||
expect(query.groupBy).toHaveLength(1);
|
||||
|
||||
// http.url
|
||||
expect(query.groupBy).toContainEqual({
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'http.url',
|
||||
type: 'attribute',
|
||||
});
|
||||
|
||||
// url.full
|
||||
expect(query.groupBy).toContainEqual({
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'url.full',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'attribute',
|
||||
});
|
||||
});
|
||||
@@ -170,19 +162,18 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
|
||||
|
||||
// All queries should have defaults + custom groupBy
|
||||
queryData.forEach((query) => {
|
||||
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
|
||||
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
|
||||
|
||||
// First two should be defaults (http.url, url.full)
|
||||
expect(query.groupBy[0].key).toBe('http.url');
|
||||
expect(query.groupBy[1].key).toBe('url.full');
|
||||
// First should be the default (http_url)
|
||||
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
|
||||
|
||||
// Last two should be custom (matching subset of properties)
|
||||
expect(query.groupBy[2]).toMatchObject({
|
||||
expect(query.groupBy[1]).toMatchObject({
|
||||
dataType: DataTypes.String,
|
||||
key: 'service.name',
|
||||
type: 'resource',
|
||||
});
|
||||
expect(query.groupBy[3]).toMatchObject({
|
||||
expect(query.groupBy[2]).toMatchObject({
|
||||
dataType: DataTypes.String,
|
||||
key: 'deployment.environment',
|
||||
type: 'resource',
|
||||
|
||||
@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
value: '/api/test',
|
||||
}),
|
||||
]),
|
||||
|
||||
@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
|
||||
|
||||
it('formats data using the utility function', () => {
|
||||
const mockRows = [
|
||||
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
|
||||
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
|
||||
];
|
||||
|
||||
const dataProps = {
|
||||
|
||||
@@ -6,15 +6,18 @@
|
||||
* These tests validate the migration from V4 to V5 format for the third payload
|
||||
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - Domain handling: (net.peer.name OR server.address)
|
||||
* - Domain handling: http_host = '${domainName}'
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - Existence check: (http.url EXISTS OR url.full EXISTS)
|
||||
* - Existence check: http_url EXISTS
|
||||
* - Aggregation: count() expression
|
||||
* - GroupBy: Both http.url AND url.full with type 'attribute'
|
||||
* - GroupBy: http_url with type 'attribute'
|
||||
*/
|
||||
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
|
||||
|
||||
describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
const mockDomainName = 'api.example.com';
|
||||
const mockStartTime = 1000;
|
||||
@@ -43,9 +46,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain http_host = '${domainName}'
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -53,7 +56,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
|
||||
// Base filter 3: Existence check
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
'(http.url EXISTS OR url.full EXISTS)',
|
||||
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
);
|
||||
|
||||
// V5 Aggregation format: aggregations array (not aggregateAttribute)
|
||||
@@ -64,16 +67,11 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
});
|
||||
expect(queryA).not.toHaveProperty('aggregateAttribute');
|
||||
|
||||
// GroupBy: Both http.url and url.full
|
||||
expect(queryA.groupBy).toHaveLength(2);
|
||||
// GroupBy: http_url
|
||||
expect(queryA.groupBy).toHaveLength(1);
|
||||
expect(queryA.groupBy).toContainEqual({
|
||||
key: 'http.url',
|
||||
dataType: 'string',
|
||||
type: 'attribute',
|
||||
});
|
||||
expect(queryA.groupBy).toContainEqual({
|
||||
key: 'url.full',
|
||||
dataType: 'string',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
});
|
||||
});
|
||||
@@ -120,53 +118,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
|
||||
|
||||
// Exact filter expression with custom filters merged
|
||||
expect(expression).toBe(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('3. HTTP URL Filter Special Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/users',
|
||||
},
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const dropdownQuery = payload[2];
|
||||
const expression =
|
||||
dropdownQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: Exact filter expression with http.url converted to OR logic
|
||||
expect(expression).toBe(
|
||||
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
|
||||
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
expect(queryData).not.toHaveProperty('filters.items');
|
||||
});
|
||||
|
||||
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
|
||||
it('uses new domain filter format: (http_host)', () => {
|
||||
const widget = getRateOverTimeWidgetData(
|
||||
mockDomainName,
|
||||
mockEndpointName,
|
||||
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
|
||||
// Verify EXACT new filter format with OR operator
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Endpoint name is used in legend, not filter
|
||||
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
|
||||
// Verify domain filter is present
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Verify custom filters are merged into the expression
|
||||
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
expect(queryData).not.toHaveProperty('filters.items');
|
||||
});
|
||||
|
||||
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
|
||||
it('uses new domain filter format: (http_host)', () => {
|
||||
const widget = getLatencyOverTimeWidgetData(
|
||||
mockDomainName,
|
||||
mockEndpointName,
|
||||
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
// Verify EXACT new filter format with OR operator
|
||||
expect(queryData.filter).toBeDefined();
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Endpoint name is used in legend, not filter
|
||||
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
|
||||
|
||||
// Verify domain filter is present
|
||||
expect(queryData?.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
|
||||
`http_host = '${mockDomainName}' service.name = 'user-service'`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -142,7 +142,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endTime: 1609545600000,
|
||||
};
|
||||
const mockDomainName = 'test-domain';
|
||||
const mockEndPointName = '/api/test';
|
||||
const onDragSelectMock = jest.fn();
|
||||
const refetchFn = jest.fn();
|
||||
|
||||
@@ -232,7 +231,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -268,7 +266,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -311,7 +308,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -356,7 +352,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -404,7 +399,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockFilters}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -419,7 +413,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
// but we've confirmed the function is mocked and ready to be tested
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: [],
|
||||
op: 'AND',
|
||||
@@ -467,7 +460,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
|
||||
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
|
||||
domainName={mockDomainName}
|
||||
endPointName={mockEndPointName}
|
||||
filters={mockCustomFilters as IBuilderQuery['filters']}
|
||||
timeRange={mockTimeRange}
|
||||
onDragSelect={onDragSelectMock}
|
||||
@@ -477,7 +469,6 @@ describe('StatusCodeBarCharts', () => {
|
||||
// Assert widget creation was called with the correct parameters
|
||||
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
|
||||
mockDomainName,
|
||||
mockEndPointName,
|
||||
expect.objectContaining({
|
||||
items: expect.arrayContaining([
|
||||
expect.objectContaining({ id: 'custom-filter' }),
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
*
|
||||
* V5 Changes:
|
||||
* - Filter format change: filters.items[] → filter.expression
|
||||
* - Domain filter: (net.peer.name OR server.address)
|
||||
* - Domain filter: (http_host)
|
||||
* - Kind filter: kind_string = 'Client'
|
||||
* - stepInterval: 60 → null
|
||||
* - Grouped by response_status_code
|
||||
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain (http_host)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain (http_host)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(callsExpression).toBe(latencyExpression);
|
||||
|
||||
// Verify base filters
|
||||
expect(callsExpression).toContain('net.peer.name');
|
||||
expect(callsExpression).toContain('http_host');
|
||||
expect(callsExpression).toContain("kind_string = 'Client'");
|
||||
|
||||
// Verify custom filters are merged
|
||||
@@ -187,51 +187,4 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(callsExpression).toContain('production');
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. HTTP URL Filter Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/metrics',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const callsChartQuery = payload[4];
|
||||
const latencyChartQuery = payload[5];
|
||||
|
||||
const callsExpression =
|
||||
callsChartQuery.query.builder.queryData[0].filter?.expression;
|
||||
const latencyExpression =
|
||||
latencyChartQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: http.url converted to OR logic
|
||||
expect(callsExpression).toContain(
|
||||
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
|
||||
);
|
||||
expect(latencyExpression).toContain(
|
||||
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
|
||||
);
|
||||
|
||||
// Base filters still present
|
||||
expect(callsExpression).toContain('net.peer.name');
|
||||
expect(callsExpression).toContain("kind_string = 'Client'");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - Kind filter: kind_string = 'Client'
* - Kind filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
// Base filter 1: Domain (http_host)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
`http_host = '${mockDomainName}'`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
statusCodeQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// Base filters present
|
||||
expect(expression).toContain('net.peer.name');
|
||||
expect(expression).toContain('http_host');
|
||||
expect(expression).toContain("kind_string = 'Client'");
|
||||
expect(expression).toContain('response_status_code EXISTS');
|
||||
|
||||
@@ -165,62 +165,4 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. HTTP URL Filter Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/users',
|
||||
},
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const statusCodeQuery = payload[1];
|
||||
const expression =
|
||||
statusCodeQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: http.url converted to OR logic
|
||||
expect(expression).toContain(
|
||||
"(http.url = '/api/users' OR url.full = '/api/users')",
|
||||
);
|
||||
|
||||
// Other filters still present
|
||||
expect(expression).toContain('service.name');
|
||||
expect(expression).toContain('user-service');
|
||||
|
||||
// Base filters present
|
||||
expect(expression).toContain('net.peer.name');
|
||||
expect(expression).toContain("kind_string = 'Client'");
|
||||
expect(expression).toContain('response_status_code EXISTS');
|
||||
|
||||
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
|
||||
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -84,7 +84,7 @@ describe('TopErrors', () => {
|
||||
{
|
||||
columns: [
|
||||
{
|
||||
name: 'http.url',
|
||||
name: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
@@ -124,7 +124,7 @@ describe('TopErrors', () => {
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
'http.url': '/api/test',
|
||||
http_url: '/api/test',
|
||||
A: 100,
|
||||
},
|
||||
],
|
||||
@@ -206,7 +206,7 @@ describe('TopErrors', () => {
|
||||
expect(navigateMock).toHaveBeenCalledWith({
|
||||
filters: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: 'http.url' }),
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
op: '=',
|
||||
value: '/api/test',
|
||||
}),
|
||||
@@ -335,7 +335,7 @@ describe('TopErrors', () => {

// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
);
});
});

@@ -15,7 +15,6 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
|
||||
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
|
||||
import dayjs from 'dayjs';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
|
||||
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
|
||||
@@ -57,12 +56,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);

export const getDomainNameFilterExpression = (domainName: string): string =>
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
`http_host = '${domainName}'`;

export const clientKindExpression = `kind_string = 'Client'`;

/**
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* Converts filters to expression
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
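A one-line usage sketch of the simplified helpers above (the domain name is illustrative):

// Old return value: "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')"
// New return value: "http_host = 'api.example.com'"
const baseExpression = `${getDomainNameFilterExpression('api.example.com')} AND ${clientKindExpression}`;
// => "http_host = 'api.example.com' AND kind_string = 'Client'"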
@@ -75,34 +74,6 @@ export const convertFiltersWithUrlHandling = (
|
||||
return baseExpression;
|
||||
}
|
||||
|
||||
// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
|
||||
const httpUrlFilter = filters.items?.find(
|
||||
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
|
||||
);
|
||||
|
||||
// If http.url filter exists, create modified filters with (http.url OR url.full)
|
||||
if (httpUrlFilter && httpUrlFilter.value) {
|
||||
// Remove ALL http.url filters from items (guards against duplicates)
|
||||
const otherFilters = filters.items?.filter(
|
||||
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
|
||||
);
|
||||
|
||||
// Convert to expression first with other filters
|
||||
const {
|
||||
filter: intermediateFilter,
|
||||
} = convertFiltersToExpressionWithExistingQuery(
|
||||
{ ...filters, items: otherFilters || [] },
|
||||
baseExpression,
|
||||
);
|
||||
|
||||
// Add the OR condition for http.url and url.full
|
||||
const urlValue = httpUrlFilter.value;
|
||||
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
|
||||
return intermediateFilter.expression.trim()
|
||||
? `${intermediateFilter.expression} AND ${urlCondition}`
|
||||
: urlCondition;
|
||||
}
|
||||
|
||||
const { filter } = convertFiltersToExpressionWithExistingQuery(
|
||||
filters,
|
||||
baseExpression,
|
||||
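With the (http.url OR url.full) branch deleted in the hunk above, the surviving body of convertFiltersWithUrlHandling is not fully visible here; it presumably reduces to something close to the sketch below, where the empty-items guard, the final return, and the imports are assumptions rather than lines from this diff.

// Sketch only; assumes the file's existing imports for IBuilderQuery and
// convertFiltersToExpressionWithExistingQuery.
const convertFiltersWithUrlHandlingSketch = (
	filters: IBuilderQuery['filters'],
	baseExpression: string,
): string => {
	// Assumed guard: nothing to merge, keep the base expression as-is.
	if (!filters?.items?.length) {
		return baseExpression;
	}
	// No special-casing of http.url any more; filters are converted directly.
	const { filter } = convertFiltersToExpressionWithExistingQuery(filters, baseExpression);
	return filter.expression;
};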
@@ -371,7 +342,7 @@ export const formatDataForTable = (
});
};

const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;

export const getDomainMetricsQueryPayload = (
domainName: string,
@@ -588,14 +559,7 @@ const defaultGroupBy = [
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'url.full',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
type: 'attribute',
|
||||
},
|
||||
// {
|
||||
@@ -867,8 +831,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`has_error = true`,
];
if (showStatusCodeErrors) {
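Assuming SPAN_ATTRIBUTES.HTTP_URL resolves to http_url and SPAN_ATTRIBUTES.SERVER_NAME to http_host (consistent with the literal expressions used elsewhere in this diff), the rebuilt base expression for a domain comes out as:

// Illustrative result of joining the new baseFilterParts with ' AND ':
const topErrorsBaseExpression =
	"kind_string = 'Client' AND http_url EXISTS AND http_host = 'test-domain' AND has_error = true";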
@@ -910,12 +874,7 @@ export const getTopErrorsQueryPayload = (
|
||||
filter: { expression: filterExpression },
|
||||
groupBy: [
|
||||
{
|
||||
name: 'http.url',
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
{
|
||||
name: 'url.full',
|
||||
name: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
@@ -1134,11 +1093,11 @@ export const formatEndPointsDataForTable = (
|
||||
if (!isGroupedByAttribute) {
|
||||
formattedData = data?.map((endpoint) => {
|
||||
const { port } = extractPortAndEndpoint(
|
||||
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
|
||||
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
|
||||
);
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
|
||||
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
|
||||
port,
|
||||
callCount:
|
||||
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
|
||||
@@ -1262,9 +1221,7 @@ export const formatTopErrorsDataForTable = (
|
||||
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: getDisplayValue(
|
||||
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
|
||||
),
|
||||
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
|
||||
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
|
||||
statusMessage: getDisplayValue(rowObj.status_message),
|
||||
count: getDisplayValue(rowObj.__result_0),
|
||||
@@ -1281,10 +1238,10 @@ export const getTopErrorsCoRelationQueryFilters = (
|
||||
{
|
||||
id: 'ea16470b',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
id: 'http.url--string--tag--false',
|
||||
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
@@ -1781,7 +1738,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
filters || { items: [], op: 'AND' },
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
),
|
||||
},
|
||||
expression: 'A',
|
||||
@@ -1793,12 +1750,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
key: 'url.full',
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
},
|
||||
@@ -2225,7 +2177,7 @@ export const getEndPointZeroStateQueryPayload = (
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -2419,8 +2371,7 @@ export const statusCodeWidgetInfo = [
|
||||
|
||||
interface EndPointDropDownResponseRow {
|
||||
data: {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: string;
|
||||
'url.full': string;
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: string;
|
||||
A: number;
|
||||
};
|
||||
}
|
||||
@@ -2439,8 +2390,8 @@ export const getFormattedEndPointDropDownData = (
|
||||
}
|
||||
return data.map((row) => ({
|
||||
key: v4(),
|
||||
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
|
||||
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
|
||||
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
|
||||
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
|
||||
}));
|
||||
};
|
||||
|
||||
@@ -2769,7 +2720,6 @@ export const groupStatusCodes = (
|
||||
|
||||
export const getStatusCodeBarChartWidgetData = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): Widgets => ({
|
||||
query: {
|
||||
@@ -2798,20 +2748,6 @@ export const getStatusCodeBarChartWidgetData = (
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
...(endPointName
|
||||
? [
|
||||
{
|
||||
id: '8b1be6f0',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(filters?.items || []),
|
||||
],
|
||||
op: 'AND',
|
||||
@@ -2933,7 +2869,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2965,7 +2901,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2997,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3029,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3060,24 +2996,12 @@ export const getAllEndpointsWidgetData = (
|
||||
);
|
||||
|
||||
widget.renderColumnCell = {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: (
|
||||
url: string | number,
|
||||
record?: RowData,
|
||||
): ReactNode => {
|
||||
// First try to use the url from the column value
|
||||
let urlValue = url;
|
||||
|
||||
// If url is empty/null and we have the record, fallback to url.full
|
||||
if (isEmptyFilterValue(url) && record) {
|
||||
const { 'url.full': urlFull } = record;
|
||||
urlValue = urlFull;
|
||||
}
|
||||
|
||||
if (!urlValue || urlValue === 'n/a') {
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
|
||||
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
|
||||
return <span>-</span>;
|
||||
}
|
||||
|
||||
const { endpoint } = extractPortAndEndpoint(String(urlValue));
|
||||
const { endpoint } = extractPortAndEndpoint(String(url));
|
||||
return <span>{getDisplayValue(endpoint)}</span>;
|
||||
},
|
||||
A: (numOfCalls: any): ReactNode => (
|
||||
@@ -3132,8 +3056,8 @@ export const getAllEndpointsWidgetData = (
|
||||
};
|
||||
|
||||
widget.customColTitles = {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
|
||||
'net.peer.port': 'Port',
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
|
||||
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
|
||||
};
|
||||
|
||||
widget.title = (
|
||||
@@ -3158,12 +3082,10 @@ export const getAllEndpointsWidgetData = (
|
||||
</div>
|
||||
);
|
||||
|
||||
widget.hiddenColumns = ['url.full'];
|
||||
|
||||
return widget;
|
||||
};
|
||||
|
||||
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
|
||||
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
|
||||
|
||||
export const getGroupByFiltersFromGroupByValues = (
|
||||
rowData: any,
|
||||
@@ -3221,7 +3143,7 @@ export const getRateOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
`http_host = '${domainName}'`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3272,7 +3194,7 @@ export const getLatencyOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
`http_host = '${domainName}'`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
/* eslint-disable jsx-a11y/no-static-element-interactions */
|
||||
/* eslint-disable jsx-a11y/click-events-have-key-events */
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import {
|
||||
@@ -15,14 +14,16 @@ import {
|
||||
Tag,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
|
||||
import {
|
||||
RenderErrorResponseDTO,
|
||||
ZeustypesHostDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
|
||||
import { AxiosError } from 'axios';
|
||||
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
|
||||
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { InfoIcon, Link2, Pencil } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { HostsProps } from 'types/api/customDomain/types';
|
||||
|
||||
import './CustomDomainSettings.styles.scss';
|
||||
|
||||
@@ -35,7 +36,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
const { notifications } = useNotifications();
|
||||
const [isEditModalOpen, setIsEditModalOpen] = useState(false);
|
||||
const [isPollingEnabled, setIsPollingEnabled] = useState(false);
|
||||
const [hosts, setHosts] = useState<HostsProps[] | null>(null);
|
||||
const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);
|
||||
|
||||
const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
|
||||
null,
|
||||
@@ -57,36 +58,37 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
};
|
||||
|
||||
const {
|
||||
data: deploymentsData,
|
||||
isLoading: isLoadingDeploymentsData,
|
||||
isFetching: isFetchingDeploymentsData,
|
||||
refetch: refetchDeploymentsData,
|
||||
} = useGetDeploymentsData(true);
|
||||
data: hostsData,
|
||||
isLoading: isLoadingHosts,
|
||||
isFetching: isFetchingHosts,
|
||||
refetch: refetchHosts,
|
||||
} = useGetHosts();
|
||||
|
||||
const {
|
||||
mutate: updateSubDomain,
|
||||
isLoading: isLoadingUpdateCustomDomain,
|
||||
} = useMutation(updateSubDomainAPI, {
|
||||
onSuccess: () => {
|
||||
setIsPollingEnabled(true);
|
||||
refetchDeploymentsData();
|
||||
setIsEditModalOpen(false);
|
||||
},
|
||||
onError: (error: AxiosError) => {
|
||||
setUpdateDomainError(error);
|
||||
setIsPollingEnabled(false);
|
||||
},
|
||||
});
|
||||
} = usePutHost<AxiosError<RenderErrorResponseDTO>>();
|
||||
|
||||
const stripProtocol = (url: string): string => {
return url?.split('://')[1] ?? url;
};

const dnsSuffix = useMemo(() => {
const defaultHost = hosts?.find((h) => h.is_default);
return defaultHost?.url && defaultHost?.name
? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
: '';
}, [hosts]);

useEffect(() => {
|
||||
if (isFetchingDeploymentsData) {
|
||||
if (isFetchingHosts) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (deploymentsData?.data?.status === 'success') {
|
||||
setHosts(deploymentsData.data.data.hosts);
|
||||
if (hostsData?.data?.status === 'success') {
|
||||
setHosts(hostsData?.data?.data?.hosts ?? null);
|
||||
|
||||
const activeCustomDomain = deploymentsData.data.data.hosts.find(
|
||||
const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
|
||||
(host) => !host.is_default,
|
||||
);
|
||||
|
||||
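A worked example of the new DNS-suffix derivation, using the host shape from the test mock further down in this diff:

const defaultHost = {
	name: 'accepted-starfish',
	is_default: true,
	url: 'https://accepted-starfish.test.cloud',
};

// stripProtocol(defaultHost.url) => 'accepted-starfish.test.cloud'
// dnsSuffix keeps everything after `${name}.` in the default host URL:
const dnsSuffix = defaultHost.url.split(`${defaultHost.name}.`)[1] || '';
// => 'test.cloud'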
@@ -97,32 +99,36 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
}
|
||||
}
|
||||
|
||||
if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
|
||||
if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
|
||||
setTimeout(() => {
|
||||
refetchDeploymentsData();
|
||||
refetchHosts();
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
if (deploymentsData?.data?.data.state === 'HEALTHY') {
|
||||
if (hostsData?.data?.data?.state === 'HEALTHY') {
|
||||
setIsPollingEnabled(false);
|
||||
}
|
||||
}, [
|
||||
deploymentsData,
|
||||
refetchDeploymentsData,
|
||||
isPollingEnabled,
|
||||
isFetchingDeploymentsData,
|
||||
]);
|
||||
}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);
|
||||
|
||||
const onUpdateCustomDomainSettings = (): void => {
|
||||
editForm
|
||||
.validateFields()
|
||||
.then((values) => {
|
||||
if (values.subdomain) {
|
||||
updateSubDomain({
|
||||
data: {
|
||||
name: values.subdomain,
|
||||
updateSubDomain(
|
||||
{ data: { name: values.subdomain } },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setIsPollingEnabled(true);
|
||||
refetchHosts();
|
||||
setIsEditModalOpen(false);
|
||||
},
|
||||
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
|
||||
setUpdateDomainError(error as AxiosError);
|
||||
setIsPollingEnabled(false);
|
||||
},
|
||||
},
|
||||
});
|
||||
);
|
||||
|
||||
setCustomDomainDetails({
|
||||
subdomain: values.subdomain,
|
||||
@@ -134,10 +140,8 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
});
|
||||
};
|
||||
|
||||
const onCopyUrlHandler = (host: string): void => {
|
||||
const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
|
||||
|
||||
setCopyUrl(url);
|
||||
const onCopyUrlHandler = (url: string): void => {
|
||||
setCopyUrl(stripProtocol(url));
|
||||
notifications.success({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
@@ -157,7 +161,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
</div>
|
||||
|
||||
<div className="custom-domain-settings-content">
|
||||
{!isLoadingDeploymentsData && (
|
||||
{!isLoadingHosts && (
|
||||
<Card className="custom-domain-settings-card">
|
||||
<div className="custom-domain-settings-content-header">
|
||||
Team {org?.[0]?.displayName} Information
|
||||
@@ -169,10 +173,9 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
<div
|
||||
className="custom-domain-url"
|
||||
key={host.name}
|
||||
onClick={(): void => onCopyUrlHandler(host.name)}
|
||||
onClick={(): void => onCopyUrlHandler(host.url || '')}
|
||||
>
|
||||
<Link2 size={12} /> {host.name}.
|
||||
{deploymentsData?.data.data.cluster.region.dns}
|
||||
<Link2 size={12} /> {stripProtocol(host.url || '')}
|
||||
{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
|
||||
</div>
|
||||
))}
|
||||
@@ -181,11 +184,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
<div className="custom-domain-url-edit-btn">
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
disabled={
|
||||
isLoadingDeploymentsData ||
|
||||
isFetchingDeploymentsData ||
|
||||
isPollingEnabled
|
||||
}
|
||||
disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
|
||||
type="default"
|
||||
icon={<Pencil size={10} />}
|
||||
onClick={(): void => setIsEditModalOpen(true)}
|
||||
@@ -198,7 +197,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
{isPollingEnabled && (
|
||||
<Alert
|
||||
className="custom-domain-update-status"
|
||||
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
|
||||
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
|
||||
type="info"
|
||||
icon={<InfoIcon size={12} />}
|
||||
/>
|
||||
@@ -206,7 +205,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{isLoadingDeploymentsData && (
|
||||
{isLoadingHosts && (
|
||||
<Card className="custom-domain-settings-card">
|
||||
<Skeleton
|
||||
className="custom-domain-settings-skeleton"
|
||||
@@ -255,7 +254,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
|
||||
placeholder="Enter Domain"
|
||||
onChange={(): void => setUpdateDomainError(null)}
|
||||
addonAfter={deploymentsData?.data.data.cluster.region.dns}
|
||||
addonAfter={dnsSuffix}
|
||||
autoFocus
|
||||
/>
|
||||
</Form.Item>
|
||||
@@ -267,7 +266,8 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
{updateDomainError.status === 409 ? (
|
||||
<Alert
|
||||
message={
|
||||
(updateDomainError?.response?.data as { error?: string })?.error ||
|
||||
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
|
||||
?.message ||
|
||||
'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
|
||||
}
|
||||
type="warning"
|
||||
@@ -275,7 +275,10 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
/>
|
||||
) : (
|
||||
<Typography.Text type="danger">
|
||||
{(updateDomainError.response?.data as { error: string })?.error}
|
||||
{
|
||||
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
|
||||
?.message
|
||||
}
|
||||
</Typography.Text>
|
||||
)}
|
||||
</div>
|
||||
|
||||
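The 409 handling above now reads the message from the typed error body; the payload shape it expects (as exercised by the new test below) looks like:

// RenderErrorResponseDTO-style body returned on a conflict.
const conflictBody = { error: { message: 'Already updated today' } };
// The Alert falls back to the canned copy when error.message is absent.
const displayed =
	conflictBody.error?.message ??
	'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.';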
@@ -0,0 +1,128 @@
|
||||
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import CustomDomainSettings from '../CustomDomainSettings';
|
||||
|
||||
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
|
||||
|
||||
const mockHostsResponse: GetHosts200 = {
|
||||
status: 'success',
|
||||
data: {
|
||||
name: 'accepted-starfish',
|
||||
state: 'HEALTHY',
|
||||
tier: 'PREMIUM',
|
||||
hosts: [
|
||||
{
|
||||
name: 'accepted-starfish',
|
||||
is_default: true,
|
||||
url: 'https://accepted-starfish.test.cloud',
|
||||
},
|
||||
{
|
||||
name: 'custom-host',
|
||||
is_default: false,
|
||||
url: 'https://custom-host.test.cloud',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
describe('CustomDomainSettings', () => {
|
||||
afterEach(() => server.resetHandlers());
|
||||
|
||||
it('renders host URLs with protocol stripped and marks the default host', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await screen.findByText(/custom-host\.test\.cloud/i);
|
||||
expect(screen.getByText('Default')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens edit modal with DNS suffix derived from the default host', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
|
||||
expect(
|
||||
screen.getByRole('dialog', { name: /customize your team['’]s url/i }),
|
||||
).toBeInTheDocument();
|
||||
// DNS suffix is the part of the default host URL after the name prefix
|
||||
expect(screen.getByText('test.cloud')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
|
||||
let capturedBody: Record<string, unknown> = {};
|
||||
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
|
||||
capturedBody = await req.json<Record<string, unknown>>();
|
||||
return res(ctx.status(200), ctx.json({}));
|
||||
}),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
|
||||
const input = screen.getByPlaceholderText(/enter domain/i);
|
||||
await user.clear(input);
|
||||
await user.type(input, 'myteam');
|
||||
await user.click(screen.getByRole('button', { name: /apply changes/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(capturedBody).toEqual({ name: 'myteam' });
|
||||
});
|
||||
});
|
||||
|
||||
it('shows contact support option when domain update returns 409', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(
|
||||
ctx.status(409),
|
||||
ctx.json({ error: { message: 'Already updated today' } }),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
|
||||
await user.click(screen.getByRole('button', { name: /apply changes/i }));
|
||||
|
||||
expect(
|
||||
await screen.findByRole('button', { name: /contact support/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -34,6 +34,9 @@ function DashboardVariableSelection(): JSX.Element | null {
const sortedVariablesArray = useDashboardVariablesSelector(
(state) => state.sortedVariablesArray,
);
const dynamicVariableOrder = useDashboardVariablesSelector(
(state) => state.dynamicVariableOrder,
);
const dependencyData = useDashboardVariablesSelector(
(state) => state.dependencyData,
);
@@ -52,10 +55,11 @@ function DashboardVariableSelection(): JSX.Element | null {
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);

// Memoize the order key to avoid unnecessary triggers
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
const variableOrderKey = useMemo(() => {
const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
const dynamicVariableOrderKey = dynamicVariableOrder?.join(',') ?? '';
return `${queryVariableOrderKey}|${dynamicVariableOrderKey}`;
}, [dependencyData?.order, dynamicVariableOrder]);

// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
@@ -66,7 +70,7 @@ function DashboardVariableSelection(): JSX.Element | null {
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
}, [variableOrderKey, minTime, maxTime]);

// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.

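A small sketch of the composed key that now drives the fetch-cycle effect (variable names are illustrative; the behaviour matches the tests below):

// Query-variable dependency order and dynamic-variable order are joined into one key.
const queryOrder = ['env', 'service'];
const dynamicOrder = ['dyn1'];
const orderKey = `${queryOrder.join(',')}|${dynamicOrder.join(',')}`;
// => 'env,service|dyn1'
// Adding or removing a DYNAMIC variable changes the key and re-runs the effect;
// changing only a variable's selected value leaves the key (and the effect) untouched.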
@@ -0,0 +1,203 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { act, render } from '@testing-library/react';
|
||||
import {
|
||||
dashboardVariablesStore,
|
||||
setDashboardVariablesStore,
|
||||
updateDashboardVariablesStore,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
|
||||
import {
|
||||
IDashboardVariables,
|
||||
IDashboardVariablesStoreState,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import {
|
||||
enqueueFetchOfAllVariables,
|
||||
initializeVariableFetchStore,
|
||||
} from 'providers/Dashboard/store/variableFetchStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import DashboardVariableSelection from '../DashboardVariableSelection';
|
||||
|
||||
// Mock providers/Dashboard/Dashboard
|
||||
const mockSetSelectedDashboard = jest.fn();
|
||||
const mockUpdateLocalStorageDashboardVariables = jest.fn();
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): Record<string, unknown> => ({
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock hooks/dashboard/useVariablesFromUrl
|
||||
const mockUpdateUrlVariable = jest.fn();
|
||||
const mockGetUrlVariables = jest.fn().mockReturnValue({});
|
||||
jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
|
||||
__esModule: true,
|
||||
default: (): Record<string, unknown> => ({
|
||||
updateUrlVariable: mockUpdateUrlVariable,
|
||||
getUrlVariables: mockGetUrlVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock variableFetchStore functions
|
||||
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
|
||||
initializeVariableFetchStore: jest.fn(),
|
||||
enqueueFetchOfAllVariables: jest.fn(),
|
||||
enqueueDescendantsOfVariable: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock initializeDefaultVariables
|
||||
jest.mock('providers/Dashboard/initializeDefaultVariables', () => ({
|
||||
initializeDefaultVariables: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock react-redux useSelector for globalTime
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
|
||||
}));
|
||||
|
||||
// Mock VariableItem to avoid rendering complexity
|
||||
jest.mock('../VariableItem', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div data-testid="variable-item" />,
|
||||
}));
|
||||
|
||||
function createVariable(
|
||||
overrides: Partial<IDashboardVariable> = {},
|
||||
): IDashboardVariable {
|
||||
return {
|
||||
id: 'test-id',
|
||||
name: 'test-var',
|
||||
description: '',
|
||||
type: 'QUERY',
|
||||
sort: 'DISABLED',
|
||||
showALLOption: false,
|
||||
multiSelect: false,
|
||||
order: 0,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function resetStore(): void {
|
||||
dashboardVariablesStore.set(() => ({
|
||||
dashboardId: '',
|
||||
variables: {},
|
||||
sortedVariablesArray: [],
|
||||
dependencyData: null,
|
||||
variableTypes: {},
|
||||
dynamicVariableOrder: [],
|
||||
}));
|
||||
}
|
||||
|
||||
describe('DashboardVariableSelection', () => {
|
||||
beforeEach(() => {
|
||||
resetStore();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should call initializeVariableFetchStore and enqueueFetchOfAllVariables on mount', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when dynamicVariableOrder changes', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
// Clear mocks after initial render
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Add a DYNAMIC variable which changes dynamicVariableOrder
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(
|
||||
expect.arrayContaining(['env', 'dyn1']),
|
||||
);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when a dynamic variable is removed', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 2 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Remove dyn2, changing dynamicVariableOrder from ['dyn1','dyn2'] to ['dyn1']
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env', 'dyn1']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should NOT re-trigger fetch cycle when dynamicVariableOrder stays the same', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Update a non-dynamic variable's selectedValue — dynamicVariableOrder unchanged
|
||||
act(() => {
|
||||
const snapshot = dashboardVariablesStore.getSnapshot();
|
||||
dashboardVariablesStore.set(
|
||||
(): IDashboardVariablesStoreState => ({
|
||||
...snapshot,
|
||||
variables: {
|
||||
...snapshot.variables,
|
||||
env: {
|
||||
...snapshot.variables.env,
|
||||
selectedValue: 'production',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).not.toHaveBeenCalled();
|
||||
expect(enqueueFetchOfAllVariables).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,7 +1,6 @@
|
||||
import { useCallback } from 'react';
|
||||
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
|
||||
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
|
||||
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
|
||||
import {
|
||||
HistogramTooltipProps,
|
||||
TooltipRenderArgs,
|
||||
@@ -22,21 +21,11 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
|
||||
if (customRenderTooltip) {
|
||||
return customRenderTooltip(props);
|
||||
}
|
||||
const content = buildTooltipContent({
|
||||
data: props.uPlotInstance.data,
|
||||
series: props.uPlotInstance.series,
|
||||
dataIndexes: props.dataIndexes,
|
||||
activeSeriesIndex: props.seriesIndex,
|
||||
uPlotInstance: props.uPlotInstance,
|
||||
yAxisUnit: rest.yAxisUnit ?? '',
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
});
|
||||
const tooltipProps: HistogramTooltipProps = {
|
||||
...props,
|
||||
timezone: rest.timezone,
|
||||
yAxisUnit: rest.yAxisUnit,
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
content,
|
||||
};
|
||||
return <HistogramTooltip {...tooltipProps} />;
|
||||
},
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { useCallback } from 'react';
|
||||
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
|
||||
import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
|
||||
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
|
||||
import {
|
||||
TimeSeriesTooltipProps,
|
||||
TooltipRenderArgs,
|
||||
@@ -17,21 +16,11 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
|
||||
if (customRenderTooltip) {
|
||||
return customRenderTooltip(props);
|
||||
}
|
||||
const content = buildTooltipContent({
|
||||
data: props.uPlotInstance.data,
|
||||
series: props.uPlotInstance.series,
|
||||
dataIndexes: props.dataIndexes,
|
||||
activeSeriesIndex: props.seriesIndex,
|
||||
uPlotInstance: props.uPlotInstance,
|
||||
yAxisUnit: rest.yAxisUnit ?? '',
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
});
|
||||
const tooltipProps: TimeSeriesTooltipProps = {
|
||||
...props,
|
||||
timezone: rest.timezone,
|
||||
yAxisUnit: rest.yAxisUnit,
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
content,
|
||||
};
|
||||
return <TimeSeriesTooltip {...tooltipProps} />;
|
||||
},
|
||||
|
||||
@@ -1,3 +1,22 @@
|
||||
.chart-manager-series-label {
|
||||
width: 100%;
|
||||
min-width: 0;
|
||||
max-width: 100%;
|
||||
cursor: pointer;
|
||||
border: none;
|
||||
background-color: transparent;
|
||||
color: inherit;
|
||||
text-align: left;
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
|
||||
&:disabled {
|
||||
cursor: not-allowed;
|
||||
}
|
||||
}
|
||||
|
||||
.chart-manager-container {
|
||||
width: 100%;
|
||||
max-height: calc(40% - 40px);
|
||||
|
||||
@@ -1,24 +1,28 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { Button, Input } from 'antd';
|
||||
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { getGraphManagerTableColumns } from 'container/GridCardLayout/GridCard/FullView/TableRender/GraphManagerColumns';
|
||||
import { ExtendedChartDataset } from 'container/GridCardLayout/GridCard/FullView/types';
|
||||
import { getDefaultTableDataSet } from 'container/GridCardLayout/GridCard/FullView/utils';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
|
||||
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
|
||||
import { getChartManagerColumns } from './columns';
|
||||
import { ExtendedChartDataset, getDefaultTableDataSet } from './utils';
|
||||
|
||||
import './ChartManager.styles.scss';
|
||||
|
||||
interface ChartManagerProps {
|
||||
config: UPlotConfigBuilder;
|
||||
alignedData: uPlot.AlignedData;
|
||||
yAxisUnit?: string;
|
||||
decimalPrecision?: PrecisionOption;
|
||||
onCancel?: () => void;
|
||||
}
|
||||
|
||||
const X_AXIS_INDEX = 0;
|
||||
|
||||
/**
|
||||
* ChartManager provides a tabular view to manage the visibility of
|
||||
* individual series on a uPlot chart.
|
||||
@@ -28,16 +32,12 @@ interface ChartManagerProps {
|
||||
* - filter series by label
|
||||
* - toggle individual series on/off
|
||||
* - persist the visibility configuration to local storage.
|
||||
*
|
||||
* @param config - `UPlotConfigBuilder` instance used to derive chart options.
|
||||
* @param alignedData - uPlot aligned data used to build the initial table dataset.
|
||||
* @param yAxisUnit - Optional unit label for Y-axis values shown in the table.
|
||||
* @param onCancel - Optional callback invoked when the user cancels the dialog.
|
||||
*/
|
||||
export default function ChartManager({
|
||||
config,
|
||||
alignedData,
|
||||
yAxisUnit,
|
||||
decimalPrecision = PrecisionOptionsEnum.TWO,
|
||||
onCancel,
|
||||
}: ChartManagerProps): JSX.Element {
|
||||
const { notifications } = useNotifications();
|
||||
@@ -53,8 +53,13 @@ export default function ChartManager({
|
||||
const { isDashboardLocked } = useDashboard();
|
||||
|
||||
const [tableDataSet, setTableDataSet] = useState<ExtendedChartDataset[]>(() =>
|
||||
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
|
||||
getDefaultTableDataSet(
|
||||
config.getConfig() as uPlot.Options,
|
||||
alignedData,
|
||||
decimalPrecision,
|
||||
),
|
||||
);
|
||||
const [filterValue, setFilterValue] = useState('');
|
||||
|
||||
const graphVisibilityState = useMemo(
|
||||
() =>
|
||||
@@ -67,46 +72,62 @@ export default function ChartManager({
|
||||
|
||||
useEffect(() => {
|
||||
setTableDataSet(
|
||||
getDefaultTableDataSet(config.getConfig() as uPlot.Options, alignedData),
|
||||
);
|
||||
}, [alignedData, config]);
|
||||
|
||||
const filterHandler = useCallback(
|
||||
(event: React.ChangeEvent<HTMLInputElement>): void => {
|
||||
const value = event.target.value.toString().toLowerCase();
|
||||
const updatedDataSet = tableDataSet.map((item) => {
|
||||
if (item.label?.toLocaleLowerCase().includes(value)) {
|
||||
return { ...item, show: true };
|
||||
}
|
||||
return { ...item, show: false };
|
||||
});
|
||||
setTableDataSet(updatedDataSet);
|
||||
},
|
||||
[tableDataSet],
|
||||
);
|
||||
|
||||
const dataSource = useMemo(
|
||||
() =>
|
||||
tableDataSet.filter(
|
||||
(item, index) => index !== 0 && item.show, // skipping the first item as it is the x-axis
|
||||
getDefaultTableDataSet(
|
||||
config.getConfig() as uPlot.Options,
|
||||
alignedData,
|
||||
decimalPrecision,
|
||||
),
|
||||
[tableDataSet],
|
||||
);
|
||||
setFilterValue('');
|
||||
}, [alignedData, config, decimalPrecision]);
|
||||
|
||||
const handleFilterChange = useCallback(
|
||||
(e: React.ChangeEvent<HTMLInputElement>): void => {
|
||||
setFilterValue(e.target.value.toLowerCase());
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const handleToggleSeriesOnOff = useCallback(
|
||||
(index: number): void => {
|
||||
onToggleSeriesOnOff(index);
|
||||
},
|
||||
[onToggleSeriesOnOff],
|
||||
);
|
||||
|
||||
const dataSource = useMemo(() => {
|
||||
const filter = filterValue.trim();
|
||||
return tableDataSet.filter((item, index) => {
|
||||
if (index === X_AXIS_INDEX) {
|
||||
return false;
|
||||
}
|
||||
if (!filter) {
|
||||
return true;
|
||||
}
|
||||
return item.label?.toLowerCase().includes(filter) ?? false;
|
||||
});
|
||||
}, [tableDataSet, filterValue]);
|
||||
|
||||
const columns = useMemo(
|
||||
() =>
|
||||
getGraphManagerTableColumns({
|
||||
getChartManagerColumns({
|
||||
tableDataSet,
|
||||
checkBoxOnChangeHandler: (_e, index) => {
|
||||
onToggleSeriesOnOff(index);
|
||||
},
|
||||
graphVisibilityState,
|
||||
labelClickedHandler: onToggleSeriesVisibility,
|
||||
onToggleSeriesOnOff: handleToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
yAxisUnit,
|
||||
isGraphDisabled: isDashboardLocked,
|
||||
decimalPrecision,
|
||||
}),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[tableDataSet, graphVisibilityState, yAxisUnit, isDashboardLocked],
|
||||
[
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
handleToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
yAxisUnit,
|
||||
isDashboardLocked,
|
||||
decimalPrecision,
|
||||
],
|
||||
);
|
||||
|
||||
const handleSave = useCallback((): void => {
|
||||
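Design note on the hunk above: filtering is now non-destructive. Instead of overwriting each row's show flag on every keystroke, the input value is kept in state and the visible rows are derived from it, roughly:

// Derived view over tableDataSet; clearing the filter restores every series row.
const visibleRows = tableDataSet.filter((item, index) => {
	if (index === X_AXIS_INDEX) {
		return false; // the x-axis row is never listed in the table
	}
	const filter = filterValue.trim();
	return !filter || (item.label?.toLowerCase().includes(filter) ?? false);
});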
@@ -114,20 +135,23 @@ export default function ChartManager({
|
||||
notifications.success({
|
||||
message: 'The updated graphs & legends are saved',
|
||||
});
|
||||
if (onCancel) {
|
||||
onCancel();
|
||||
}
|
||||
onCancel?.();
|
||||
}, [syncSeriesVisibilityToLocalStorage, notifications, onCancel]);
|
||||
|
||||
return (
|
||||
<div className="chart-manager-container">
|
||||
<div className="chart-manager-header">
|
||||
<Input onChange={filterHandler} placeholder="Filter Series" />
|
||||
<Input
|
||||
placeholder="Filter Series"
|
||||
value={filterValue}
|
||||
onChange={handleFilterChange}
|
||||
data-testid="filter-input"
|
||||
/>
|
||||
<div className="chart-manager-actions-container">
|
||||
<Button type="default" onClick={onCancel}>
|
||||
<Button type="default" onClick={onCancel} data-testid="cancel-button">
|
||||
Cancel
|
||||
</Button>
|
||||
<Button type="primary" onClick={handleSave}>
|
||||
<Button type="primary" onClick={handleSave} data-testid="save-button">
|
||||
Save
|
||||
</Button>
|
||||
</div>
|
||||
@@ -136,10 +160,10 @@ export default function ChartManager({
|
||||
<ResizeTable
|
||||
columns={columns}
|
||||
dataSource={dataSource}
|
||||
virtual
|
||||
rowKey="index"
|
||||
scroll={{ y: 200 }}
|
||||
pagination={false}
|
||||
virtual
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
import { Tooltip } from 'antd';
|
||||
|
||||
import './ChartManager.styles.scss';
|
||||
|
||||
interface SeriesLabelProps {
|
||||
label: string;
|
||||
labelIndex: number;
|
||||
onClick: (idx: number) => void;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
export function SeriesLabel({
|
||||
label,
|
||||
labelIndex,
|
||||
onClick,
|
||||
disabled,
|
||||
}: SeriesLabelProps): JSX.Element {
|
||||
return (
|
||||
<Tooltip placement="topLeft" title={label}>
|
||||
<button
|
||||
className="chart-manager-series-label"
|
||||
disabled={disabled}
|
||||
type="button"
|
||||
data-testid={`series-label-button-${labelIndex}`}
|
||||
onClick={(): void => onClick(labelIndex)}
|
||||
>
|
||||
{label}
|
||||
</button>
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,172 @@
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
|
||||
import ChartManager from '../ChartManager';
|
||||
|
||||
const mockSyncSeriesVisibilityToLocalStorage = jest.fn();
|
||||
const mockNotificationsSuccess = jest.fn();
|
||||
|
||||
jest.mock('lib/uPlotV2/context/PlotContext', () => ({
|
||||
usePlotContext: (): {
|
||||
onToggleSeriesOnOff: jest.Mock;
|
||||
onToggleSeriesVisibility: jest.Mock;
|
||||
syncSeriesVisibilityToLocalStorage: jest.Mock;
|
||||
} => ({
|
||||
onToggleSeriesOnOff: jest.fn(),
|
||||
onToggleSeriesVisibility: jest.fn(),
|
||||
syncSeriesVisibilityToLocalStorage: mockSyncSeriesVisibilityToLocalStorage,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('lib/uPlotV2/hooks/useLegendsSync', () => ({
|
||||
__esModule: true,
|
||||
default: (): {
|
||||
legendItemsMap: { [key: number]: { show: boolean; label: string } };
|
||||
} => ({
|
||||
legendItemsMap: {
|
||||
0: { show: true, label: 'Time' },
|
||||
1: { show: true, label: 'Series 1' },
|
||||
2: { show: true, label: 'Series 2' },
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): { isDashboardLocked: boolean } => ({
|
||||
isDashboardLocked: false,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useNotifications', () => ({
|
||||
useNotifications: (): { notifications: { success: jest.Mock } } => ({
|
||||
notifications: {
|
||||
success: mockNotificationsSuccess,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('components/ResizeTable', () => {
|
||||
const MockTable = ({
|
||||
dataSource,
|
||||
columns,
|
||||
}: {
|
||||
dataSource: { index: number; label?: string }[];
|
||||
columns: { key: string; title: string }[];
|
||||
}): JSX.Element => (
|
||||
<div data-testid="resize-table">
|
||||
{columns.map((col) => (
|
||||
<span key={col.key}>{col.title}</span>
|
||||
))}
|
||||
{dataSource.map((row) => (
|
||||
<div key={row.index} data-testid={`row-${row.index}`}>
|
||||
{row.label}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
return { ResizeTable: MockTable };
|
||||
});
|
||||
|
||||
const createMockConfig = (): { getConfig: () => uPlot.Options } => ({
|
||||
getConfig: (): uPlot.Options => ({
|
||||
width: 100,
|
||||
height: 100,
|
||||
series: [
|
||||
{ label: 'Time', value: 'time' },
|
||||
{ label: 'Series 1', scale: 'y' },
|
||||
{ label: 'Series 2', scale: 'y' },
|
||||
],
|
||||
}),
|
||||
});
|
||||
|
||||
const createAlignedData = (): uPlot.AlignedData => [
|
||||
[1000, 2000, 3000],
|
||||
[10, 20, 30],
|
||||
[1, 2, 3],
|
||||
];
|
||||
|
||||
describe('ChartManager', () => {
|
||||
const mockOnCancel = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders filter input and action buttons', () => {
|
||||
render(
|
||||
<ChartManager
|
||||
config={createMockConfig() as any}
|
||||
alignedData={createAlignedData()}
|
||||
onCancel={mockOnCancel}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByPlaceholderText('Filter Series')).toBeInTheDocument();
|
||||
expect(screen.getByRole('button', { name: /cancel/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('button', { name: /save/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders ResizeTable with data', () => {
|
||||
render(
|
||||
<ChartManager
|
||||
config={createMockConfig() as UPlotConfigBuilder}
|
||||
alignedData={createAlignedData()}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('resize-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls onCancel when Cancel button is clicked', async () => {
|
||||
render(
|
||||
<ChartManager
|
||||
config={createMockConfig() as UPlotConfigBuilder}
|
||||
alignedData={createAlignedData()}
|
||||
onCancel={mockOnCancel}
|
||||
/>,
|
||||
);
|
||||
|
||||
await userEvent.click(screen.getByTestId('cancel-button'));
|
||||
|
||||
expect(mockOnCancel).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('filters table data when typing in filter input', async () => {
|
||||
render(
|
||||
<ChartManager
|
||||
config={createMockConfig() as UPlotConfigBuilder}
|
||||
alignedData={createAlignedData()}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Before filter: both Series 1 and Series 2 rows are visible
|
||||
expect(screen.getByTestId('row-1')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('row-2')).toBeInTheDocument();
|
||||
|
||||
const filterInput = screen.getByTestId('filter-input');
|
||||
await userEvent.type(filterInput, 'Series 1');
|
||||
|
||||
// After filter: only Series 1 row is visible, Series 2 row is filtered out
|
||||
expect(screen.getByTestId('row-1')).toBeInTheDocument();
|
||||
expect(screen.queryByTestId('row-2')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls syncSeriesVisibilityToLocalStorage, notifications.success, and onCancel when Save is clicked', async () => {
|
||||
render(
|
||||
<ChartManager
|
||||
config={createMockConfig() as UPlotConfigBuilder}
|
||||
alignedData={createAlignedData()}
|
||||
onCancel={mockOnCancel}
|
||||
/>,
|
||||
);
|
||||
|
||||
await userEvent.click(screen.getByTestId('save-button'));
|
||||
|
||||
expect(mockSyncSeriesVisibilityToLocalStorage).toHaveBeenCalledTimes(1);
|
||||
expect(mockNotificationsSuccess).toHaveBeenCalledWith({
|
||||
message: 'The updated graphs & legends are saved',
|
||||
});
|
||||
expect(mockOnCancel).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,39 @@
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
|
||||
import { SeriesLabel } from '../SeriesLabel';
|
||||
|
||||
describe('SeriesLabel', () => {
|
||||
it('renders the label text', () => {
|
||||
render(
|
||||
<SeriesLabel label="Test Series Label" labelIndex={1} onClick={jest.fn()} />,
|
||||
);
|
||||
expect(screen.getByTestId('series-label-button-1')).toHaveTextContent(
|
||||
'Test Series Label',
|
||||
);
|
||||
});
|
||||
|
||||
it('calls onClick with labelIndex when clicked', async () => {
|
||||
const onClick = jest.fn();
|
||||
render(<SeriesLabel label="Series A" labelIndex={2} onClick={onClick} />);
|
||||
|
||||
await userEvent.click(screen.getByTestId('series-label-button-2'));
|
||||
|
||||
expect(onClick).toHaveBeenCalledWith(2);
|
||||
expect(onClick).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('renders disabled button when disabled prop is true', () => {
|
||||
render(
|
||||
<SeriesLabel label="Disabled" labelIndex={0} onClick={jest.fn()} disabled />,
|
||||
);
|
||||
const button = screen.getByTestId('series-label-button-0');
|
||||
expect(button).toBeDisabled();
|
||||
});
|
||||
|
||||
it('has chart-manager-series-label class', () => {
|
||||
render(<SeriesLabel label="Label" labelIndex={0} onClick={jest.fn()} />);
|
||||
const button = screen.getByTestId('series-label-button-0');
|
||||
expect(button).toHaveClass('chart-manager-series-label');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,167 @@
|
||||
import { render } from '@testing-library/react';
|
||||
import { Y_AXIS_UNIT_NAMES } from 'components/YAxisUnitSelector/constants';
|
||||
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
|
||||
|
||||
import { getChartManagerColumns } from '../columns';
|
||||
import { ExtendedChartDataset } from '../utils';
|
||||
|
||||
const createMockDataset = (
|
||||
index: number,
|
||||
overrides: Partial<ExtendedChartDataset> = {},
|
||||
): ExtendedChartDataset =>
|
||||
({
|
||||
index,
|
||||
label: `Series ${index}`,
|
||||
show: true,
|
||||
sum: 100,
|
||||
avg: 50,
|
||||
min: 10,
|
||||
max: 90,
|
||||
stroke: '#ff0000',
|
||||
...overrides,
|
||||
} as ExtendedChartDataset);
|
||||
|
||||
describe('getChartManagerColumns', () => {
|
||||
const tableDataSet: ExtendedChartDataset[] = [
|
||||
createMockDataset(0, { label: 'Time' }),
|
||||
createMockDataset(1),
|
||||
createMockDataset(2),
|
||||
];
|
||||
const graphVisibilityState = [true, true, false];
|
||||
const onToggleSeriesOnOff = jest.fn();
|
||||
const onToggleSeriesVisibility = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('returns columns with expected structure', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
});
|
||||
|
||||
expect(columns).toHaveLength(6);
|
||||
expect(columns[0].key).toBe('index');
|
||||
expect(columns[1].key).toBe('label');
|
||||
expect(columns[2].key).toBe('avg');
|
||||
expect(columns[3].key).toBe('sum');
|
||||
expect(columns[4].key).toBe('max');
|
||||
expect(columns[5].key).toBe('min');
|
||||
});
|
||||
|
||||
it('includes Label column with title', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
});
|
||||
|
||||
const labelCol = columns.find((c) => c.key === 'label');
|
||||
expect(labelCol?.title).toBe('Label');
|
||||
});
|
||||
|
||||
it('formats column titles with yAxisUnit', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
yAxisUnit: 'ms',
|
||||
});
|
||||
|
||||
const avgCol = columns.find((c) => c.key === 'avg');
|
||||
expect(avgCol?.title).toBe(
|
||||
`Avg (in ${Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MILLISECONDS]})`,
|
||||
);
|
||||
});
|
||||
|
||||
it('numeric column render returns formatted string with yAxisUnit', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
yAxisUnit: 'ms',
|
||||
});
|
||||
|
||||
const avgCol = columns.find((c) => c.key === 'avg');
|
||||
const renderFn = avgCol?.render as
|
||||
| ((val: number, record: ExtendedChartDataset, index: number) => string)
|
||||
| undefined;
|
||||
expect(renderFn).toBeDefined();
|
||||
const output = renderFn?.(123.45, tableDataSet[1], 1);
|
||||
expect(output).toBe('123.45 ms');
|
||||
});
|
||||
|
||||
it('numeric column render formats zero when value is undefined', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
yAxisUnit: 'none',
|
||||
});
|
||||
|
||||
const sumCol = columns.find((c) => c.key === 'sum');
|
||||
const renderFn = sumCol?.render as
|
||||
| ((
|
||||
val: number | undefined,
|
||||
record: ExtendedChartDataset,
|
||||
index: number,
|
||||
) => string)
|
||||
| undefined;
|
||||
const output = renderFn?.(undefined, tableDataSet[1], 1);
|
||||
expect(output).toBe('0');
|
||||
});
|
||||
|
||||
it('label column render displays label text and is clickable', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
});
|
||||
|
||||
const labelCol = columns.find((c) => c.key === 'label');
|
||||
const renderFn = labelCol?.render as
|
||||
| ((
|
||||
label: string,
|
||||
record: ExtendedChartDataset,
|
||||
index: number,
|
||||
) => JSX.Element)
|
||||
| undefined;
|
||||
expect(renderFn).toBeDefined();
|
||||
const renderResult = renderFn!('Series 1', tableDataSet[1], 1);
|
||||
|
||||
const { getByRole } = render(renderResult);
|
||||
expect(getByRole('button', { name: 'Series 1' })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('index column render renders checkbox with correct checked state', () => {
|
||||
const columns = getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
});
|
||||
|
||||
const indexCol = columns.find((c) => c.key === 'index');
|
||||
const renderFn = indexCol?.render as
|
||||
| ((
|
||||
_val: unknown,
|
||||
record: ExtendedChartDataset,
|
||||
index: number,
|
||||
) => JSX.Element)
|
||||
| undefined;
|
||||
expect(renderFn).toBeDefined();
|
||||
const { container } = render(renderFn!(null, tableDataSet[1], 1));
|
||||
|
||||
const checkbox = container.querySelector('input[type="checkbox"]');
|
||||
expect(checkbox).toBeInTheDocument();
|
||||
expect(checkbox).toBeChecked(); // graphVisibilityState[1] is true
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,113 @@
|
||||
import { PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
|
||||
import {
|
||||
formatTableValueWithUnit,
|
||||
getDefaultTableDataSet,
|
||||
getTableColumnTitle,
|
||||
} from '../utils';
|
||||
|
||||
describe('ChartManager utils', () => {
|
||||
describe('getDefaultTableDataSet', () => {
|
||||
const createOptions = (seriesCount: number): uPlot.Options => ({
|
||||
series: Array.from({ length: seriesCount }, (_, i) =>
|
||||
i === 0
|
||||
? { label: 'Time', value: 'time' }
|
||||
: { label: `Series ${i}`, scale: 'y' },
|
||||
),
|
||||
width: 100,
|
||||
height: 100,
|
||||
});
|
||||
|
||||
it('returns one row per series with computed stats', () => {
|
||||
const options = createOptions(3);
|
||||
const data: uPlot.AlignedData = [
|
||||
[1000, 2000, 3000],
|
||||
[10, 20, 30],
|
||||
[1, 2, 3],
|
||||
];
|
||||
|
||||
const result = getDefaultTableDataSet(options, data);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0]).toMatchObject({
|
||||
index: 0,
|
||||
label: 'Time',
|
||||
show: true,
|
||||
});
|
||||
expect(result[1]).toMatchObject({
|
||||
index: 1,
|
||||
label: 'Series 1',
|
||||
show: true,
|
||||
sum: 60,
|
||||
avg: 20,
|
||||
max: 30,
|
||||
min: 10,
|
||||
});
|
||||
expect(result[2]).toMatchObject({
|
||||
index: 2,
|
||||
label: 'Series 2',
|
||||
show: true,
|
||||
sum: 6,
|
||||
avg: 2,
|
||||
max: 3,
|
||||
min: 1,
|
||||
});
|
||||
});
|
||||
|
||||
it('handles empty data arrays', () => {
|
||||
const options = createOptions(2);
|
||||
const data: uPlot.AlignedData = [[], []];
|
||||
|
||||
const result = getDefaultTableDataSet(options, data);
|
||||
|
||||
expect(result[0]).toMatchObject({
|
||||
sum: 0,
|
||||
avg: 0,
|
||||
max: 0,
|
||||
min: 0,
|
||||
});
|
||||
});
|
||||
|
||||
it('respects decimalPrecision parameter', () => {
|
||||
const options = createOptions(2);
|
||||
const data: uPlot.AlignedData = [[1000], [123.454]];
|
||||
|
||||
const resultTwo = getDefaultTableDataSet(
|
||||
options,
|
||||
data,
|
||||
PrecisionOptionsEnum.TWO,
|
||||
);
|
||||
expect(resultTwo[1].avg).toBe(123.45);
|
||||
|
||||
const resultZero = getDefaultTableDataSet(
|
||||
options,
|
||||
data,
|
||||
PrecisionOptionsEnum.ZERO,
|
||||
);
|
||||
expect(resultZero[1].avg).toBe(123);
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatTableValueWithUnit', () => {
|
||||
it('formats value with unit', () => {
|
||||
const result = formatTableValueWithUnit(1234.56, 'ms');
|
||||
expect(result).toBe('1.23 s');
|
||||
});
|
||||
|
||||
it('falls back to none format when yAxisUnit is undefined', () => {
|
||||
const result = formatTableValueWithUnit(123.45);
|
||||
expect(result).toBe('123.45');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTableColumnTitle', () => {
|
||||
it('returns title only when yAxisUnit is undefined', () => {
|
||||
expect(getTableColumnTitle('Avg')).toBe('Avg');
|
||||
});
|
||||
|
||||
it('returns title with unit when yAxisUnit is provided', () => {
|
||||
const result = getTableColumnTitle('Avg', 'ms');
|
||||
expect(result).toBe('Avg (in Milliseconds (ms))');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,94 @@
|
||||
import { ColumnType } from 'antd/es/table';
|
||||
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
import CustomCheckBox from 'container/GridCardLayout/GridCard/FullView/TableRender/CustomCheckBox';
|
||||
|
||||
import { SeriesLabel } from './SeriesLabel';
|
||||
import {
|
||||
ExtendedChartDataset,
|
||||
formatTableValueWithUnit,
|
||||
getTableColumnTitle,
|
||||
} from './utils';
|
||||
|
||||
export interface GetChartManagerColumnsParams {
|
||||
tableDataSet: ExtendedChartDataset[];
|
||||
graphVisibilityState: boolean[];
|
||||
onToggleSeriesOnOff: (index: number) => void;
|
||||
onToggleSeriesVisibility: (index: number) => void;
|
||||
yAxisUnit?: string;
|
||||
decimalPrecision?: PrecisionOption;
|
||||
isGraphDisabled?: boolean;
|
||||
}
|
||||
|
||||
export function getChartManagerColumns({
|
||||
tableDataSet,
|
||||
graphVisibilityState,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
yAxisUnit,
|
||||
decimalPrecision = PrecisionOptionsEnum.TWO,
|
||||
isGraphDisabled,
|
||||
}: GetChartManagerColumnsParams): ColumnType<ExtendedChartDataset>[] {
|
||||
return [
|
||||
{
|
||||
title: '',
|
||||
width: 50,
|
||||
dataIndex: 'index',
|
||||
key: 'index',
|
||||
render: (_: unknown, record: ExtendedChartDataset): JSX.Element => (
|
||||
<CustomCheckBox
|
||||
data={tableDataSet}
|
||||
graphVisibilityState={graphVisibilityState}
|
||||
index={record.index}
|
||||
disabled={isGraphDisabled}
|
||||
checkBoxOnChangeHandler={(_e, idx): void => onToggleSeriesOnOff(idx)}
|
||||
/>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'Label',
|
||||
width: 300,
|
||||
dataIndex: 'label',
|
||||
key: 'label',
|
||||
render: (label: string, record: ExtendedChartDataset): JSX.Element => (
|
||||
<SeriesLabel
|
||||
label={label ?? ''}
|
||||
labelIndex={record.index}
|
||||
disabled={isGraphDisabled}
|
||||
onClick={onToggleSeriesVisibility}
|
||||
/>
|
||||
),
|
||||
},
|
||||
{
|
||||
title: getTableColumnTitle('Avg', yAxisUnit),
|
||||
width: 90,
|
||||
dataIndex: 'avg',
|
||||
key: 'avg',
|
||||
render: (val: number | undefined): string =>
|
||||
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
|
||||
},
|
||||
{
|
||||
title: getTableColumnTitle('Sum', yAxisUnit),
|
||||
width: 90,
|
||||
dataIndex: 'sum',
|
||||
key: 'sum',
|
||||
render: (val: number | undefined): string =>
|
||||
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
|
||||
},
|
||||
{
|
||||
title: getTableColumnTitle('Max', yAxisUnit),
|
||||
width: 90,
|
||||
dataIndex: 'max',
|
||||
key: 'max',
|
||||
render: (val: number | undefined): string =>
|
||||
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
|
||||
},
|
||||
{
|
||||
title: getTableColumnTitle('Min', yAxisUnit),
|
||||
width: 90,
|
||||
dataIndex: 'min',
|
||||
key: 'min',
|
||||
render: (val: number | undefined): string =>
|
||||
formatTableValueWithUnit(val ?? 0, yAxisUnit, decimalPrecision),
|
||||
},
|
||||
];
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
|
||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||
import { Y_AXIS_UNIT_NAMES } from 'components/YAxisUnitSelector/constants';
|
||||
import uPlot from 'uplot';
|
||||
|
||||
/** Extended series with computed stats for table display */
|
||||
export type ExtendedChartDataset = uPlot.Series & {
|
||||
show: boolean;
|
||||
sum: number;
|
||||
avg: number;
|
||||
min: number;
|
||||
max: number;
|
||||
index: number;
|
||||
};
|
||||
|
||||
function roundToDecimalPrecision(
|
||||
value: number,
|
||||
decimalPrecision: PrecisionOption = PrecisionOptionsEnum.TWO,
|
||||
): number {
|
||||
if (
|
||||
typeof value !== 'number' ||
|
||||
Number.isNaN(value) ||
|
||||
value === Infinity ||
|
||||
value === -Infinity
|
||||
) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (decimalPrecision === PrecisionOptionsEnum.FULL) {
|
||||
return value;
|
||||
}
|
||||
|
||||
// regex to match the decimal precision for the given decimal precision
|
||||
const regex = new RegExp(`^-?\\d*\\.?0*\\d{0,${decimalPrecision}}`);
|
||||
const matched = value ? value.toFixed(decimalPrecision).match(regex) : null;
|
||||
return matched ? parseFloat(matched[0]) : 0;
|
||||
}
|
||||
|
||||
/** Build table dataset from uPlot options and aligned data */
|
||||
export function getDefaultTableDataSet(
|
||||
options: uPlot.Options,
|
||||
data: uPlot.AlignedData,
|
||||
decimalPrecision: PrecisionOption = PrecisionOptionsEnum.TWO,
|
||||
): ExtendedChartDataset[] {
|
||||
return options.series.map(
|
||||
(series: uPlot.Series, index: number): ExtendedChartDataset => {
|
||||
const arr = (data[index] as number[]) ?? [];
|
||||
const sum = arr.reduce((a, b) => a + b, 0) || 0;
|
||||
const count = arr.length || 1;
|
||||
|
||||
const hasValues = arr.length > 0;
|
||||
return {
|
||||
...series,
|
||||
index,
|
||||
show: true,
|
||||
sum: roundToDecimalPrecision(sum, decimalPrecision),
|
||||
avg: roundToDecimalPrecision(sum / count, decimalPrecision),
|
||||
max: hasValues
|
||||
? roundToDecimalPrecision(Math.max(...arr), decimalPrecision)
|
||||
: 0,
|
||||
min: hasValues
|
||||
? roundToDecimalPrecision(Math.min(...arr), decimalPrecision)
|
||||
: 0,
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/** Format numeric value for table display using yAxisUnit */
|
||||
export function formatTableValueWithUnit(
|
||||
value: number,
|
||||
yAxisUnit?: string,
|
||||
decimalPrecision: PrecisionOption = PrecisionOptionsEnum.TWO,
|
||||
): string {
|
||||
return `${getYAxisFormattedValue(
|
||||
String(value),
|
||||
yAxisUnit ?? 'none',
|
||||
decimalPrecision,
|
||||
)}`;
|
||||
}
|
||||
|
||||
/** Format column header with optional unit */
|
||||
export function getTableColumnTitle(title: string, yAxisUnit?: string): string {
|
||||
if (!yAxisUnit) {
|
||||
return title;
|
||||
}
|
||||
const universalName =
|
||||
Y_AXIS_UNIT_NAMES[yAxisUnit as keyof typeof Y_AXIS_UNIT_NAMES];
|
||||
if (!universalName) {
|
||||
return `${title} (in ${yAxisUnit})`;
|
||||
}
|
||||
return `${title} (in ${universalName})`;
|
||||
}
|
||||
@@ -96,6 +96,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
|
||||
config={config}
|
||||
alignedData={chartData}
|
||||
yAxisUnit={widget.yAxisUnit}
|
||||
decimalPrecision={widget.decimalPrecision}
|
||||
onCancel={onToggleModelHandler}
|
||||
/>
|
||||
);
|
||||
@@ -105,6 +106,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
|
||||
chartData,
|
||||
widget.yAxisUnit,
|
||||
onToggleModelHandler,
|
||||
widget.decimalPrecision,
|
||||
]);
|
||||
|
||||
const onPlotDestroy = useCallback(() => {
|
||||
|
||||
@@ -95,6 +95,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
|
||||
config={config}
|
||||
alignedData={chartData}
|
||||
yAxisUnit={widget.yAxisUnit}
|
||||
decimalPrecision={widget.decimalPrecision}
|
||||
onCancel={onToggleModelHandler}
|
||||
/>
|
||||
);
|
||||
@@ -104,6 +105,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
|
||||
chartData,
|
||||
widget.yAxisUnit,
|
||||
onToggleModelHandler,
|
||||
widget.decimalPrecision,
|
||||
]);
|
||||
|
||||
return (
|
||||
|
||||
@@ -0,0 +1,325 @@
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import {
|
||||
MetricRangePayloadProps,
|
||||
MetricRangePayloadV3,
|
||||
} from 'types/api/metrics/getQueryRange';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { PanelMode } from '../../types';
|
||||
import { prepareChartData, prepareUPlotConfig } from '../utils';
|
||||
|
||||
jest.mock(
|
||||
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
|
||||
() => ({
|
||||
getStoredSeriesVisibility: jest.fn(),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
|
||||
}));
|
||||
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
getLegend: jest.fn(
|
||||
(_queryData: unknown, _query: unknown, labelName: string) =>
|
||||
`legend-${labelName}`,
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('lib/getLabelName', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(
|
||||
(_metric: unknown, _queryName: string, _legend: string) => 'baseLabel',
|
||||
),
|
||||
}));
|
||||
|
||||
const getLegendMock = jest.requireMock('lib/dashboard/getQueryResults')
|
||||
.getLegend as jest.Mock;
|
||||
const getLabelNameMock = jest.requireMock('lib/getLabelName')
|
||||
.default as jest.Mock;
|
||||
|
||||
const createApiResponse = (
|
||||
result: MetricRangePayloadProps['data']['result'] = [],
|
||||
): MetricRangePayloadProps => ({
|
||||
data: {
|
||||
result,
|
||||
resultType: 'matrix',
|
||||
newResult: (null as unknown) as MetricRangePayloadV3,
|
||||
},
|
||||
});
|
||||
|
||||
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
|
||||
({
|
||||
id: 'widget-1',
|
||||
yAxisUnit: 'ms',
|
||||
isLogScale: false,
|
||||
thresholds: [],
|
||||
customLegendColors: {},
|
||||
...overrides,
|
||||
} as Widgets);
|
||||
|
||||
const defaultTimezone = {
|
||||
name: 'UTC',
|
||||
value: 'UTC',
|
||||
offset: 'UTC',
|
||||
searchIndex: 'UTC',
|
||||
};
|
||||
|
||||
describe('TimeSeriesPanel utils', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
getLabelNameMock.mockReturnValue('baseLabel');
|
||||
getLegendMock.mockImplementation(
|
||||
(_queryData: unknown, _query: unknown, labelName: string) =>
|
||||
`legend-${labelName}`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('prepareChartData', () => {
|
||||
it('returns aligned data with timestamps and empty series when result is empty', () => {
|
||||
const apiResponse = createApiResponse([]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data).toHaveLength(1);
|
||||
expect(data[0]).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns timestamps and one series of y values for single series', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
legend: 'Series A',
|
||||
values: [
|
||||
[1000, '10'],
|
||||
[2000, '20'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data).toHaveLength(2);
|
||||
expect(data[0]).toEqual([1000, 2000]);
|
||||
expect(data[1]).toEqual([10, 20]);
|
||||
});
|
||||
|
||||
it('merges timestamps and fills missing values with null for multiple series', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[3000, '3'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q2',
|
||||
values: [
|
||||
[1000, '10'],
|
||||
[2000, '20'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data[0]).toEqual([1000, 2000, 3000]);
|
||||
// First series: 1, null, 3
|
||||
expect(data[1]).toEqual([1, null, 3]);
|
||||
// Second series: 10, 20, null
|
||||
expect(data[2]).toEqual([10, 20, null]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('prepareUPlotConfig', () => {
|
||||
const baseParams = {
|
||||
widget: createWidget(),
|
||||
isDarkMode: true,
|
||||
currentQuery: {} as Query,
|
||||
onClick: jest.fn(),
|
||||
onDragSelect: jest.fn(),
|
||||
apiResponse: createApiResponse(),
|
||||
timezone: defaultTimezone,
|
||||
panelMode: PanelMode.DASHBOARD_VIEW,
|
||||
};
|
||||
|
||||
it('adds no series when apiResponse has empty result', () => {
|
||||
const builder = prepareUPlotConfig(baseParams);
|
||||
|
||||
const config = builder.getConfig();
|
||||
// Base series (timestamp) only
|
||||
expect(config.series).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('adds one series per result item with label from getLabelName when no currentQuery', () => {
|
||||
getLegendMock.mockReset();
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: { __name__: 'cpu' },
|
||||
queryName: 'Q1',
|
||||
legend: 'CPU',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, '2'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: (null as unknown) as Query,
|
||||
});
|
||||
|
||||
expect(getLabelNameMock).toHaveBeenCalled();
|
||||
expect(getLegendMock).not.toHaveBeenCalled();
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.series).toHaveLength(2);
|
||||
expect(config.series?.[1]).toMatchObject({
|
||||
label: 'baseLabel',
|
||||
scale: 'y',
|
||||
});
|
||||
});
|
||||
|
||||
it('uses getLegend for label when currentQuery is provided', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
legend: 'L1',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: {} as Query,
|
||||
});
|
||||
|
||||
expect(getLegendMock).toHaveBeenCalledWith(
|
||||
{
|
||||
legend: 'L1',
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [[1000, '1']],
|
||||
},
|
||||
{},
|
||||
'baseLabel',
|
||||
);
|
||||
|
||||
const config = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: {} as Query,
|
||||
}).getConfig();
|
||||
expect(config.series?.[1]).toMatchObject({
|
||||
label: 'legend-baseLabel',
|
||||
});
|
||||
});
|
||||
|
||||
it('uses DrawStyle.Line and VisibilityMode.Never when series has multiple valid points', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, '2'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
const series = config.series?.[1];
|
||||
|
||||
expect(config.series).toHaveLength(2);
|
||||
// Line style and points never for multi-point series (checked via builder API)
|
||||
const legendItems = builder.getLegendItems();
|
||||
expect(Object.keys(legendItems)).toHaveLength(1);
|
||||
// multi-point series → points hidden
|
||||
expect(series).toBeDefined();
|
||||
expect(series!.points?.show).toBe(false);
|
||||
});
|
||||
|
||||
it('uses DrawStyle.Points and shows points when series has only one valid point', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, 'NaN'],
|
||||
[3000, 'invalid'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
|
||||
expect(config.series).toHaveLength(2);
|
||||
const seriesConfig = config.series?.[1];
|
||||
expect(seriesConfig).toBeDefined();
|
||||
// Single valid point -> Points draw style (asserted via series config)
|
||||
expect(seriesConfig).toMatchObject({
|
||||
scale: 'y',
|
||||
spanGaps: true,
|
||||
});
|
||||
// single-point series → points shown
|
||||
expect(seriesConfig).toBeDefined();
|
||||
expect(seriesConfig!.points?.show).toBe(true);
|
||||
});
|
||||
|
||||
it('uses widget customLegendColors to set series stroke color', () => {
|
||||
const widget = createWidget({
|
||||
customLegendColors: { 'legend-baseLabel': '#ff0000' },
|
||||
});
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
widget,
|
||||
apiResponse,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
const seriesConfig = config.series?.[1];
|
||||
expect(seriesConfig).toBeDefined();
|
||||
expect(seriesConfig!.stroke).toBe('#ff0000');
|
||||
});
|
||||
|
||||
it('adds multiple series when result has multiple items', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q2',
|
||||
values: [[1000, '2']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
|
||||
expect(config.series).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -15,10 +15,12 @@ import {
|
||||
VisibilityMode,
|
||||
} from 'lib/uPlotV2/config/types';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { isInvalidPlotValue } from 'lib/uPlotV2/utils/dataUtils';
|
||||
import get from 'lodash-es/get';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { QueryData } from 'types/api/widgets/getQuery';
|
||||
|
||||
import { PanelMode } from '../types';
|
||||
import { buildBaseConfig } from '../utils/baseConfigBuilder';
|
||||
@@ -33,6 +35,22 @@ export const prepareChartData = (
|
||||
return [timestampArr, ...yAxisValuesArr];
|
||||
};
|
||||
|
||||
function hasSingleVisiblePointForSeries(series: QueryData): boolean {
|
||||
const rawValues = series.values ?? [];
|
||||
let validPointCount = 0;
|
||||
|
||||
for (const [, rawValue] of rawValues) {
|
||||
if (!isInvalidPlotValue(rawValue)) {
|
||||
validPointCount += 1;
|
||||
if (validPointCount > 1) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export const prepareUPlotConfig = ({
|
||||
widget,
|
||||
isDarkMode,
|
||||
@@ -77,9 +95,8 @@ export const prepareUPlotConfig = ({
|
||||
stepInterval: minStepInterval,
|
||||
});
|
||||
|
||||
const seriesList = apiResponse.data?.result || [];
|
||||
|
||||
seriesList.forEach((series) => {
|
||||
apiResponse.data?.result?.forEach((series) => {
|
||||
const hasSingleValidPoint = hasSingleVisiblePointForSeries(series);
|
||||
const baseLabelName = getLabelName(
|
||||
series.metric,
|
||||
series.queryName || '', // query
|
||||
@@ -92,13 +109,15 @@ export const prepareUPlotConfig = ({
|
||||
|
||||
builder.addSeries({
|
||||
scaleKey: 'y',
|
||||
drawStyle: DrawStyle.Line,
|
||||
drawStyle: hasSingleValidPoint ? DrawStyle.Points : DrawStyle.Line,
|
||||
label: label,
|
||||
colorMapping: widget.customLegendColors ?? {},
|
||||
spanGaps: true,
|
||||
lineStyle: LineStyle.Solid,
|
||||
lineInterpolation: LineInterpolation.Spline,
|
||||
showPoints: VisibilityMode.Never,
|
||||
showPoints: hasSingleValidPoint
|
||||
? VisibilityMode.Always
|
||||
: VisibilityMode.Never,
|
||||
pointSize: 5,
|
||||
isDarkMode,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { Button, Skeleton, Tag, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { useGetHosts } from 'api/generated/services/zeus';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
|
||||
import history from 'lib/history';
|
||||
import { Globe, Link2 } from 'lucide-react';
|
||||
import { Link2 } from 'lucide-react';
|
||||
import Card from 'periscope/components/Card/Card';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { LicensePlatform } from 'types/api/licensesV3/getActive';
|
||||
@@ -26,36 +26,21 @@ function DataSourceInfo({
|
||||
const isEnabled =
|
||||
activeLicense && activeLicense.platform === LicensePlatform.CLOUD;
|
||||
|
||||
const {
|
||||
data: deploymentsData,
|
||||
isError: isErrorDeploymentsData,
|
||||
} = useGetDeploymentsData(isEnabled || false);
|
||||
const { data: hostsData, isError } = useGetHosts({
|
||||
query: { enabled: isEnabled || false },
|
||||
});
|
||||
|
||||
const [region, setRegion] = useState<string>('');
|
||||
const [url, setUrl] = useState<string>('');
|
||||
|
||||
useEffect(() => {
|
||||
if (deploymentsData) {
|
||||
switch (deploymentsData?.data.data.cluster.region.name) {
|
||||
case 'in':
|
||||
setRegion('India');
|
||||
break;
|
||||
case 'us':
|
||||
setRegion('United States');
|
||||
break;
|
||||
case 'eu':
|
||||
setRegion('Europe');
|
||||
break;
|
||||
default:
|
||||
setRegion(deploymentsData?.data.data.cluster.region.name);
|
||||
break;
|
||||
if (hostsData) {
|
||||
const defaultHost = hostsData?.data?.data?.hosts?.find((h) => h.is_default);
|
||||
if (defaultHost?.url) {
|
||||
const url = defaultHost?.url?.split('://')[1] ?? '';
|
||||
setUrl(url);
|
||||
}
|
||||
|
||||
setUrl(
|
||||
`${deploymentsData?.data.data.name}.${deploymentsData?.data.data.cluster.region.dns}`,
|
||||
);
|
||||
}
|
||||
}, [deploymentsData]);
|
||||
}, [hostsData]);
|
||||
|
||||
const renderNotSendingData = (): JSX.Element => (
|
||||
<>
|
||||
@@ -123,14 +108,8 @@ function DataSourceInfo({
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{!isErrorDeploymentsData && deploymentsData && (
|
||||
{!isError && hostsData && (
|
||||
<div className="workspace-details">
|
||||
<div className="workspace-region">
|
||||
<Globe size={10} />
|
||||
|
||||
<Typography>{region}</Typography>
|
||||
</div>
|
||||
|
||||
<div className="workspace-url">
|
||||
<Link2 size={12} />
|
||||
|
||||
@@ -156,17 +135,11 @@ function DataSourceInfo({
|
||||
Hello there, Welcome to your SigNoz workspace
|
||||
</Typography>
|
||||
|
||||
{!isErrorDeploymentsData && deploymentsData && (
|
||||
{!isError && hostsData && (
|
||||
<Card className="welcome-card">
|
||||
<Card.Content>
|
||||
<div className="workspace-ready-container">
|
||||
<div className="workspace-details">
|
||||
<div className="workspace-region">
|
||||
<Globe size={10} />
|
||||
|
||||
<Typography>{region}</Typography>
|
||||
</div>
|
||||
|
||||
<div className="workspace-url">
|
||||
<Link2 size={12} />
|
||||
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
|
||||
import DataSourceInfo from '../DataSourceInfo';
|
||||
|
||||
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
|
||||
|
||||
const mockHostsResponse: GetHosts200 = {
|
||||
status: 'success',
|
||||
data: {
|
||||
name: 'accepted-starfish',
|
||||
state: 'HEALTHY',
|
||||
tier: 'PREMIUM',
|
||||
hosts: [
|
||||
{
|
||||
name: 'accepted-starfish',
|
||||
is_default: true,
|
||||
url: 'https://accepted-starfish.test.cloud',
|
||||
},
|
||||
{
|
||||
name: 'custom-host',
|
||||
is_default: false,
|
||||
url: 'https://custom-host.test.cloud',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
describe('DataSourceInfo', () => {
|
||||
afterEach(() => server.resetHandlers());
|
||||
|
||||
it('renders the default workspace URL with protocol stripped', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
});
|
||||
|
||||
it('does not render workspace URL when GET /zeus/hosts fails', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(500), ctx.json({})),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/Your workspace is ready/i);
|
||||
expect(screen.queryByText(/signoz\.cloud/i)).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders workspace URL in the data-received view when telemetry is flowing', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={true} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
});
|
||||
});
|
||||
@@ -36,24 +36,6 @@ jest.mock('react-router-dom', () => {
|
||||
};
|
||||
});
|
||||
|
||||
// Mock deployments data hook to avoid unrelated network calls in this page
|
||||
jest.mock(
|
||||
'hooks/CustomDomain/useGetDeploymentsData',
|
||||
(): Record<string, unknown> => ({
|
||||
useGetDeploymentsData: (): {
|
||||
data: undefined;
|
||||
isLoading: boolean;
|
||||
isFetching: boolean;
|
||||
isError: boolean;
|
||||
} => ({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
|
||||
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
|
||||
const TEST_WORKSPACE_ID = 'w1';
|
||||
|
||||
@@ -39,7 +39,10 @@ function RelatedMetricsCard({ metric }: RelatedMetricsCardProps): JSX.Element {
|
||||
dataSource={DataSource.METRICS}
|
||||
/>
|
||||
)}
|
||||
<DashboardsAndAlertsPopover metricName={metric.name} />
|
||||
<DashboardsAndAlertsPopover
|
||||
dashboards={metric.dashboards}
|
||||
alerts={metric.alerts}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
import { useMemo, useState } from 'react';
|
||||
import { Card, Input, Select, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { InspectMetricsSeries } from 'api/metricsExplorer/getInspectMetricsDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import classNames from 'classnames';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
||||
@@ -40,10 +40,8 @@ import {
|
||||
* returns true if the feature flag is enabled, false otherwise
|
||||
* Show the inspect button in metrics explorer if the feature flag is enabled
|
||||
*/
|
||||
export function isInspectEnabled(
|
||||
metricType: MetrictypesTypeDTO | undefined,
|
||||
): boolean {
|
||||
return metricType === MetrictypesTypeDTO.gauge;
|
||||
export function isInspectEnabled(metricType: MetricType | undefined): boolean {
|
||||
return metricType === MetricType.GAUGE;
|
||||
}
|
||||
|
||||
export function getAllTimestampsOfMetrics(
|
||||
|
||||
@@ -1,17 +1,8 @@
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
import {
|
||||
Button,
|
||||
Collapse,
|
||||
Input,
|
||||
Menu,
|
||||
Popover,
|
||||
Skeleton,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import { Button, Collapse, Input, Menu, Popover, Typography } from 'antd';
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { useGetMetricAttributes } from 'api/generated/services/metrics';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { DataType } from 'container/LogDetailedView/TableView';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
@@ -24,8 +15,6 @@ import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
|
||||
import { AllAttributesProps, AllAttributesValueProps } from './types';
|
||||
import { getMetricDetailsQuery } from './utils';
|
||||
|
||||
const ALL_ATTRIBUTES_KEY = 'all-attributes';
|
||||
|
||||
export function AllAttributesValue({
|
||||
filterKey,
|
||||
filterValue,
|
||||
@@ -121,22 +110,12 @@ export function AllAttributesValue({
|
||||
|
||||
function AllAttributes({
|
||||
metricName,
|
||||
attributes,
|
||||
metricType,
|
||||
}: AllAttributesProps): JSX.Element {
|
||||
const [searchString, setSearchString] = useState('');
|
||||
const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);
|
||||
|
||||
const {
|
||||
data: attributesData,
|
||||
isLoading: isLoadingAttributes,
|
||||
isError: isErrorAttributes,
|
||||
} = useGetMetricAttributes({
|
||||
metricName,
|
||||
});
|
||||
|
||||
const attributes = useMemo(
|
||||
() => attributesData?.data?.data?.attributes ?? [],
|
||||
[attributesData],
|
||||
const [activeKey, setActiveKey] = useState<string | string[]>(
|
||||
'all-attributes',
|
||||
);
|
||||
|
||||
const { handleExplorerTabChange } = useHandleExplorerTabChange();
|
||||
@@ -199,7 +178,7 @@ function AllAttributes({
|
||||
attributes.filter(
|
||||
(attribute) =>
|
||||
attribute.key.toLowerCase().includes(searchString.toLowerCase()) ||
|
||||
attribute.values?.some((value) =>
|
||||
attribute.value.some((value) =>
|
||||
value.toLowerCase().includes(searchString.toLowerCase()),
|
||||
),
|
||||
),
|
||||
@@ -216,7 +195,7 @@ function AllAttributes({
|
||||
},
|
||||
value: {
|
||||
key: attribute.key,
|
||||
value: attribute.values,
|
||||
value: attribute.value,
|
||||
},
|
||||
}))
|
||||
: [],
|
||||
@@ -273,10 +252,6 @@ function AllAttributes({
|
||||
],
|
||||
);
|
||||
|
||||
const emptyText = isErrorAttributes
|
||||
? 'Error fetching attributes'
|
||||
: 'No attributes found';
|
||||
|
||||
const items = useMemo(
|
||||
() => [
|
||||
{
|
||||
@@ -295,7 +270,6 @@ function AllAttributes({
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
}}
|
||||
disabled={isLoadingAttributes}
|
||||
/>
|
||||
</div>
|
||||
),
|
||||
@@ -303,37 +277,25 @@ function AllAttributes({
|
||||
children: (
|
||||
<ResizeTable
|
||||
columns={columns}
|
||||
loading={isLoadingAttributes}
|
||||
tableLayout="fixed"
|
||||
dataSource={tableData}
|
||||
pagination={false}
|
||||
showHeader={false}
|
||||
className="metrics-accordion-content all-attributes-content"
|
||||
scroll={{ y: 600 }}
|
||||
locale={{
|
||||
emptyText,
|
||||
}}
|
||||
/>
|
||||
),
|
||||
},
|
||||
],
|
||||
[searchString, columns, isLoadingAttributes, tableData, emptyText],
|
||||
[columns, tableData, searchString],
|
||||
);
|
||||
|
||||
if (isLoadingAttributes) {
|
||||
return (
|
||||
<div className="all-attributes-skeleton-container">
|
||||
<Skeleton active paragraph={{ rows: 8 }} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Collapse
|
||||
bordered
|
||||
className="metrics-accordion"
|
||||
className="metrics-accordion metrics-metadata-accordion"
|
||||
activeKey={activeKey}
|
||||
onChange={(keys): void => setActiveKey(keys as string[])}
|
||||
onChange={(keys): void => setActiveKey(keys)}
|
||||
items={items}
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -2,84 +2,36 @@ import { useMemo } from 'react';
|
||||
import { generatePath } from 'react-router-dom';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Dropdown, Typography } from 'antd';
|
||||
import { Skeleton } from 'antd/lib';
|
||||
import {
|
||||
useGetMetricAlerts,
|
||||
useGetMetricDashboards,
|
||||
} from 'api/generated/services/metrics';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import history from 'lib/history';
|
||||
import { Bell, Grid } from 'lucide-react';
|
||||
import { pluralize } from 'utils/pluralize';
|
||||
|
||||
import { DashboardsAndAlertsPopoverProps } from './types';
|
||||
|
||||
function DashboardsAndAlertsPopover({
|
||||
metricName,
|
||||
alerts,
|
||||
dashboards,
|
||||
}: DashboardsAndAlertsPopoverProps): JSX.Element | null {
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const params = useUrlQuery();
|
||||
|
||||
const {
|
||||
data: alertsData,
|
||||
isLoading: isLoadingAlerts,
|
||||
isError: isErrorAlerts,
|
||||
} = useGetMetricAlerts(
|
||||
{
|
||||
metricName: metricName ?? '',
|
||||
},
|
||||
{
|
||||
query: {
|
||||
enabled: !!metricName,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const {
|
||||
data: dashboardsData,
|
||||
isLoading: isLoadingDashboards,
|
||||
isError: isErrorDashboards,
|
||||
} = useGetMetricDashboards(
|
||||
{
|
||||
metricName: metricName ?? '',
|
||||
},
|
||||
{
|
||||
query: {
|
||||
enabled: !!metricName,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const alerts = useMemo(() => {
|
||||
return alertsData?.data?.data?.alerts ?? [];
|
||||
}, [alertsData]);
|
||||
|
||||
const dashboards = useMemo(() => {
|
||||
const currentDashboards = dashboardsData?.data?.data?.dashboards ?? [];
|
||||
// Remove duplicate dashboards
|
||||
return currentDashboards.filter(
|
||||
(dashboard, index, self) =>
|
||||
index === self.findIndex((t) => t.dashboardId === dashboard.dashboardId),
|
||||
);
|
||||
}, [dashboardsData]);
|
||||
|
||||
const alertsPopoverContent = useMemo(() => {
|
||||
if (alerts && alerts.length > 0) {
|
||||
return alerts.map((alert) => ({
|
||||
key: alert.alertId,
|
||||
key: alert.alert_id,
|
||||
label: (
|
||||
<Typography.Link
|
||||
key={alert.alertId}
|
||||
key={alert.alert_id}
|
||||
onClick={(): void => {
|
||||
params.set(QueryParams.ruleId, alert.alertId);
|
||||
params.set(QueryParams.ruleId, alert.alert_id);
|
||||
history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
|
||||
}}
|
||||
className="dashboards-popover-content-item"
|
||||
>
|
||||
{alert.alertName || alert.alertId}
|
||||
{alert.alert_name || alert.alert_id}
|
||||
</Typography.Link>
|
||||
),
|
||||
}));
|
||||
@@ -87,44 +39,41 @@ function DashboardsAndAlertsPopover({
|
||||
return null;
|
||||
}, [alerts, params]);
|
||||
|
||||
const uniqueDashboards = useMemo(
|
||||
() =>
|
||||
dashboards?.filter(
|
||||
(item, index, self) =>
|
||||
index === self.findIndex((t) => t.dashboard_id === item.dashboard_id),
|
||||
),
|
||||
[dashboards],
|
||||
);
|
||||
|
||||
const dashboardsPopoverContent = useMemo(() => {
|
||||
if (dashboards && dashboards.length > 0) {
|
||||
return dashboards.map((dashboard) => ({
|
||||
key: dashboard.dashboardId,
|
||||
if (uniqueDashboards && uniqueDashboards.length > 0) {
|
||||
return uniqueDashboards.map((dashboard) => ({
|
||||
key: dashboard.dashboard_id,
|
||||
label: (
|
||||
<Typography.Link
|
||||
key={dashboard.dashboardId}
|
||||
key={dashboard.dashboard_id}
|
||||
onClick={(): void => {
|
||||
safeNavigate(
|
||||
generatePath(ROUTES.DASHBOARD, {
|
||||
dashboardId: dashboard.dashboardId,
|
||||
dashboardId: dashboard.dashboard_id,
|
||||
}),
|
||||
);
|
||||
}}
|
||||
className="dashboards-popover-content-item"
|
||||
>
|
||||
{dashboard.dashboardName || dashboard.dashboardId}
|
||||
{dashboard.dashboard_name || dashboard.dashboard_id}
|
||||
</Typography.Link>
|
||||
),
|
||||
}));
|
||||
}
|
||||
return null;
|
||||
}, [dashboards, safeNavigate]);
|
||||
}, [uniqueDashboards, safeNavigate]);
|
||||
|
||||
if (isLoadingAlerts || isLoadingDashboards) {
|
||||
return (
|
||||
<div className="dashboards-and-alerts-popover-container">
|
||||
<Skeleton title={false} paragraph={{ rows: 1 }} active />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// If there are no dashboards or alerts or both have errors, don't show the popover
|
||||
const hidePopover =
|
||||
(!dashboardsPopoverContent && !alertsPopoverContent) ||
|
||||
(isErrorAlerts && isErrorDashboards);
|
||||
if (hidePopover) {
|
||||
return <div className="dashboards-and-alerts-popover-container" />;
|
||||
if (!dashboardsPopoverContent && !alertsPopoverContent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -143,7 +92,8 @@ function DashboardsAndAlertsPopover({
|
||||
>
|
||||
<Grid size={12} color={Color.BG_SIENNA_500} />
|
||||
<Typography.Text>
|
||||
{pluralize(dashboards.length, 'dashboard')}
|
||||
{uniqueDashboards?.length} dashboard
|
||||
{uniqueDashboards?.length === 1 ? '' : 's'}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</Dropdown>
|
||||
@@ -162,7 +112,7 @@ function DashboardsAndAlertsPopover({
|
||||
>
|
||||
<Bell size={12} color={Color.BG_SAKURA_500} />
|
||||
<Typography.Text>
|
||||
{pluralize(alerts.length, 'alert rule')}
|
||||
{alerts?.length} alert {alerts?.length === 1 ? 'rule' : 'rules'}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</Dropdown>
|
||||
|
||||
@@ -1,129 +0,0 @@
|
||||
import { useMemo } from 'react';
|
||||
import { Skeleton, Tooltip, Typography } from 'antd';
|
||||
import { useGetMetricHighlights } from 'api/generated/services/metrics';
|
||||
|
||||
import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
|
||||
import { HighlightsProps } from './types';
|
||||
import {
|
||||
formatNumberToCompactFormat,
|
||||
formatTimestampToReadableDate,
|
||||
} from './utils';
|
||||
|
||||
function Highlights({ metricName }: HighlightsProps): JSX.Element {
|
||||
const {
|
||||
data: metricHighlightsData,
|
||||
isLoading: isLoadingMetricHighlights,
|
||||
isError: isErrorMetricHighlights,
|
||||
} = useGetMetricHighlights(
|
||||
{
|
||||
metricName: metricName ?? '',
|
||||
},
|
||||
{
|
||||
query: {
|
||||
enabled: !!metricName,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const metricHighlights = useMemo(() => {
|
||||
return metricHighlightsData?.data?.data ?? null;
|
||||
}, [metricHighlightsData]);
|
||||
|
||||
const dataPoints = useMemo(() => {
|
||||
if (!metricHighlights) {
|
||||
return null;
|
||||
}
|
||||
if (isErrorMetricHighlights) {
|
||||
return (
|
||||
<Typography.Text className="metric-details-grid-value">-</Typography.Text>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<Typography.Text className="metric-details-grid-value">
|
||||
<Tooltip title={metricHighlights?.dataPoints?.toLocaleString()}>
|
||||
{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
);
|
||||
}, [metricHighlights, isErrorMetricHighlights]);
|
||||
|
||||
const timeSeries = useMemo(() => {
|
||||
if (!metricHighlights) {
|
||||
return null;
|
||||
}
|
||||
if (isErrorMetricHighlights) {
|
||||
return (
|
||||
<Typography.Text className="metric-details-grid-value">-</Typography.Text>
|
||||
);
|
||||
}
|
||||
|
||||
const timeSeriesActive = formatNumberToCompactFormat(
|
||||
metricHighlights.activeTimeSeries,
|
||||
);
|
||||
const timeSeriesTotal = formatNumberToCompactFormat(
|
||||
metricHighlights.totalTimeSeries,
|
||||
);
|
||||
|
||||
return (
|
||||
<Typography.Text className="metric-details-grid-value">
|
||||
<Tooltip
|
||||
title="Active time series are those that have received data points in the last 1
|
||||
hour."
|
||||
placement="top"
|
||||
>
|
||||
<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
);
|
||||
}, [metricHighlights, isErrorMetricHighlights]);
|
||||
|
||||
const lastReceived = useMemo(() => {
|
||||
if (!metricHighlights) {
|
||||
return null;
|
||||
}
|
||||
if (isErrorMetricHighlights) {
|
||||
return (
|
||||
<Typography.Text className="metric-details-grid-value">-</Typography.Text>
|
||||
);
|
||||
}
|
||||
const displayText = formatTimestampToReadableDate(
|
||||
metricHighlights.lastReceived,
|
||||
);
|
||||
return (
|
||||
<Typography.Text className="metric-details-grid-value">
|
||||
<Tooltip title={displayText}>{displayText}</Tooltip>
|
||||
</Typography.Text>
|
||||
);
|
||||
}, [metricHighlights, isErrorMetricHighlights]);
|
||||
|
||||
if (isLoadingMetricHighlights) {
|
||||
return (
|
||||
<div className="metric-details-content-grid">
|
||||
<Skeleton title={false} paragraph={{ rows: 2 }} active />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="metric-details-content-grid">
|
||||
<div className="labels-row">
|
||||
<Typography.Text type="secondary" className="metric-details-grid-label">
|
||||
SAMPLES
|
||||
</Typography.Text>
|
||||
<Typography.Text type="secondary" className="metric-details-grid-label">
|
||||
TIME SERIES
|
||||
</Typography.Text>
|
||||
<Typography.Text type="secondary" className="metric-details-grid-label">
|
||||
LAST RECEIVED
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<div className="values-row">
|
||||
{dataPoints}
|
||||
{timeSeries}
|
||||
{lastReceived}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default Highlights;
|
||||
@@ -1,55 +1,45 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useQueryClient } from 'react-query';
|
||||
import { Button, Collapse, Input, Select, Skeleton, Typography } from 'antd';
|
||||
import { Button, Collapse, Input, Select, Typography } from 'antd';
|
||||
import { ColumnsType } from 'antd/es/table';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import {
|
||||
invalidateGetMetricMetadata,
|
||||
useUpdateMetricMetadata,
|
||||
} from 'api/generated/services/metrics';
|
||||
import {
|
||||
MetrictypesTemporalityDTO,
|
||||
MetrictypesTypeDTO,
|
||||
RenderErrorResponseDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { AxiosError } from 'axios';
|
||||
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import YAxisUnitSelector from 'components/YAxisUnitSelector';
|
||||
import { YAxisSource } from 'components/YAxisUnitSelector/types';
|
||||
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
|
||||
import { DataType } from 'container/LogDetailedView/TableView';
|
||||
import { useUpdateMetricMetadata } from 'hooks/metricsExplorer/useUpdateMetricMetadata';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { Edit2, Save, X } from 'lucide-react';
|
||||
|
||||
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
|
||||
import { MetricTypeViewRenderer } from '../Summary/utils';
|
||||
import {
|
||||
METRIC_METADATA_KEYS,
|
||||
METRIC_METADATA_TEMPORALITY_OPTIONS,
|
||||
METRIC_METADATA_TYPE_OPTIONS,
|
||||
METRIC_METADATA_UPDATE_ERROR_MESSAGE,
|
||||
} from './constants';
|
||||
import { MetadataProps, MetricMetadataState, TableFields } from './types';
|
||||
import { transformUpdateMetricMetadataRequest } from './utils';
|
||||
METRIC_TYPE_LABEL_MAP,
|
||||
METRIC_TYPE_VALUES_MAP,
|
||||
} from '../Summary/constants';
|
||||
import { MetricTypeRenderer } from '../Summary/utils';
|
||||
import { METRIC_METADATA_KEYS } from './constants';
|
||||
import { MetadataProps } from './types';
|
||||
import { determineIsMonotonic } from './utils';
|
||||
|
||||
function Metadata({
|
||||
metricName,
|
||||
metadata,
|
||||
isErrorMetricMetadata,
|
||||
isLoadingMetricMetadata,
|
||||
refetchMetricDetails,
|
||||
}: MetadataProps): JSX.Element {
|
||||
const [isEditing, setIsEditing] = useState(false);
|
||||
|
||||
const [
|
||||
metricMetadataState,
|
||||
setMetricMetadataState,
|
||||
] = useState<MetricMetadataState>({
|
||||
type: MetrictypesTypeDTO.sum,
|
||||
description: '',
|
||||
temporality: MetrictypesTemporalityDTO.unspecified,
|
||||
unit: '',
|
||||
metricMetadata,
|
||||
setMetricMetadata,
|
||||
] = useState<UpdateMetricMetadataProps>({
|
||||
metricType: metadata?.metric_type || MetricType.SUM,
|
||||
description: metadata?.description || '',
|
||||
temporality: metadata?.temporality,
|
||||
unit: metadata?.unit,
|
||||
});
|
||||
const { notifications } = useNotifications();
|
||||
const {
|
||||
@@ -61,24 +51,15 @@ function Metadata({
|
||||
);
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
// Initialize state from metadata api data
useEffect(() => {
if (metadata) {
setMetricMetadataState({
type: metadata.type,
description: metadata.description,
temporality: metadata.temporality,
unit: metadata.unit,
});
}
}, [metadata]);

const tableData = useMemo(
() =>
metadata
? Object.keys(metadata)
? Object.keys({
...metadata,
temporality: metadata?.temporality,
})
// Filter out monotonic as user input is not required
.filter((key) => key !== TableFields.IS_MONOTONIC)
.filter((key) => key !== 'monotonic')
.map((key) => ({
key,
value: {
@@ -91,96 +72,89 @@ function Metadata({
);
|
||||
|
||||
// Render un-editable field value
|
||||
const renderUneditableField = useCallback(
|
||||
(key: keyof MetricMetadataState, value: string) => {
|
||||
if (isErrorMetricMetadata) {
|
||||
return <FieldRenderer field="-" />;
|
||||
}
|
||||
if (key === TableFields.TYPE) {
|
||||
return <MetricTypeViewRenderer type={value as MetrictypesTypeDTO} />;
|
||||
}
|
||||
if (key === TableFields.Temporality) {
|
||||
const temporality = METRIC_METADATA_TEMPORALITY_OPTIONS.find(
|
||||
(option) => option.value === value,
|
||||
);
|
||||
return <FieldRenderer field={temporality?.label || '-'} />;
|
||||
}
|
||||
let fieldValue = value;
|
||||
if (key === TableFields.UNIT) {
|
||||
fieldValue = getUniversalNameFromMetricUnit(value);
|
||||
}
|
||||
return <FieldRenderer field={fieldValue || '-'} />;
|
||||
},
|
||||
[isErrorMetricMetadata],
|
||||
);
|
||||
const renderUneditableField = useCallback((key: string, value: string) => {
|
||||
if (key === 'metric_type') {
|
||||
return <MetricTypeRenderer type={value as MetricType} />;
|
||||
}
|
||||
let fieldValue = value;
|
||||
if (key === 'unit') {
|
||||
fieldValue = getUniversalNameFromMetricUnit(value);
|
||||
}
|
||||
return <FieldRenderer field={fieldValue || '-'} />;
|
||||
}, []);
|
||||
|
||||
const renderColumnValue = useCallback(
|
||||
(field: { value: string; key: keyof MetricMetadataState }): JSX.Element => {
|
||||
(field: { value: string; key: string }): JSX.Element => {
|
||||
if (!isEditing) {
|
||||
return renderUneditableField(field.key, field.value);
|
||||
}
|
||||
|
||||
// Don't allow editing of unit if it's already set
|
||||
const metricUnitAlreadySet =
|
||||
field.key === TableFields.UNIT && Boolean(metadata?.unit);
|
||||
const metricUnitAlreadySet = field.key === 'unit' && Boolean(metadata?.unit);
|
||||
if (metricUnitAlreadySet) {
|
||||
return renderUneditableField(field.key, field.value);
|
||||
}
|
||||
|
||||
if (field.key === TableFields.TYPE) {
|
||||
if (field.key === 'metric_type') {
|
||||
return (
|
||||
<Select
|
||||
data-testid="metric-type-select"
|
||||
options={METRIC_METADATA_TYPE_OPTIONS}
|
||||
value={metricMetadataState.type}
|
||||
options={Object.entries(METRIC_TYPE_VALUES_MAP).map(([key]) => ({
|
||||
value: key,
|
||||
label: METRIC_TYPE_LABEL_MAP[key as MetricType],
|
||||
}))}
|
||||
value={metricMetadata.metricType}
|
||||
onChange={(value): void => {
|
||||
setMetricMetadataState((prev) => ({
|
||||
setMetricMetadata((prev) => ({
|
||||
...prev,
|
||||
type: value,
|
||||
metricType: value as MetricType,
|
||||
}));
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (field.key === TableFields.UNIT) {
|
||||
if (field.key === 'unit') {
|
||||
return (
|
||||
<YAxisUnitSelector
|
||||
value={metricMetadataState.unit}
|
||||
value={metricMetadata.unit}
|
||||
onChange={(value): void => {
|
||||
setMetricMetadataState((prev) => ({ ...prev, unit: value }));
|
||||
setMetricMetadata((prev) => ({ ...prev, unit: value }));
|
||||
}}
|
||||
data-testid="unit-select"
|
||||
source={YAxisSource.EXPLORER}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (field.key === TableFields.Temporality) {
|
||||
const temporalityValue =
|
||||
metricMetadataState.temporality === MetrictypesTemporalityDTO.unspecified
|
||||
? undefined
|
||||
: metricMetadataState.temporality;
|
||||
if (field.key === 'temporality') {
|
||||
return (
|
||||
<Select
|
||||
data-testid="temporality-select"
|
||||
options={METRIC_METADATA_TEMPORALITY_OPTIONS}
|
||||
value={temporalityValue}
|
||||
options={Object.values(Temporality).map((key) => ({
|
||||
value: key,
|
||||
label: key,
|
||||
}))}
|
||||
value={metricMetadata.temporality}
|
||||
onChange={(value): void => {
|
||||
setMetricMetadataState((prev) => ({
|
||||
setMetricMetadata((prev) => ({
|
||||
...prev,
|
||||
temporality: value,
|
||||
temporality: value as Temporality,
|
||||
}));
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
if (field.key === TableFields.DESCRIPTION) {
|
||||
if (field.key === 'description') {
|
||||
return (
|
||||
<Input
|
||||
data-testid="description-input"
|
||||
name={field.key}
|
||||
defaultValue={metricMetadataState.description}
|
||||
defaultValue={
|
||||
metricMetadata[
|
||||
field.key as Exclude<keyof UpdateMetricMetadataProps, 'isMonotonic'>
|
||||
]
|
||||
}
|
||||
onChange={(e): void => {
|
||||
setMetricMetadataState((prev) => ({
|
||||
setMetricMetadata((prev) => ({
|
||||
...prev,
|
||||
[field.key]: e.target.value,
|
||||
}));
|
||||
@@ -190,7 +164,7 @@ function Metadata({
|
||||
}
|
||||
return <FieldRenderer field="-" />;
|
||||
},
|
||||
[isEditing, metadata?.unit, metricMetadataState, renderUneditableField],
|
||||
[isEditing, metadata?.unit, metricMetadata, renderUneditableField],
|
||||
);
|
||||
|
||||
const columns: ColumnsType<DataType> = useMemo(
@@ -227,14 +201,18 @@ function Metadata({
const handleSave = useCallback(() => {
updateMetricMetadata(
{
pathParams: {
metricName: metricName ?? '',
metricName,
payload: {
...metricMetadata,
isMonotonic: determineIsMonotonic(
metricMetadata.metricType,
metricMetadata.temporality,
),
},
data: transformUpdateMetricMetadataRequest(metricName, metricMetadataState),
},
{
onSuccess: (response): void => {
if (response.status === 200) {
if (response?.statusCode === 200) {
logEvent(MetricsExplorerEvents.MetricMetadataUpdated, {
[MetricsExplorerEventKeys.MetricName]: metricName,
[MetricsExplorerEventKeys.Tab]: 'summary',
|
||||
@@ -243,52 +221,32 @@ function Metadata({
|
||||
notifications.success({
|
||||
message: 'Metadata updated successfully',
|
||||
});
|
||||
refetchMetricDetails();
|
||||
setIsEditing(false);
|
||||
// TODO(@amlannandy): To update this to use invalidateGetMetricList
|
||||
// once we have switched to the V2 API in summary page
|
||||
queryClient.invalidateQueries([REACT_QUERY_KEY.GET_METRICS_LIST]);
|
||||
invalidateGetMetricMetadata(queryClient, {
|
||||
metricName,
|
||||
});
|
||||
queryClient.invalidateQueries(['metricsList']);
|
||||
} else {
|
||||
notifications.error({
|
||||
message: METRIC_METADATA_UPDATE_ERROR_MESSAGE,
|
||||
message:
|
||||
'Failed to update metadata, please try again. If the issue persists, please contact support.',
|
||||
});
|
||||
}
|
||||
},
|
||||
onError: (error): void => {
|
||||
const errorMessage = (error as AxiosError<RenderErrorResponseDTO>).response
|
||||
?.data.error?.message;
|
||||
onError: (): void =>
|
||||
notifications.error({
|
||||
message: errorMessage || METRIC_METADATA_UPDATE_ERROR_MESSAGE,
|
||||
});
|
||||
},
|
||||
message:
|
||||
'Failed to update metadata, please try again. If the issue persists, please contact support.',
|
||||
}),
|
||||
},
|
||||
);
|
||||
}, [
|
||||
updateMetricMetadata,
|
||||
metricName,
|
||||
metricMetadataState,
|
||||
metricMetadata,
|
||||
notifications,
|
||||
refetchMetricDetails,
|
||||
queryClient,
|
||||
]);
|
||||
|
||||
const cancelEdit = useCallback(
|
||||
(e: React.MouseEvent<HTMLElement, MouseEvent>): void => {
|
||||
e.stopPropagation();
|
||||
if (metadata) {
|
||||
setMetricMetadataState({
|
||||
type: metadata.type,
|
||||
description: metadata.description,
|
||||
unit: metadata.unit,
|
||||
temporality: metadata.temporality,
|
||||
});
|
||||
}
|
||||
setIsEditing(false);
|
||||
},
|
||||
[metadata],
|
||||
);
|
||||
|
||||
const actionButton = useMemo(() => {
|
||||
if (isEditing) {
|
||||
return (
|
||||
@@ -296,7 +254,10 @@ function Metadata({
|
||||
<Button
|
||||
className="action-button"
|
||||
type="text"
|
||||
onClick={cancelEdit}
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
setIsEditing(false);
|
||||
}}
|
||||
disabled={isUpdatingMetricsMetadata}
|
||||
>
|
||||
<X size={14} />
|
||||
@@ -333,7 +294,7 @@ function Metadata({
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}, [isEditing, isUpdatingMetricsMetadata, cancelEdit, handleSave]);
|
||||
}, [handleSave, isEditing, isUpdatingMetricsMetadata]);
|
||||
|
||||
const items = useMemo(
|
||||
() => [
|
||||
@@ -360,14 +321,6 @@ function Metadata({
|
||||
[actionButton, columns, tableData],
|
||||
);
|
||||
|
||||
if (isLoadingMetricMetadata) {
|
||||
return (
|
||||
<div className="metrics-metadata-skeleton-container">
|
||||
<Skeleton active paragraph={{ rows: 8 }} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Collapse
|
||||
bordered
|
||||
|
||||
@@ -39,7 +39,6 @@
gap: 12px;

.metric-details-content-grid {
height: 50px;
.labels-row,
.values-row {
display: grid;
@@ -73,7 +72,6 @@
.dashboards-and-alerts-popover-container {
display: flex;
gap: 16px;
height: 32px;

.dashboards-and-alerts-popover {
border-radius: 20px;
@@ -104,14 +102,6 @@
}
}

.metrics-metadata-skeleton-container {
height: 330px;
}

.all-attributes-skeleton-container {
height: 600px;
}

.metrics-accordion {
.ant-table-body {
&::-webkit-scrollbar {
@@ -158,6 +148,7 @@

.all-attributes-search-input {
width: 300px;
border: 1px solid var(--bg-slate-300);
}
}
|
||||
|
||||
@@ -170,7 +161,6 @@
|
||||
.ant-typography:first-child {
|
||||
font-family: 'Geist Mono';
|
||||
color: var(--bg-robin-400);
|
||||
background-color: transparent;
|
||||
}
|
||||
}
|
||||
.all-attributes-contribution {
|
||||
@@ -247,7 +237,6 @@
|
||||
}
|
||||
|
||||
.metric-metadata-value {
|
||||
height: 67px;
|
||||
background: rgba(22, 25, 34, 0.4);
|
||||
overflow-x: scroll;
|
||||
.field-renderer-container {
|
||||
@@ -341,26 +330,18 @@
|
||||
.metric-details-content {
|
||||
.metrics-accordion {
|
||||
.metrics-accordion-header {
|
||||
.action-menu {
|
||||
.action-button {
|
||||
.ant-typography {
|
||||
color: var(--bg-slate-400);
|
||||
}
|
||||
.action-button {
|
||||
.ant-typography {
|
||||
color: var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.metrics-accordion-content {
|
||||
.metric-metadata-key {
|
||||
.field-renderer-container {
|
||||
.label {
|
||||
color: var(--bg-slate-300);
|
||||
}
|
||||
}
|
||||
|
||||
.all-attributes-key {
|
||||
.ant-typography:last-child {
|
||||
color: var(--bg-vanilla-200);
|
||||
color: var(--bg-slate-400);
|
||||
background-color: var(--bg-robin-300);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,16 @@
import { useCallback, useEffect, useMemo } from 'react';
import { Color } from '@signozhq/design-tokens';
import { Button, Divider, Drawer, Empty, Typography } from 'antd';
import {
Button,
Divider,
Drawer,
Empty,
Skeleton,
Tooltip,
Typography,
} from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetMetricMetadata } from 'api/generated/services/metrics';
import { useGetMetricDetails } from 'hooks/metricsExplorer/useGetMetricDetails';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { Compass, Crosshair, X } from 'lucide-react';

@@ -11,12 +19,16 @@ import ROUTES from '../../../constants/routes';
import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
import { isInspectEnabled } from '../Inspect/utils';
import { formatNumberIntoHumanReadableFormat } from '../Summary/utils';
import AllAttributes from './AllAttributes';
import DashboardsAndAlertsPopover from './DashboardsAndAlertsPopover';
import Highlights from './Highlights';
import Metadata from './Metadata';
import { MetricDetailsProps } from './types';
import { getMetricDetailsQuery } from './utils';
import {
formatNumberToCompactFormat,
formatTimestampToReadableDate,
getMetricDetailsQuery,
} from './utils';

import './MetricDetails.styles.scss';
import '../Summary/Summary.styles.scss';
|
||||
@@ -31,52 +43,55 @@ function MetricDetails({
const { handleExplorerTabChange } = useHandleExplorerTabChange();

const {
data: metricMetadataResponse,
isLoading: isLoadingMetricMetadata,
isError: isErrorMetricMetadata,
} = useGetMetricMetadata(
{
metricName: metricName ?? '',
},
{
query: {
enabled: !!metricName,
},
},
);
data,
isLoading,
isFetching,
error: metricDetailsError,
refetch: refetchMetricDetails,
} = useGetMetricDetails(metricName ?? '', {
enabled: !!metricName,
});
|
||||
|
||||
const metadata = useMemo(() => {
|
||||
if (
|
||||
!metricMetadataResponse ||
|
||||
!metricMetadataResponse.data ||
|
||||
!metricMetadataResponse.data.data
|
||||
) {
|
||||
const metric = data?.payload?.data;
|
||||
|
||||
const lastReceived = useMemo(() => {
|
||||
if (!metric) {
|
||||
return null;
|
||||
}
|
||||
const {
|
||||
type,
|
||||
description,
|
||||
unit,
|
||||
temporality,
|
||||
isMonotonic,
|
||||
} = metricMetadataResponse.data.data;
|
||||
return formatTimestampToReadableDate(metric.lastReceived);
|
||||
}, [metric]);
|
||||
|
||||
return {
|
||||
type,
|
||||
description,
|
||||
unit,
|
||||
temporality,
|
||||
isMonotonic,
|
||||
};
|
||||
}, [metricMetadataResponse]);
|
||||
const showInspectFeature = useMemo(
|
||||
() => isInspectEnabled(metric?.metadata?.metric_type),
|
||||
[metric],
|
||||
);
|
||||
|
||||
const showInspectFeature = useMemo(() => isInspectEnabled(metadata?.type), [
|
||||
metadata,
|
||||
]);
|
||||
const isMetricDetailsLoading = isLoading || isFetching;
|
||||
|
||||
const timeSeries = useMemo(() => {
|
||||
if (!metric) {
|
||||
return null;
|
||||
}
|
||||
const timeSeriesActive = formatNumberToCompactFormat(metric.timeSeriesActive);
|
||||
const timeSeriesTotal = formatNumberToCompactFormat(metric.timeSeriesTotal);
|
||||
|
||||
return (
|
||||
<Tooltip
|
||||
title="Active time series are those that have received data points in the last 1
|
||||
hour."
|
||||
placement="top"
|
||||
>
|
||||
<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
|
||||
</Tooltip>
|
||||
);
|
||||
}, [metric]);
|
||||
|
||||
const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
|
||||
if (metricName) {
|
||||
const compositeQuery = getMetricDetailsQuery(metricName, metadata?.type);
|
||||
const compositeQuery = getMetricDetailsQuery(
|
||||
metricName,
|
||||
metric?.metadata?.metric_type,
|
||||
);
|
||||
handleExplorerTabChange(
|
||||
PANEL_TYPES.TIME_SERIES,
|
||||
{
|
||||
@@ -92,7 +107,9 @@ function MetricDetails({
|
||||
[MetricsExplorerEventKeys.Modal]: 'metric-details',
|
||||
});
|
||||
}
|
||||
}, [metricName, handleExplorerTabChange, metadata?.type]);
|
||||
}, [metricName, handleExplorerTabChange, metric?.metadata?.metric_type]);
|
||||
|
||||
const isMetricDetailsError = metricDetailsError || !metric;
|
||||
|
||||
useEffect(() => {
|
||||
logEvent(MetricsExplorerEvents.ModalOpened, {
|
||||
@@ -100,10 +117,6 @@ function MetricDetails({
|
||||
});
|
||||
}, []);
|
||||
|
||||
if (!metricName) {
|
||||
return <Empty description="Metric not found" />;
|
||||
}
|
||||
|
||||
return (
|
||||
<Drawer
|
||||
width="60%"
|
||||
@@ -111,13 +124,13 @@ function MetricDetails({
|
||||
<div className="metric-details-header">
|
||||
<div className="metric-details-title">
|
||||
<Divider type="vertical" />
|
||||
<Typography.Text>{metricName}</Typography.Text>
|
||||
<Typography.Text>{metric?.name}</Typography.Text>
|
||||
</div>
|
||||
<div className="metric-details-header-buttons">
|
||||
<Button
|
||||
onClick={goToMetricsExplorerwithSelectedMetric}
|
||||
icon={<Compass size={16} />}
|
||||
disabled={!metricName || isLoadingMetricMetadata}
|
||||
disabled={!metricName}
|
||||
data-testid="open-in-explorer-button"
|
||||
>
|
||||
Open in Explorer
|
||||
@@ -127,11 +140,10 @@ function MetricDetails({
|
||||
<Button
|
||||
className="inspect-metrics-button"
|
||||
aria-label="Inspect Metric"
|
||||
disabled={!metricName || isLoadingMetricMetadata}
|
||||
icon={<Crosshair size={18} />}
|
||||
onClick={(): void => {
|
||||
if (metricName) {
|
||||
openInspectModal(metricName);
|
||||
if (metric?.name) {
|
||||
openInspectModal(metric.name);
|
||||
}
|
||||
}}
|
||||
data-testid="inspect-metric-button"
|
||||
@@ -151,17 +163,60 @@ function MetricDetails({
|
||||
destroyOnClose
|
||||
closeIcon={<X size={16} />}
|
||||
>
|
||||
<div className="metric-details-content">
|
||||
<Highlights metricName={metricName} />
|
||||
<DashboardsAndAlertsPopover metricName={metricName} />
|
||||
<Metadata
|
||||
metricName={metricName}
|
||||
metadata={metadata}
|
||||
isErrorMetricMetadata={isErrorMetricMetadata}
|
||||
isLoadingMetricMetadata={isLoadingMetricMetadata}
|
||||
/>
|
||||
<AllAttributes metricName={metricName} metricType={metadata?.type} />
|
||||
</div>
|
||||
{isMetricDetailsLoading && (
|
||||
<div data-testid="metric-details-skeleton">
|
||||
<Skeleton active />
|
||||
</div>
|
||||
)}
|
||||
{isMetricDetailsError && !isMetricDetailsLoading && (
|
||||
<Empty description="Error fetching metric details" />
|
||||
)}
|
||||
{!isMetricDetailsLoading && !isMetricDetailsError && (
|
||||
<div className="metric-details-content">
|
||||
<div className="metric-details-content-grid">
|
||||
<div className="labels-row">
|
||||
<Typography.Text type="secondary" className="metric-details-grid-label">
|
||||
SAMPLES
|
||||
</Typography.Text>
|
||||
<Typography.Text type="secondary" className="metric-details-grid-label">
|
||||
TIME SERIES
|
||||
</Typography.Text>
|
||||
<Typography.Text type="secondary" className="metric-details-grid-label">
|
||||
LAST RECEIVED
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<div className="values-row">
|
||||
<Typography.Text className="metric-details-grid-value">
|
||||
<Tooltip title={metric?.samples.toLocaleString()}>
|
||||
{formatNumberIntoHumanReadableFormat(metric?.samples)}
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
<Typography.Text className="metric-details-grid-value">
|
||||
<Tooltip title={timeSeries}>{timeSeries}</Tooltip>
|
||||
</Typography.Text>
|
||||
<Typography.Text className="metric-details-grid-value">
|
||||
<Tooltip title={lastReceived}>{lastReceived}</Tooltip>
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
<DashboardsAndAlertsPopover
|
||||
dashboards={metric.dashboards}
|
||||
alerts={metric.alerts}
|
||||
/>
|
||||
<Metadata
|
||||
metricName={metric?.name}
|
||||
metadata={metric.metadata}
|
||||
refetchMetricDetails={refetchMetricDetails}
|
||||
/>
|
||||
{metric.attributes && (
|
||||
<AllAttributes
|
||||
metricName={metric?.name}
|
||||
attributes={metric.attributes}
|
||||
metricType={metric?.metadata?.metric_type}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</Drawer>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,13 +1,11 @@
import * as reactUseHooks from 'react-use';
import { render, screen } from '@testing-library/react';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { fireEvent, render, screen } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import * as useHandleExplorerTabChange from 'hooks/useHandleExplorerTabChange';
import { userEvent } from 'tests/test-utils';

import { MetricDetailsAttribute } from '../../../../api/metricsExplorer/getMetricDetails';
import ROUTES from '../../../../constants/routes';
import AllAttributes, { AllAttributesValue } from '../AllAttributes';
import { getMockMetricAttributesData } from './testUtlls';
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
@@ -23,28 +21,33 @@ jest
|
||||
});
|
||||
|
||||
const mockMetricName = 'test-metric';
|
||||
const mockMetricType = MetrictypesTypeDTO.gauge;
|
||||
const mockMetricType = MetricType.GAUGE;
|
||||
const mockAttributes: MetricDetailsAttribute[] = [
|
||||
{
|
||||
key: 'attribute1',
|
||||
value: ['value1', 'value2'],
|
||||
valueCount: 2,
|
||||
},
|
||||
{
|
||||
key: 'attribute2',
|
||||
value: ['value3'],
|
||||
valueCount: 1,
|
||||
},
|
||||
];
|
||||
|
||||
const mockUseCopyToClipboard = jest.fn();
|
||||
jest
|
||||
.spyOn(reactUseHooks, 'useCopyToClipboard')
|
||||
.mockReturnValue([{ value: 'value1' }, mockUseCopyToClipboard] as any);
|
||||
|
||||
const useGetMetricAttributesMock = jest.spyOn(
|
||||
metricsExplorerHooks,
|
||||
'useGetMetricAttributes',
|
||||
);
|
||||
|
||||
describe('AllAttributes', () => {
|
||||
beforeEach(() => {
|
||||
useGetMetricAttributesMock.mockReturnValue({
|
||||
...getMockMetricAttributesData(),
|
||||
});
|
||||
});
|
||||
|
||||
it('renders attributes section with title', () => {
|
||||
render(
|
||||
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
|
||||
<AllAttributes
|
||||
metricName={mockMetricName}
|
||||
attributes={mockAttributes}
|
||||
metricType={mockMetricType}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('All Attributes')).toBeInTheDocument();
|
||||
@@ -52,7 +55,11 @@ describe('AllAttributes', () => {
|
||||
|
||||
it('renders all attribute keys and values', () => {
|
||||
render(
|
||||
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
|
||||
<AllAttributes
|
||||
metricName={mockMetricName}
|
||||
attributes={mockAttributes}
|
||||
metricType={mockMetricType}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Check attribute keys are rendered
|
||||
@@ -67,7 +74,11 @@ describe('AllAttributes', () => {
|
||||
|
||||
it('renders value counts correctly', () => {
|
||||
render(
|
||||
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
|
||||
<AllAttributes
|
||||
metricName={mockMetricName}
|
||||
attributes={mockAttributes}
|
||||
metricType={mockMetricType}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('2')).toBeInTheDocument(); // For attribute1
|
||||
@@ -75,35 +86,41 @@ describe('AllAttributes', () => {
|
||||
});
|
||||
|
||||
it('handles empty attributes array', () => {
|
||||
useGetMetricAttributesMock.mockReturnValue({
|
||||
...getMockMetricAttributesData({
|
||||
data: {
|
||||
attributes: [],
|
||||
totalKeys: 0,
|
||||
},
|
||||
}),
|
||||
});
|
||||
render(
|
||||
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
|
||||
<AllAttributes
|
||||
metricName={mockMetricName}
|
||||
attributes={[]}
|
||||
metricType={mockMetricType}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('All Attributes')).toBeInTheDocument();
|
||||
expect(screen.getByText('No attributes found')).toBeInTheDocument();
|
||||
expect(screen.queryByText('No data')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('clicking on an attribute key opens the explorer with the attribute filter applied', async () => {
|
||||
it('clicking on an attribute key opens the explorer with the attribute filter applied', () => {
|
||||
render(
|
||||
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
|
||||
<AllAttributes
|
||||
metricName={mockMetricName}
|
||||
attributes={mockAttributes}
|
||||
metricType={mockMetricType}
|
||||
/>,
|
||||
);
|
||||
await userEvent.click(screen.getByText('attribute1'));
|
||||
fireEvent.click(screen.getByText('attribute1'));
|
||||
expect(mockHandleExplorerTabChange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('filters attributes based on search input', async () => {
|
||||
it('filters attributes based on search input', () => {
|
||||
render(
|
||||
<AllAttributes metricName={mockMetricName} metricType={mockMetricType} />,
|
||||
<AllAttributes
|
||||
metricName={mockMetricName}
|
||||
attributes={mockAttributes}
|
||||
metricType={mockMetricType}
|
||||
/>,
|
||||
);
|
||||
await userEvent.type(screen.getByPlaceholderText('Search'), 'value1');
|
||||
fireEvent.change(screen.getByPlaceholderText('Search'), {
|
||||
target: { value: 'value1' },
|
||||
});
|
||||
|
||||
expect(screen.getByText('attribute1')).toBeInTheDocument();
|
||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
||||
@@ -127,7 +144,7 @@ describe('AllAttributesValue', () => {
|
||||
expect(screen.getByText('value2')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('loads more attributes when show more button is clicked', async () => {
|
||||
it('loads more attributes when show more button is clicked', () => {
|
||||
render(
|
||||
<AllAttributesValue
|
||||
filterKey="attribute1"
|
||||
@@ -138,7 +155,7 @@ describe('AllAttributesValue', () => {
|
||||
/>,
|
||||
);
|
||||
expect(screen.queryByText('value6')).not.toBeInTheDocument();
|
||||
await userEvent.click(screen.getByText('Show More'));
|
||||
fireEvent.click(screen.getByText('Show More'));
|
||||
expect(screen.getByText('value6')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
@@ -155,7 +172,7 @@ describe('AllAttributesValue', () => {
|
||||
expect(screen.queryByText('Show More')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('copy button should copy the attribute value to the clipboard', async () => {
|
||||
it('copy button should copy the attribute value to the clipboard', () => {
|
||||
render(
|
||||
<AllAttributesValue
|
||||
filterKey="attribute1"
|
||||
@@ -166,13 +183,13 @@ describe('AllAttributesValue', () => {
|
||||
/>,
|
||||
);
|
||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
||||
await userEvent.click(screen.getByText('value1'));
|
||||
fireEvent.click(screen.getByText('value1'));
|
||||
expect(screen.getByText('Copy Attribute')).toBeInTheDocument();
|
||||
await userEvent.click(screen.getByText('Copy Attribute'));
|
||||
fireEvent.click(screen.getByText('Copy Attribute'));
|
||||
expect(mockUseCopyToClipboard).toHaveBeenCalledWith('value1');
|
||||
});
|
||||
|
||||
it('explorer button should go to metrics explore with the attribute filter applied', async () => {
|
||||
it('explorer button should go to metrics explore with the attribute filter applied', () => {
|
||||
render(
|
||||
<AllAttributesValue
|
||||
filterKey="attribute1"
|
||||
@@ -183,10 +200,10 @@ describe('AllAttributesValue', () => {
|
||||
/>,
|
||||
);
|
||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
||||
await userEvent.click(screen.getByText('value1'));
|
||||
fireEvent.click(screen.getByText('value1'));
|
||||
|
||||
expect(screen.getByText('Open in Explorer')).toBeInTheDocument();
|
||||
await userEvent.click(screen.getByText('Open in Explorer'));
|
||||
fireEvent.click(screen.getByText('Open in Explorer'));
|
||||
expect(mockGoToMetricsExploreWithAppliedAttribute).toHaveBeenCalledWith(
|
||||
'attribute1',
|
||||
'value1',
|
||||
|
||||
@@ -1,17 +1,26 @@
import { render, screen } from '@testing-library/react';
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import { fireEvent, render, screen } from '@testing-library/react';
import { QueryParams } from 'constants/query';
import { userEvent } from 'tests/test-utils';

import DashboardsAndAlertsPopover from '../DashboardsAndAlertsPopover';
import {
getMockAlertsData,
getMockDashboardsData,
MOCK_ALERT_1,
MOCK_ALERT_2,
MOCK_DASHBOARD_1,
MOCK_DASHBOARD_2,
} from './testUtlls';

const mockAlert1 = {
alert_id: '1',
alert_name: 'Alert 1',
};
const mockAlert2 = {
alert_id: '2',
alert_name: 'Alert 2',
};
const mockDashboard1 = {
dashboard_id: '1',
dashboard_name: 'Dashboard 1',
};
const mockDashboard2 = {
dashboard_id: '2',
dashboard_name: 'Dashboard 2',
};
const mockAlerts = [mockAlert1, mockAlert2];
const mockDashboards = [mockDashboard1, mockDashboard2];
|
||||
|
||||
const mockSafeNavigate = jest.fn();
|
||||
jest.mock('hooks/useSafeNavigate', () => ({
|
||||
@@ -19,6 +28,7 @@ jest.mock('hooks/useSafeNavigate', () => ({
|
||||
safeNavigate: mockSafeNavigate,
|
||||
}),
|
||||
}));
|
||||
|
||||
const mockSetQuery = jest.fn();
|
||||
const mockUrlQuery = {
|
||||
set: mockSetQuery,
|
||||
@@ -29,155 +39,125 @@ jest.mock('hooks/useUrlQuery', () => ({
|
||||
default: jest.fn(() => mockUrlQuery),
|
||||
}));
|
||||
|
||||
const MOCK_METRIC_NAME = 'test-metric';
|
||||
|
||||
const useGetMetricAlertsMock = jest.spyOn(
|
||||
metricsExplorerHooks,
|
||||
'useGetMetricAlerts',
|
||||
);
|
||||
const useGetMetricDashboardsMock = jest.spyOn(
|
||||
metricsExplorerHooks,
|
||||
'useGetMetricDashboards',
|
||||
);
|
||||
|
||||
describe('DashboardsAndAlertsPopover', () => {
|
||||
beforeEach(() => {
|
||||
useGetMetricAlertsMock.mockReturnValue(getMockAlertsData());
|
||||
useGetMetricDashboardsMock.mockReturnValue(getMockDashboardsData());
|
||||
});
|
||||
|
||||
it('renders the popover correctly with multiple dashboards and alerts', () => {
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
expect(screen.getByText(`2 dashboards`)).toBeInTheDocument();
|
||||
expect(screen.getByText(`2 alert rules`)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders null with no dashboards and alerts', () => {
|
||||
useGetMetricAlertsMock.mockReturnValue(
|
||||
getMockAlertsData({
|
||||
data: undefined,
|
||||
}),
|
||||
);
|
||||
useGetMetricDashboardsMock.mockReturnValue(
|
||||
getMockDashboardsData({
|
||||
data: undefined,
|
||||
}),
|
||||
);
|
||||
|
||||
const { container } = render(
|
||||
<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />,
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={mockAlerts}
|
||||
dashboards={mockDashboards}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(
|
||||
container.querySelector('dashboards-and-alerts-popover-container'),
|
||||
).toBeNull();
|
||||
screen.getByText(`${mockDashboards.length} dashboards`),
|
||||
).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText(`${mockAlerts.length} alert rules`),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders null with no dashboards and alerts', () => {
|
||||
const { container } = render(
|
||||
<DashboardsAndAlertsPopover alerts={[]} dashboards={[]} />,
|
||||
);
|
||||
expect(container).toBeEmptyDOMElement();
|
||||
});
|
||||
|
||||
it('renders popover with single dashboard and alert', () => {
|
||||
useGetMetricAlertsMock.mockReturnValue(
|
||||
getMockAlertsData({
|
||||
data: {
|
||||
alerts: [MOCK_ALERT_1],
|
||||
},
|
||||
}),
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={[mockAlert1]}
|
||||
dashboards={[mockDashboard1]}
|
||||
/>,
|
||||
);
|
||||
useGetMetricDashboardsMock.mockReturnValue(
|
||||
getMockDashboardsData({
|
||||
data: {
|
||||
dashboards: [MOCK_DASHBOARD_1],
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
expect(screen.getByText(`1 dashboard`)).toBeInTheDocument();
|
||||
expect(screen.getByText(`1 alert rule`)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders popover with dashboard id if name is not available', async () => {
|
||||
useGetMetricDashboardsMock.mockReturnValue(
|
||||
getMockDashboardsData({
|
||||
data: {
|
||||
dashboards: [{ ...MOCK_DASHBOARD_1, dashboardName: '' }],
|
||||
},
|
||||
}),
|
||||
it('renders popover with dashboard id if name is not available', () => {
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={mockAlerts}
|
||||
dashboards={[{ ...mockDashboard1, dashboard_name: undefined } as any]}
|
||||
/>,
|
||||
);
|
||||
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
await userEvent.click(screen.getByText(`1 dashboard`));
|
||||
expect(screen.getByText(MOCK_DASHBOARD_1.dashboardId)).toBeInTheDocument();
|
||||
fireEvent.click(screen.getByText(`1 dashboard`));
|
||||
expect(screen.getByText(mockDashboard1.dashboard_id)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders popover with alert id if name is not available', async () => {
|
||||
useGetMetricAlertsMock.mockReturnValue(
|
||||
getMockAlertsData({
|
||||
data: {
|
||||
alerts: [{ ...MOCK_ALERT_1, alertName: '' }],
|
||||
},
|
||||
}),
|
||||
it('renders popover with alert id if name is not available', () => {
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={[{ ...mockAlert1, alert_name: undefined } as any]}
|
||||
dashboards={mockDashboards}
|
||||
/>,
|
||||
);
|
||||
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
await userEvent.click(screen.getByText(`1 alert rule`));
|
||||
expect(screen.getByText(MOCK_ALERT_1.alertId)).toBeInTheDocument();
|
||||
fireEvent.click(screen.getByText(`1 alert rule`));
|
||||
expect(screen.getByText(mockAlert1.alert_id)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('navigates to the dashboard when the dashboard is clicked', async () => {
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
it('navigates to the dashboard when the dashboard is clicked', () => {
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={mockAlerts}
|
||||
dashboards={mockDashboards}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Click on 2 dashboards button
|
||||
await userEvent.click(screen.getByText(`2 dashboards`));
|
||||
fireEvent.click(screen.getByText(`${mockDashboards.length} dashboards`));
|
||||
// Popover showing list of 2 dashboards should be visible
|
||||
expect(screen.getByText(MOCK_DASHBOARD_1.dashboardName)).toBeInTheDocument();
|
||||
expect(screen.getByText(MOCK_DASHBOARD_2.dashboardName)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockDashboard1.dashboard_name)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockDashboard2.dashboard_name)).toBeInTheDocument();
|
||||
|
||||
// Click on the first dashboard
|
||||
await userEvent.click(screen.getByText(MOCK_DASHBOARD_1.dashboardName));
|
||||
fireEvent.click(screen.getByText(mockDashboard1.dashboard_name));
|
||||
|
||||
// Should navigate to the dashboard
|
||||
expect(mockSafeNavigate).toHaveBeenCalledWith(
|
||||
`/dashboard/${MOCK_DASHBOARD_1.dashboardId}`,
|
||||
`/dashboard/${mockDashboard1.dashboard_id}`,
|
||||
);
|
||||
});
|
||||
|
||||
it('navigates to the alert when the alert is clicked', async () => {
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
it('navigates to the alert when the alert is clicked', () => {
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={mockAlerts}
|
||||
dashboards={mockDashboards}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Click on 2 alert rules button
|
||||
await userEvent.click(screen.getByText(`2 alert rules`));
|
||||
fireEvent.click(screen.getByText(`${mockAlerts.length} alert rules`));
|
||||
// Popover showing list of 2 alert rules should be visible
|
||||
expect(screen.getByText(MOCK_ALERT_1.alertName)).toBeInTheDocument();
|
||||
expect(screen.getByText(MOCK_ALERT_2.alertName)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockAlert1.alert_name)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockAlert2.alert_name)).toBeInTheDocument();
|
||||
|
||||
// Click on the first alert rule
|
||||
await userEvent.click(screen.getByText(MOCK_ALERT_1.alertName));
|
||||
fireEvent.click(screen.getByText(mockAlert1.alert_name));
|
||||
|
||||
// Should navigate to the alert rule
|
||||
expect(mockSetQuery).toHaveBeenCalledWith(
|
||||
QueryParams.ruleId,
|
||||
MOCK_ALERT_1.alertId,
|
||||
mockAlert1.alert_id,
|
||||
);
|
||||
});
|
||||
|
||||
it('renders unique dashboards even when there are duplicates', async () => {
|
||||
useGetMetricDashboardsMock.mockReturnValue(
|
||||
getMockDashboardsData({
|
||||
data: {
|
||||
dashboards: [MOCK_DASHBOARD_1, MOCK_DASHBOARD_2, MOCK_DASHBOARD_1],
|
||||
},
|
||||
}),
|
||||
it('renders unique dashboards even when there are duplicates', () => {
|
||||
render(
|
||||
<DashboardsAndAlertsPopover
|
||||
alerts={mockAlerts}
|
||||
dashboards={[...mockDashboards, mockDashboard1]}
|
||||
/>,
|
||||
);
|
||||
expect(
|
||||
screen.getByText(`${mockDashboards.length} dashboards`),
|
||||
).toBeInTheDocument();
|
||||
|
||||
render(<DashboardsAndAlertsPopover metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
expect(screen.getByText('2 dashboards')).toBeInTheDocument();
|
||||
|
||||
await userEvent.click(screen.getByText('2 dashboards'));
|
||||
expect(screen.getByText(MOCK_DASHBOARD_1.dashboardName)).toBeInTheDocument();
|
||||
expect(screen.getByText(MOCK_DASHBOARD_2.dashboardName)).toBeInTheDocument();
|
||||
fireEvent.click(screen.getByText(`${mockDashboards.length} dashboards`));
|
||||
expect(screen.getByText(mockDashboard1.dashboard_name)).toBeInTheDocument();
|
||||
expect(screen.getByText(mockDashboard2.dashboard_name)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
import { render } from '@testing-library/react';
|
||||
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
||||
|
||||
import Highlights from '../Highlights';
|
||||
import { formatTimestampToReadableDate } from '../utils';
|
||||
import { getMockMetricHighlightsData } from './testUtlls';
|
||||
|
||||
const MOCK_METRIC_NAME = 'test-metric';
|
||||
const METRIC_DETAILS_GRID_VALUE_SELECTOR = '.metric-details-grid-value';
|
||||
|
||||
const useGetMetricHighlightsMock = jest.spyOn(
|
||||
metricsExplorerHooks,
|
||||
'useGetMetricHighlights',
|
||||
);
|
||||
|
||||
describe('Highlights', () => {
|
||||
beforeEach(() => {
|
||||
useGetMetricHighlightsMock.mockReturnValue(getMockMetricHighlightsData());
|
||||
});
|
||||
|
||||
it('should render all highlights data correctly', () => {
|
||||
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
const metricHighlightsValues = container.querySelectorAll(
|
||||
METRIC_DETAILS_GRID_VALUE_SELECTOR,
|
||||
);
|
||||
|
||||
expect(metricHighlightsValues).toHaveLength(3);
|
||||
expect(metricHighlightsValues[0].textContent).toBe('1M+');
|
||||
expect(metricHighlightsValues[1].textContent).toBe('1M total ⎯ 1M active');
|
||||
expect(metricHighlightsValues[2].textContent).toBe(
|
||||
formatTimestampToReadableDate('2026-01-24T00:00:00Z'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should render "-" for highlights data when there is an error', () => {
|
||||
useGetMetricHighlightsMock.mockReturnValue(
|
||||
getMockMetricHighlightsData(
|
||||
{},
|
||||
{
|
||||
isError: true,
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
const metricHighlightsValues = container.querySelectorAll(
|
||||
METRIC_DETAILS_GRID_VALUE_SELECTOR,
|
||||
);
|
||||
expect(metricHighlightsValues[0].textContent).toBe('-');
|
||||
expect(metricHighlightsValues[1].textContent).toBe('-');
|
||||
expect(metricHighlightsValues[2].textContent).toBe('-');
|
||||
});
|
||||
|
||||
it('should render loading state when data is loading', () => {
|
||||
useGetMetricHighlightsMock.mockReturnValue(
|
||||
getMockMetricHighlightsData(
|
||||
{},
|
||||
{
|
||||
isLoading: true,
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
expect(container.querySelector('.ant-skeleton')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not render grid values when there is no data', () => {
|
||||
useGetMetricHighlightsMock.mockReturnValue(
|
||||
getMockMetricHighlightsData({
|
||||
data: undefined,
|
||||
}),
|
||||
);
|
||||
|
||||
const { container } = render(<Highlights metricName={MOCK_METRIC_NAME} />);
|
||||
|
||||
const metricHighlightsValues = container.querySelectorAll(
|
||||
METRIC_DETAILS_GRID_VALUE_SELECTOR,
|
||||
);
|
||||
expect(metricHighlightsValues).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
@@ -1,25 +1,16 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
||||
import {
|
||||
GetMetricMetadata200,
|
||||
MetrictypesTemporalityDTO,
|
||||
MetrictypesTypeDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import {
|
||||
UniversalYAxisUnit,
|
||||
YAxisUnitSelectorProps,
|
||||
} from 'components/YAxisUnitSelector/types';
|
||||
import * as useUpdateMetricMetadataHooks from 'hooks/metricsExplorer/useUpdateMetricMetadata';
|
||||
import * as useNotificationsHooks from 'hooks/useNotifications';
|
||||
import { userEvent } from 'tests/test-utils';
|
||||
import { SelectOption } from 'types/common/select';
|
||||
|
||||
import Metadata from '../Metadata';
|
||||
import { MetricMetadata } from '../types';
|
||||
import { transformMetricMetadata } from '../utils';
|
||||
import { getMockMetricMetadataData } from './testUtlls';
|
||||
|
||||
// Mock antd select for testing
|
||||
jest.mock('antd', () => ({
|
||||
@@ -81,18 +72,13 @@ jest.mock('react-query', () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
const mockUseUpdateMetricMetadataHook = jest.spyOn(
|
||||
metricsExplorerHooks,
|
||||
'useUpdateMetricMetadata',
|
||||
);
|
||||
type UseUpdateMetricMetadataResult = ReturnType<
|
||||
typeof metricsExplorerHooks.useUpdateMetricMetadata
|
||||
>;
|
||||
const mockUseUpdateMetricMetadata = jest.fn();
|
||||
|
||||
const mockMetricMetadata = transformMetricMetadata(
|
||||
(getMockMetricMetadataData().data as AxiosResponse<GetMetricMetadata200>).data,
|
||||
) as MetricMetadata;
|
||||
jest
|
||||
.spyOn(useUpdateMetricMetadataHooks, 'useUpdateMetricMetadata')
|
||||
.mockReturnValue({
|
||||
mutate: mockUseUpdateMetricMetadata,
|
||||
isLoading: false,
|
||||
} as any);
|
||||
|
||||
const mockErrorNotification = jest.fn();
|
||||
const mockSuccessNotification = jest.fn();
|
||||
@@ -104,47 +90,46 @@ jest.spyOn(useNotificationsHooks, 'useNotifications').mockReturnValue({
|
||||
} as any);
|
||||
|
||||
const mockMetricName = 'test_metric';
|
||||
const mockMetricMetadata = {
|
||||
metric_type: MetricType.GAUGE,
|
||||
description: 'test_description',
|
||||
unit: 'test_unit',
|
||||
temporality: Temporality.DELTA,
|
||||
};
|
||||
const mockRefetchMetricDetails = jest.fn();
|
||||
|
||||
describe('Metadata', () => {
|
||||
beforeEach(() => {
|
||||
mockUseUpdateMetricMetadataHook.mockReturnValue(({
|
||||
mutate: mockUseUpdateMetricMetadata,
|
||||
} as Partial<UseUpdateMetricMetadataResult>) as UseUpdateMetricMetadataResult);
|
||||
});
|
||||
|
||||
it('should render the metadata properly', () => {
|
||||
render(
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('Metric Type')).toBeInTheDocument();
|
||||
expect(screen.getByText('Gauge')).toBeInTheDocument();
|
||||
expect(screen.getByText(mockMetricMetadata.metric_type)).toBeInTheDocument();
|
||||
expect(screen.getByText('Description')).toBeInTheDocument();
|
||||
expect(screen.getByText(mockMetricMetadata.description)).toBeInTheDocument();
|
||||
expect(screen.getByText('Unit')).toBeInTheDocument();
|
||||
expect(screen.getByText(mockMetricMetadata.unit)).toBeInTheDocument();
|
||||
expect(screen.getByText('Temporality')).toBeInTheDocument();
|
||||
expect(screen.getByText('Delta')).toBeInTheDocument();
|
||||
expect(screen.getByText(mockMetricMetadata.temporality)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('editing the metadata should show the form inputs', async () => {
|
||||
it('editing the metadata should show the form inputs', () => {
|
||||
render(
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
expect(editButton).toBeInTheDocument();
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
expect(screen.getByTestId('metric-type-select')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('temporality-select')).toBeInTheDocument();
|
||||
@@ -159,47 +144,52 @@ describe('Metadata', () => {
|
||||
...mockMetricMetadata,
|
||||
unit: '',
|
||||
}}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
expect(editButton).toBeInTheDocument();
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
const metricDescriptionInput = screen.getByTestId('description-input');
|
||||
expect(metricDescriptionInput).toBeInTheDocument();
|
||||
await userEvent.clear(metricDescriptionInput);
|
||||
await userEvent.type(metricDescriptionInput, 'Updated description');
|
||||
fireEvent.change(metricDescriptionInput, {
|
||||
target: { value: 'Updated description' },
|
||||
});
|
||||
|
||||
const metricTypeSelect = screen.getByTestId('metric-type-select');
|
||||
expect(metricTypeSelect).toBeInTheDocument();
|
||||
await userEvent.selectOptions(metricTypeSelect, MetrictypesTypeDTO.sum);
|
||||
fireEvent.change(metricTypeSelect, {
|
||||
target: { value: MetricType.SUM },
|
||||
});
|
||||
|
||||
const temporalitySelect = screen.getByTestId('temporality-select');
|
||||
expect(temporalitySelect).toBeInTheDocument();
|
||||
await userEvent.selectOptions(temporalitySelect, Temporality.CUMULATIVE);
|
||||
fireEvent.change(temporalitySelect, {
|
||||
target: { value: Temporality.CUMULATIVE },
|
||||
});
|
||||
|
||||
const unitSelect = screen.getByTestId('unit-select');
|
||||
expect(unitSelect).toBeInTheDocument();
|
||||
await userEvent.selectOptions(unitSelect, 'By');
|
||||
fireEvent.change(unitSelect, {
|
||||
target: { value: 'By' },
|
||||
});
|
||||
|
||||
const saveButton = screen.getByText('Save');
|
||||
expect(saveButton).toBeInTheDocument();
|
||||
await userEvent.click(saveButton);
|
||||
fireEvent.click(saveButton);
|
||||
|
||||
expect(mockUseUpdateMetricMetadata).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
type: MetrictypesTypeDTO.sum,
|
||||
temporality: MetrictypesTemporalityDTO.cumulative,
|
||||
metricName: mockMetricName,
|
||||
payload: expect.objectContaining({
|
||||
description: 'Updated description',
|
||||
metricType: MetricType.SUM,
|
||||
temporality: Temporality.CUMULATIVE,
|
||||
unit: 'By',
|
||||
isMonotonic: true,
|
||||
}),
|
||||
pathParams: {
|
||||
metricName: mockMetricName,
|
||||
},
|
||||
}),
|
||||
expect.objectContaining({
|
||||
onSuccess: expect.any(Function),
|
||||
@@ -213,28 +203,29 @@ describe('Metadata', () => {
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
const metricDescriptionInput = screen.getByTestId('description-input');
|
||||
await userEvent.clear(metricDescriptionInput);
|
||||
await userEvent.type(metricDescriptionInput, 'Updated description');
|
||||
fireEvent.change(metricDescriptionInput, {
|
||||
target: { value: 'Updated description' },
|
||||
});
|
||||
|
||||
const saveButton = screen.getByText('Save');
|
||||
await userEvent.click(saveButton);
|
||||
fireEvent.click(saveButton);
|
||||
|
||||
const onSuccessCallback =
|
||||
mockUseUpdateMetricMetadata.mock.calls[0][1].onSuccess;
|
||||
onSuccessCallback({ status: 200 });
|
||||
onSuccessCallback({ statusCode: 200 });
|
||||
|
||||
expect(mockSuccessNotification).toHaveBeenCalledWith({
|
||||
message: 'Metadata updated successfully',
|
||||
});
|
||||
expect(mockRefetchMetricDetails).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should show error notification when metadata update fails with non-200 response', async () => {
|
||||
@@ -242,24 +233,24 @@ describe('Metadata', () => {
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
const metricDescriptionInput = screen.getByTestId('description-input');
|
||||
await userEvent.clear(metricDescriptionInput);
|
||||
await userEvent.type(metricDescriptionInput, 'Updated description');
|
||||
fireEvent.change(metricDescriptionInput, {
|
||||
target: { value: 'Updated description' },
|
||||
});
|
||||
|
||||
const saveButton = screen.getByText('Save');
|
||||
await userEvent.click(saveButton);
|
||||
fireEvent.click(saveButton);
|
||||
|
||||
const onSuccessCallback =
|
||||
mockUseUpdateMetricMetadata.mock.calls[0][1].onSuccess;
|
||||
onSuccessCallback({ status: 500 });
|
||||
onSuccessCallback({ statusCode: 500 });
|
||||
|
||||
expect(mockErrorNotification).toHaveBeenCalledWith({
|
||||
message:
|
||||
@@ -272,20 +263,20 @@ describe('Metadata', () => {
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
const metricDescriptionInput = screen.getByTestId('description-input');
|
||||
await userEvent.clear(metricDescriptionInput);
|
||||
await userEvent.type(metricDescriptionInput, 'Updated description');
|
||||
fireEvent.change(metricDescriptionInput, {
|
||||
target: { value: 'Updated description' },
|
||||
});
|
||||
|
||||
const saveButton = screen.getByText('Save');
|
||||
await userEvent.click(saveButton);
|
||||
fireEvent.click(saveButton);
|
||||
|
||||
const onErrorCallback = mockUseUpdateMetricMetadata.mock.calls[0][1].onError;
|
||||
|
||||
@@ -298,41 +289,39 @@ describe('Metadata', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('cancel button should cancel the edit mode', async () => {
|
||||
it('cancel button should cancel the edit mode', () => {
|
||||
render(
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
expect(editButton).toBeInTheDocument();
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
const cancelButton = screen.getByText('Cancel');
|
||||
expect(cancelButton).toBeInTheDocument();
|
||||
await userEvent.click(cancelButton);
|
||||
fireEvent.click(cancelButton);
|
||||
|
||||
const editButton2 = screen.getByText('Edit');
|
||||
expect(editButton2).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not allow editing of unit if it is already set', async () => {
|
||||
it('should not allow editing of unit if it is already set', () => {
|
||||
render(
|
||||
<Metadata
|
||||
metricName={mockMetricName}
|
||||
metadata={mockMetricMetadata}
|
||||
isErrorMetricMetadata={false}
|
||||
isLoadingMetricMetadata={false}
|
||||
refetchMetricDetails={mockRefetchMetricDetails}
|
||||
/>,
|
||||
);
|
||||
|
||||
const editButton = screen.getByText('Edit');
|
||||
expect(editButton).toBeInTheDocument();
|
||||
await userEvent.click(editButton);
|
||||
fireEvent.click(editButton);
|
||||
|
||||
const unitSelect = screen.queryByTestId('unit-select');
|
||||
expect(unitSelect).not.toBeInTheDocument();
|
||||
|
||||
@@ -1,16 +1,68 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { MetricDetails as MetricDetailsType } from 'api/metricsExplorer/getMetricDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
|
||||
import ROUTES from 'constants/routes';
|
||||
import * as useGetMetricDetails from 'hooks/metricsExplorer/useGetMetricDetails';
|
||||
import * as useUpdateMetricMetadata from 'hooks/metricsExplorer/useUpdateMetricMetadata';
|
||||
import * as useHandleExplorerTabChange from 'hooks/useHandleExplorerTabChange';
|
||||
import { userEvent } from 'tests/test-utils';
|
||||
|
||||
import MetricDetails from '../MetricDetails';
|
||||
import { getMockMetricMetadataData } from './testUtlls';
|
||||
|
||||
const mockMetricName = 'test-metric';
|
||||
const mockMetricDescription = 'description for a test metric';
|
||||
const mockMetricData: MetricDetailsType = {
|
||||
name: mockMetricName,
|
||||
description: mockMetricDescription,
|
||||
unit: 'count',
|
||||
attributes: [
|
||||
{
|
||||
key: 'test-attribute',
|
||||
value: ['test-value'],
|
||||
valueCount: 1,
|
||||
},
|
||||
],
|
||||
alerts: [],
|
||||
dashboards: [],
|
||||
metadata: {
|
||||
metric_type: MetricType.SUM,
|
||||
description: mockMetricDescription,
|
||||
unit: 'count',
|
||||
},
|
||||
type: '',
|
||||
timeseries: 0,
|
||||
samples: 0,
|
||||
timeSeriesTotal: 0,
|
||||
timeSeriesActive: 0,
|
||||
lastReceived: '',
|
||||
};
|
||||
const mockOpenInspectModal = jest.fn();
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
const mockUseGetMetricDetailsData = {
|
||||
data: {
|
||||
payload: {
|
||||
data: mockMetricData,
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isFetching: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
refetch: jest.fn(),
|
||||
};
|
||||
|
||||
jest
|
||||
.spyOn(useGetMetricDetails, 'useGetMetricDetails')
|
||||
.mockReturnValue(mockUseGetMetricDetailsData as any);
|
||||
|
||||
jest.spyOn(useUpdateMetricMetadata, 'useUpdateMetricMetadata').mockReturnValue({
|
||||
mutate: jest.fn(),
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
} as any);
|
||||
|
||||
const mockHandleExplorerTabChange = jest.fn();
|
||||
jest
|
||||
.spyOn(useHandleExplorerTabChange, 'useHandleExplorerTabChange')
|
||||
@@ -36,50 +88,7 @@ jest.mock('react-query', () => ({
}),
}));

jest.mock(
'container/MetricsExplorer/MetricDetails/AllAttributes',
() =>
function MockAllAttributes(): JSX.Element {
return <div data-testid="all-attributes">All Attributes</div>;
},
);
jest.mock(
'container/MetricsExplorer/MetricDetails/DashboardsAndAlertsPopover',
() =>
function MockDashboardsAndAlertsPopover(): JSX.Element {
return (
<div data-testid="dashboards-and-alerts-popover">
Dashboards and Alerts Popover
</div>
);
},
);
jest.mock(
'container/MetricsExplorer/MetricDetails/Highlights',
() =>
function MockHighlights(): JSX.Element {
return <div data-testid="highlights">Highlights</div>;
},
);

jest.mock(
'container/MetricsExplorer/MetricDetails/Metadata',
() =>
function MockMetadata(): JSX.Element {
return <div data-testid="metadata">Metadata</div>;
},
);

const useGetMetricMetadataMock = jest.spyOn(
metricsExplorerHooks,
'useGetMetricMetadata',
);

describe('MetricDetails', () => {
beforeEach(() => {
useGetMetricMetadataMock.mockReturnValue(getMockMetricMetadataData());
});

it('renders metric details correctly', () => {
render(
<MetricDetails
@@ -92,15 +101,27 @@ describe('MetricDetails', () => {
);

expect(screen.getByText(mockMetricName)).toBeInTheDocument();
expect(screen.getByTestId('all-attributes')).toBeInTheDocument();
expect(screen.getByText(mockMetricDescription)).toBeInTheDocument();
expect(
screen.getByTestId('dashboards-and-alerts-popover'),
screen.getByText(getUniversalNameFromMetricUnit(mockMetricData.unit)),
).toBeInTheDocument();
expect(screen.getByTestId('highlights')).toBeInTheDocument();
expect(screen.getByTestId('metadata')).toBeInTheDocument();
});

it('renders the "open in explorer" and "inspect" buttons', async () => {
it('renders the "open in explorer" and "inspect" buttons', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValueOnce({
...mockUseGetMetricDetailsData,
data: {
payload: {
data: {
...mockMetricData,
metadata: {
...mockMetricData.metadata,
metric_type: MetricType.GAUGE,
},
},
},
},
} as any);
render(
<MetricDetails
onClose={mockOnClose}
@@ -114,24 +135,93 @@ describe('MetricDetails', () => {
expect(screen.getByTestId('open-in-explorer-button')).toBeInTheDocument();
expect(screen.getByTestId('inspect-metric-button')).toBeInTheDocument();

await userEvent.click(screen.getByTestId('open-in-explorer-button'));
fireEvent.click(screen.getByTestId('open-in-explorer-button'));
expect(mockHandleExplorerTabChange).toHaveBeenCalled();

await userEvent.click(screen.getByTestId('inspect-metric-button'));
fireEvent.click(screen.getByTestId('inspect-metric-button'));
expect(mockOpenInspectModal).toHaveBeenCalled();
});

it('should render empty state when metric name is not provided', () => {
it('should render error state when metric details are not found', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValue({
...mockUseGetMetricDetailsData,
isError: true,
error: {
message: 'Error fetching metric details',
},
} as any);

render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={null}
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);

expect(screen.getByText('Metric not found')).toBeInTheDocument();
expect(screen.getByText('Error fetching metric details')).toBeInTheDocument();
});

it('should render loading state when metric details are loading', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValue({
...mockUseGetMetricDetailsData,
isLoading: true,
} as any);

render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);

expect(screen.getByTestId('metric-details-skeleton')).toBeInTheDocument();
});

it('should render all attributes section', () => {
jest
.spyOn(useGetMetricDetails, 'useGetMetricDetails')
.mockReturnValue(mockUseGetMetricDetailsData as any);
render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);

expect(screen.getByText('All Attributes')).toBeInTheDocument();
});

it('should not render all attributes section when relevant data is not present', () => {
jest.spyOn(useGetMetricDetails, 'useGetMetricDetails').mockReturnValue({
...mockUseGetMetricDetailsData,
data: {
payload: {
data: {
...mockMetricData,
attributes: null,
},
},
},
} as any);
render(
<MetricDetails
onClose={mockOnClose}
isOpen
metricName={mockMetricName}
isModalTimeSelection
openInspectModal={mockOpenInspectModal}
/>,
);

expect(screen.queryByText('All Attributes')).not.toBeInTheDocument();
});
});
@@ -1,175 +0,0 @@
import * as metricsExplorerHooks from 'api/generated/services/metrics';
import {
GetMetricAlerts200,
GetMetricAttributes200,
GetMetricDashboards200,
GetMetricHighlights200,
GetMetricMetadata200,
MetrictypesTemporalityDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';

export function getMockMetricHighlightsData(
overrides?: Partial<GetMetricHighlights200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricHighlights> {
return {
data: {
data: {
data: {
dataPoints: 1000000,
lastReceived: '2026-01-24T00:00:00Z',
totalTimeSeries: 1000000,
activeTimeSeries: 1000000,
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricHighlights>;
}

export const MOCK_DASHBOARD_1 = {
dashboardName: 'Dashboard 1',
dashboardId: '1',
widgetId: '1',
widgetName: 'Widget 1',
};
export const MOCK_DASHBOARD_2 = {
dashboardName: 'Dashboard 2',
dashboardId: '2',
widgetId: '2',
widgetName: 'Widget 2',
};
export const MOCK_ALERT_1 = {
alertName: 'Alert 1',
alertId: '1',
};
export const MOCK_ALERT_2 = {
alertName: 'Alert 2',
alertId: '2',
};

export function getMockDashboardsData(
overrides?: Partial<GetMetricDashboards200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricDashboards> {
return {
data: {
data: {
data: {
dashboards: [MOCK_DASHBOARD_1, MOCK_DASHBOARD_2],
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricDashboards>;
}

export function getMockAlertsData(
overrides?: Partial<GetMetricAlerts200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricAlerts> {
return {
data: {
data: {
data: {
alerts: [MOCK_ALERT_1, MOCK_ALERT_2],
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricAlerts>;
}

export function getMockMetricAttributesData(
overrides?: Partial<GetMetricAttributes200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricAttributes> {
return {
data: {
data: {
data: {
attributes: [
{
key: 'attribute1',
values: ['value1', 'value2'],
valueCount: 2,
},
{
key: 'attribute2',
values: ['value3'],
valueCount: 1,
},
],
totalKeys: 2,
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricAttributes>;
}

export function getMockMetricMetadataData(
overrides?: Partial<GetMetricMetadata200>,
{
isLoading = false,
isError = false,
}: {
isLoading?: boolean;
isError?: boolean;
} = {},
): ReturnType<typeof metricsExplorerHooks.useGetMetricMetadata> {
return {
data: {
data: {
data: {
description: 'test_description',
type: MetrictypesTypeDTO.gauge,
unit: 'test_unit',
temporality: MetrictypesTemporalityDTO.delta,
isMonotonic: false,
},
status: 'success',
...overrides,
},
},
isLoading,
isError,
} as ReturnType<typeof metricsExplorerHooks.useGetMetricMetadata>;
}
@@ -1,7 +1,5 @@
import {
MetrictypesTemporalityDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';

import {
determineIsMonotonic,
@@ -12,48 +10,35 @@ import {
describe('MetricDetails utils', () => {
describe('determineIsMonotonic', () => {
it('should return true for histogram metrics', () => {
expect(determineIsMonotonic(MetrictypesTypeDTO.histogram)).toBe(true);
expect(determineIsMonotonic(MetricType.HISTOGRAM)).toBe(true);
});

it('should return true for exponential histogram metrics', () => {
expect(determineIsMonotonic(MetrictypesTypeDTO.exponentialhistogram)).toBe(
expect(determineIsMonotonic(MetricType.EXPONENTIAL_HISTOGRAM)).toBe(true);
});

it('should return false for gauge metrics', () => {
expect(determineIsMonotonic(MetricType.GAUGE)).toBe(false);
});

it('should return false for summary metrics', () => {
expect(determineIsMonotonic(MetricType.SUMMARY)).toBe(false);
});

it('should return true for sum metrics with cumulative temporality', () => {
expect(determineIsMonotonic(MetricType.SUM, Temporality.CUMULATIVE)).toBe(
true,
);
});

it('should return false for gauge metrics', () => {
expect(determineIsMonotonic(MetrictypesTypeDTO.gauge)).toBe(false);
});

it('should return false for summary metrics', () => {
expect(determineIsMonotonic(MetrictypesTypeDTO.summary)).toBe(false);
});

it('should return true for sum metrics with cumulative temporality', () => {
expect(
determineIsMonotonic(
MetrictypesTypeDTO.sum,
MetrictypesTemporalityDTO.cumulative,
),
).toBe(true);
});

it('should return false for sum metrics with delta temporality', () => {
expect(
determineIsMonotonic(
MetrictypesTypeDTO.sum,
MetrictypesTemporalityDTO.delta,
),
).toBe(false);
expect(determineIsMonotonic(MetricType.SUM, Temporality.DELTA)).toBe(false);
});

it('should return false by default', () => {
expect(
determineIsMonotonic(
'' as MetrictypesTypeDTO,
'' as MetrictypesTemporalityDTO,
),
).toBe(false);
expect(determineIsMonotonic('' as MetricType, '' as Temporality)).toBe(
false,
);
});
});
@@ -130,16 +115,13 @@ describe('MetricDetails utils', () => {
const API_GATEWAY = 'api-gateway';

it('should create correct query for SUM metric type', () => {
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.sum,
);
const query = getMetricDetailsQuery(TEST_METRIC_NAME, MetricType.SUM);

expect(query.builder.queryData[0]?.aggregateAttribute?.key).toBe(
TEST_METRIC_NAME,
);
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
MetrictypesTypeDTO.sum,
MetricType.SUM,
);
expect(query.builder.queryData[0]?.aggregateOperator).toBe('rate');
expect(query.builder.queryData[0]?.timeAggregation).toBe('rate');
@@ -147,16 +129,13 @@ describe('MetricDetails utils', () => {
});

it('should create correct query for GAUGE metric type', () => {
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.gauge,
);
const query = getMetricDetailsQuery(TEST_METRIC_NAME, MetricType.GAUGE);

expect(query.builder.queryData[0]?.aggregateAttribute?.key).toBe(
TEST_METRIC_NAME,
);
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
MetrictypesTypeDTO.gauge,
MetricType.GAUGE,
);
expect(query.builder.queryData[0]?.aggregateOperator).toBe('avg');
expect(query.builder.queryData[0]?.timeAggregation).toBe('avg');
@@ -164,16 +143,13 @@ describe('MetricDetails utils', () => {
});

it('should create correct query for SUMMARY metric type', () => {
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.summary,
);
const query = getMetricDetailsQuery(TEST_METRIC_NAME, MetricType.SUMMARY);

expect(query.builder.queryData[0]?.aggregateAttribute?.key).toBe(
TEST_METRIC_NAME,
);
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
MetrictypesTypeDTO.summary,
MetricType.SUMMARY,
);
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
@@ -181,16 +157,13 @@ describe('MetricDetails utils', () => {
});

it('should create correct query for HISTOGRAM metric type', () => {
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.histogram,
);
const query = getMetricDetailsQuery(TEST_METRIC_NAME, MetricType.HISTOGRAM);

expect(query.builder.queryData[0]?.aggregateAttribute?.key).toBe(
TEST_METRIC_NAME,
);
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
MetrictypesTypeDTO.histogram,
MetricType.HISTOGRAM,
);
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
@@ -200,14 +173,14 @@ describe('MetricDetails utils', () => {
it('should create correct query for EXPONENTIAL_HISTOGRAM metric type', () => {
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.exponentialhistogram,
MetricType.EXPONENTIAL_HISTOGRAM,
);

expect(query.builder.queryData[0]?.aggregateAttribute?.key).toBe(
TEST_METRIC_NAME,
);
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
MetrictypesTypeDTO.exponentialhistogram,
MetricType.EXPONENTIAL_HISTOGRAM,
);
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
@@ -230,7 +203,7 @@ describe('MetricDetails utils', () => {
const filter = { key: 'service', value: API_GATEWAY };
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.sum,
MetricType.SUM,
filter,
);
@@ -248,7 +221,7 @@ describe('MetricDetails utils', () => {
const groupBy = 'service';
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.sum,
MetricType.SUM,
undefined,
groupBy,
);
@@ -263,7 +236,7 @@ describe('MetricDetails utils', () => {
const groupBy = 'endpoint';
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.sum,
MetricType.SUM,
filter,
groupBy,
);
@@ -277,10 +250,7 @@ describe('MetricDetails utils', () => {
});

it('should not include filters or groupBy when not provided', () => {
const query = getMetricDetailsQuery(
TEST_METRIC_NAME,
MetrictypesTypeDTO.sum,
);
const query = getMetricDetailsQuery(TEST_METRIC_NAME, MetricType.SUM);

expect(query.builder.queryData[0]?.filters?.items).toHaveLength(0);
expect(query.builder.queryData[0]?.groupBy).toHaveLength(0);
@@ -1,48 +1,6 @@
import {
MetrictypesTemporalityDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';

export const METRIC_METADATA_KEYS = {
description: 'Description',
unit: 'Unit',
type: 'Metric Type',
metric_type: 'Metric Type',
temporality: 'Temporality',
};

export const METRIC_METADATA_TEMPORALITY_OPTIONS = [
{
value: MetrictypesTemporalityDTO.delta,
label: 'Delta',
},
{
value: MetrictypesTemporalityDTO.cumulative,
label: 'Cumulative',
},
];

export const METRIC_METADATA_TYPE_OPTIONS = [
{
value: MetrictypesTypeDTO.sum,
label: 'Sum',
},
{
value: MetrictypesTypeDTO.gauge,
label: 'Gauge',
},
{
value: MetrictypesTypeDTO.histogram,
label: 'Histogram',
},
{
value: MetrictypesTypeDTO.summary,
label: 'Summary',
},
{
value: MetrictypesTypeDTO.exponentialhistogram,
label: 'Exponential Histogram',
},
];

export const METRIC_METADATA_UPDATE_ERROR_MESSAGE =
'Failed to update metadata, please try again. If the issue persists, please contact support.';
@@ -1,12 +1,10 @@
import {
MetricsexplorertypesMetricAlertDTO,
MetricsexplorertypesMetricAttributeDTO,
MetricsexplorertypesMetricDashboardDTO,
MetricsexplorertypesMetricHighlightsResponseDTO,
MetricsexplorertypesMetricMetadataDTO,
MetrictypesTemporalityDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
MetricDetails,
MetricDetailsAlert,
MetricDetailsAttribute,
MetricDetailsDashboard,
} from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';

export interface MetricDetailsProps {
onClose: () => void;
@@ -16,23 +14,21 @@ export interface MetricDetailsProps {
openInspectModal?: (metricName: string) => void;
}

export interface HighlightsProps {
metricName: string;
}
export interface DashboardsAndAlertsPopoverProps {
metricName: string;
dashboards: MetricDetailsDashboard[] | null;
alerts: MetricDetailsAlert[] | null;
}

export interface MetadataProps {
metricName: string;
metadata: MetricMetadata | null;
isErrorMetricMetadata: boolean;
isLoadingMetricMetadata: boolean;
metadata: MetricDetails['metadata'] | undefined;
refetchMetricDetails: () => void;
}

export interface AllAttributesProps {
attributes: MetricDetailsAttribute[];
metricName: string;
metricType: MetrictypesTypeDTO | undefined;
metricType: MetricType | undefined;
}

export interface AllAttributesValueProps {
@@ -40,27 +36,3 @@ export interface AllAttributesValueProps {
filterValue: string[];
goToMetricsExploreWithAppliedAttribute: (key: string, value: string) => void;
}

export type MetricHighlight = MetricsexplorertypesMetricHighlightsResponseDTO;

export type MetricAlert = MetricsexplorertypesMetricAlertDTO;

export type MetricDashboard = MetricsexplorertypesMetricDashboardDTO;

export type MetricMetadata = MetricsexplorertypesMetricMetadataDTO;
export interface MetricMetadataState {
type: MetrictypesTypeDTO;
description: string;
temporality?: MetrictypesTemporalityDTO;
unit: string;
}

export type MetricAttribute = MetricsexplorertypesMetricAttributeDTO;

export enum TableFields {
DESCRIPTION = 'description',
UNIT = 'unit',
TYPE = 'type',
Temporality = 'temporality',
IS_MONOTONIC = 'isMonotonic',
}
Some files were not shown because too many files have changed in this diff.