mirror of
https://github.com/SigNoz/signoz.git
synced 2026-02-20 15:52:41 +00:00
Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fd5f22bd22 | ||
|
|
57c0c0d3b6 | ||
|
|
c5e3c92938 | ||
|
|
f725143af8 | ||
|
|
f8c6236bf9 | ||
|
|
b247a08d01 |
4
.github/workflows/integrationci.yaml
vendored
4
.github/workflows/integrationci.yaml
vendored
@@ -53,9 +53,9 @@ jobs:
|
||||
- sqlite
|
||||
clickhouse-version:
|
||||
- 25.5.6
|
||||
- 25.10.5
|
||||
- 25.10.1
|
||||
schema-migrator-version:
|
||||
- v0.142.0
|
||||
- v0.129.7
|
||||
postgres-version:
|
||||
- 15
|
||||
if: |
|
||||
|
||||
@@ -85,9 +85,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return noopgateway.NewProviderFactory()
|
||||
},
|
||||
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
|
||||
return querier.NewHandler(ps, q, a)
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
eequerier "github.com/SigNoz/signoz/ee/querier"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
@@ -125,10 +124,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return httpgateway.NewProviderFactory(licensing)
|
||||
},
|
||||
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
|
||||
communityHandler := querier.NewHandler(ps, q, a)
|
||||
return eequerier.NewHandler(ps, q, communityHandler)
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -307,16 +307,11 @@ components:
|
||||
type: string
|
||||
value:
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
- value
|
||||
type: object
|
||||
GatewaytypesGettableCreatedIngestionKeyLimit:
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
required:
|
||||
- id
|
||||
type: object
|
||||
GatewaytypesGettableIngestionKeys:
|
||||
properties:
|
||||
@@ -437,8 +432,6 @@ components:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
GatewaytypesPostableIngestionKeyLimit:
|
||||
properties:
|
||||
@@ -461,8 +454,6 @@ components:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
required:
|
||||
- config
|
||||
type: object
|
||||
MetricsexplorertypesListMetric:
|
||||
properties:
|
||||
@@ -1929,82 +1920,6 @@ components:
|
||||
format: date-time
|
||||
type: string
|
||||
type: object
|
||||
ZeustypesGettableHost:
|
||||
properties:
|
||||
hosts:
|
||||
items:
|
||||
$ref: '#/components/schemas/ZeustypesHost'
|
||||
nullable: true
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
state:
|
||||
type: string
|
||||
tier:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- state
|
||||
- tier
|
||||
- hosts
|
||||
type: object
|
||||
ZeustypesHost:
|
||||
properties:
|
||||
is_default:
|
||||
type: boolean
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
- is_default
|
||||
- url
|
||||
type: object
|
||||
ZeustypesPostableHost:
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
ZeustypesPostableProfile:
|
||||
properties:
|
||||
existing_observability_tool:
|
||||
type: string
|
||||
has_existing_observability_tool:
|
||||
type: boolean
|
||||
logs_scale_per_day_in_gb:
|
||||
format: int64
|
||||
type: integer
|
||||
number_of_hosts:
|
||||
format: int64
|
||||
type: integer
|
||||
number_of_services:
|
||||
format: int64
|
||||
type: integer
|
||||
reasons_for_interest_in_signoz:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
timeline_for_migrating_to_signoz:
|
||||
type: string
|
||||
uses_otel:
|
||||
type: boolean
|
||||
where_did_you_discover_signoz:
|
||||
type: string
|
||||
required:
|
||||
- uses_otel
|
||||
- has_existing_observability_tool
|
||||
- existing_observability_tool
|
||||
- reasons_for_interest_in_signoz
|
||||
- logs_scale_per_day_in_gb
|
||||
- number_of_services
|
||||
- number_of_hosts
|
||||
- where_did_you_discover_signoz
|
||||
- timeline_for_migrating_to_signoz
|
||||
type: object
|
||||
securitySchemes:
|
||||
api_key:
|
||||
description: API Keys
|
||||
@@ -3223,29 +3138,12 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Promote and index paths
|
||||
tags:
|
||||
- logs
|
||||
@@ -3270,29 +3168,12 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- EDITOR
|
||||
- tokenizer:
|
||||
- EDITOR
|
||||
summary: Promote and index paths
|
||||
tags:
|
||||
- logs
|
||||
@@ -4839,7 +4720,6 @@ paths:
|
||||
parameters:
|
||||
- in: query
|
||||
name: name
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- in: query
|
||||
@@ -5658,174 +5538,6 @@ paths:
|
||||
summary: Rotate session
|
||||
tags:
|
||||
- sessions
|
||||
/api/v2/zeus/hosts:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint gets the host info from zeus.
|
||||
operationId: GetHosts
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/ZeustypesGettableHost'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Get host info from Zeus.
|
||||
tags:
|
||||
- zeus
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint saves the host of a deployment to zeus.
|
||||
operationId: PutHost
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ZeustypesPostableHost'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Put host in Zeus for a deployment.
|
||||
tags:
|
||||
- zeus
|
||||
/api/v2/zeus/profiles:
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint saves the profile of a deployment to zeus.
|
||||
operationId: PutProfile
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ZeustypesPostableProfile'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Put profile in Zeus for a deployment.
|
||||
tags:
|
||||
- zeus
|
||||
/api/v5/query_range:
|
||||
post:
|
||||
deprecated: false
|
||||
@@ -6220,58 +5932,4 @@ paths:
|
||||
- VIEWER
|
||||
summary: Query range
|
||||
tags:
|
||||
- querier
|
||||
/api/v5/substitute_vars:
|
||||
post:
|
||||
deprecated: false
|
||||
description: Replace variables in a query
|
||||
operationId: ReplaceVariables
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- VIEWER
|
||||
- tokenizer:
|
||||
- VIEWER
|
||||
summary: Replace variables
|
||||
tags:
|
||||
- querier
|
||||
- query
|
||||
|
||||
@@ -155,7 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
|
||||
- **Request / RequestContentType**:
|
||||
- `Request` is a Go type that describes the request body or form.
|
||||
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
|
||||
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
|
||||
- **Response / ResponseContentType**:
|
||||
- `Response` is the Go type for the successful response payload.
|
||||
- `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
|
||||
@@ -173,170 +172,8 @@ See existing examples in:
|
||||
- `addUserRoutes` (for typical JSON request/response)
|
||||
- `addSessionRoutes` (for form-encoded and redirect flows)
|
||||
|
||||
## OpenAPI schema details for request/response types
|
||||
|
||||
The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.
|
||||
|
||||
### Adding request examples
|
||||
|
||||
Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:
|
||||
|
||||
```go
|
||||
type OpenAPIExample struct {
|
||||
Name string // unique key for the example (e.g. "traces_time_series")
|
||||
Summary string // short description shown in docs (e.g. "Time series: count spans grouped by service")
|
||||
Description string // optional longer description
|
||||
Value any // the example payload, typically map[string]any
|
||||
}
|
||||
```
|
||||
|
||||
For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:
|
||||
|
||||
```go
|
||||
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
|
||||
ID: "QueryRangeV5",
|
||||
Tags: []string{"querier"},
|
||||
Summary: "Query range",
|
||||
Description: "Execute a composite query over a time range.",
|
||||
Request: new(qbtypes.QueryRangeRequest),
|
||||
RequestContentType: "application/json",
|
||||
RequestExamples: []handler.OpenAPIExample{
|
||||
{
|
||||
Name: "traces_time_series",
|
||||
Summary: "Time series: count spans grouped by service",
|
||||
Value: map[string]any{
|
||||
"schemaVersion": "v1",
|
||||
"start": 1640995200000,
|
||||
"end": 1640998800000,
|
||||
"requestType": "time_series",
|
||||
"compositeQuery": map[string]any{
|
||||
"queries": []any{
|
||||
map[string]any{
|
||||
"type": "builder_query",
|
||||
"spec": map[string]any{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
// ...
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
// ... more examples
|
||||
},
|
||||
// ...
|
||||
})).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### `required` tag
|
||||
|
||||
Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value, a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.
|
||||
|
||||
```go
|
||||
type ListItem struct {
|
||||
...
|
||||
}
|
||||
|
||||
type ListResponse struct {
|
||||
List []ListItem `json:"list" required:"true" nullable:"true"`
|
||||
Total uint64 `json:"total" required:"true"`
|
||||
}
|
||||
```
|
||||
|
||||
In this example, a response like `{"list": null, "total": 0}` is valid. Both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.
|
||||
|
||||
### `nullable` tag
|
||||
|
||||
Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go, the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).
|
||||
|
||||
Be explicit about the distinction:
|
||||
|
||||
- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice.
|
||||
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.
|
||||
|
||||
```go
|
||||
// Non-nullable: Go code must ensure this is always an initialized slice.
|
||||
type NonNullableExample struct {
|
||||
Items []Item `json:"items" required:"true"`
|
||||
}
|
||||
```
|
||||
|
||||
When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.
|
||||
|
||||
### `Enum()` method
|
||||
|
||||
For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.
|
||||
|
||||
```go
|
||||
type Signal struct {
|
||||
valuer.String
|
||||
}
|
||||
|
||||
var (
|
||||
SignalTraces = Signal{valuer.NewString("traces")}
|
||||
SignalLogs = Signal{valuer.NewString("logs")}
|
||||
SignalMetrics = Signal{valuer.NewString("metrics")}
|
||||
)
|
||||
|
||||
func (Signal) Enum() []any {
|
||||
return []any{
|
||||
SignalTraces,
|
||||
SignalLogs,
|
||||
SignalMetrics,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
This produces the following in the generated OpenAPI spec:
|
||||
|
||||
```yaml
|
||||
Signal:
|
||||
enum:
|
||||
- traces
|
||||
- logs
|
||||
- metrics
|
||||
type: string
|
||||
```
|
||||
|
||||
Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.
|
||||
|
||||
### `JSONSchema()` method (custom schema)
|
||||
|
||||
For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:
|
||||
|
||||
```go
|
||||
var _ jsonschema.Exposer = Step{}
|
||||
|
||||
func (Step) JSONSchema() (jsonschema.Schema, error) {
|
||||
s := jsonschema.Schema{}
|
||||
s.WithDescription("Step interval. Accepts a duration string or seconds.")
|
||||
|
||||
strSchema := jsonschema.Schema{}
|
||||
strSchema.WithType(jsonschema.String.Type())
|
||||
strSchema.WithExamples("60s", "5m", "1h")
|
||||
|
||||
numSchema := jsonschema.Schema{}
|
||||
numSchema.WithType(jsonschema.Number.Type())
|
||||
numSchema.WithExamples(60, 300, 3600)
|
||||
|
||||
s.OneOf = []jsonschema.SchemaOrBool{
|
||||
strSchema.ToSchemaOrBool(),
|
||||
numSchema.ToSchemaOrBool(),
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## What should I remember?
|
||||
|
||||
- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
|
||||
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
|
||||
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
|
||||
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
|
||||
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil` which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
|
||||
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
|
||||
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.
|
||||
|
||||
@@ -1,178 +0,0 @@
|
||||
package querier
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
set factory.ProviderSettings
|
||||
querier querier.Querier
|
||||
community querier.Handler
|
||||
}
|
||||
|
||||
func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
|
||||
return &handler{
|
||||
set: set,
|
||||
querier: querier,
|
||||
community: community,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
||||
bodyBytes, err := io.ReadAll(req.Body)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
|
||||
return
|
||||
}
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
|
||||
ctx := req.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
stackTrace := string(debug.Stack())
|
||||
|
||||
queryJSON, _ := json.Marshal(queryRangeRequest)
|
||||
|
||||
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
|
||||
"error", r,
|
||||
"user", claims.UserID,
|
||||
"payload", string(queryJSON),
|
||||
"stacktrace", stackTrace,
|
||||
)
|
||||
|
||||
render.Error(rw, errors.NewInternalf(
|
||||
errors.CodeInternal,
|
||||
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
|
||||
))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := queryRangeRequest.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
|
||||
anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
results := []any{}
|
||||
for _, item := range anomalies.Results {
|
||||
results = append(results, item)
|
||||
}
|
||||
|
||||
// Build step intervals from the anomaly query
|
||||
stepIntervals := make(map[string]uint64)
|
||||
if anomalyQuery.StepInterval.Duration > 0 {
|
||||
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
|
||||
}
|
||||
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: qbtypes.QueryData{
|
||||
Results: results,
|
||||
},
|
||||
Meta: qbtypes.ExecStats{
|
||||
StepIntervals: stepIntervals,
|
||||
},
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, finalResp)
|
||||
return
|
||||
}
|
||||
|
||||
// regular query range request, delegate to community handler
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
h.community.QueryRange(rw, req)
|
||||
}
|
||||
|
||||
func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
|
||||
h.community.QueryRawStream(rw, req)
|
||||
}
|
||||
|
||||
func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
|
||||
h.community.ReplaceVariables(rw, req)
|
||||
}
|
||||
|
||||
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == qbtypes.FunctionNameAnomaly {
|
||||
for _, arg := range fn.Args {
|
||||
if arg.Name == "seasonality" {
|
||||
if seasonalityStr, ok := arg.Value.(string); ok {
|
||||
switch seasonalityStr {
|
||||
case "weekly":
|
||||
return anomalyV2.SeasonalityWeekly
|
||||
case "hourly":
|
||||
return anomalyV2.SeasonalityHourly
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return anomalyV2.SeasonalityDaily // default
|
||||
}
|
||||
|
||||
func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
|
||||
switch seasonality {
|
||||
case anomalyV2.SeasonalityWeekly:
|
||||
return anomalyV2.NewWeeklyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
|
||||
)
|
||||
case anomalyV2.SeasonalityHourly:
|
||||
return anomalyV2.NewHourlyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
|
||||
)
|
||||
default:
|
||||
return anomalyV2.NewDailyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
|
||||
seasonality := extractSeasonality(anomalyQuery)
|
||||
provider := h.createAnomalyProvider(seasonality)
|
||||
|
||||
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
|
||||
}
|
||||
@@ -2,13 +2,17 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
querierAPI "github.com/SigNoz/signoz/pkg/querier"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
@@ -29,6 +33,7 @@ type APIHandlerOptions struct {
|
||||
IntegrationsController *integrations.Controller
|
||||
CloudIntegrationsController *cloudintegrations.Controller
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
Gateway *httputil.ReverseProxy
|
||||
GatewayUrl string
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
@@ -52,6 +57,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
}, config)
|
||||
|
||||
@@ -74,6 +80,10 @@ func (ah *APIHandler) UM() *usage.Manager {
|
||||
return ah.opts.UsageManager
|
||||
}
|
||||
|
||||
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
|
||||
return ah.opts.Gateway
|
||||
}
|
||||
|
||||
// RegisterRoutes registers routes for this handler on the given router
|
||||
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// note: add ee override methods first
|
||||
@@ -96,6 +106,17 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// v4
|
||||
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
|
||||
|
||||
// v5
|
||||
router.Handle("/api/v5/query_range", handler.New(
|
||||
am.ViewAccess(ah.queryRangeV5),
|
||||
querierAPI.QueryRangeV5OpenAPIDef,
|
||||
)).Methods(http.MethodPost)
|
||||
|
||||
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
|
||||
|
||||
// Gateway
|
||||
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
|
||||
|
||||
ah.APIHandler.RegisterRoutes(router, am)
|
||||
|
||||
}
|
||||
|
||||
58
ee/query-service/app/api/gateway.go
Normal file
58
ee/query-service/app/api/gateway.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
|
||||
return
|
||||
}
|
||||
|
||||
validPath := false
|
||||
for _, allowedPrefix := range gateway.AllowedPrefix {
|
||||
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
|
||||
validPath = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !validPath {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
//Create headers
|
||||
var licenseKey string
|
||||
if license != nil {
|
||||
licenseKey = license.Key
|
||||
}
|
||||
|
||||
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
|
||||
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
|
||||
req.Header.Set("X-Consumer-Groups", "ns:default")
|
||||
|
||||
ah.Gateway().ServeHTTP(rw, req)
|
||||
}
|
||||
@@ -2,11 +2,16 @@ package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -15,6 +20,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -137,3 +144,140 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == qbtypes.FunctionNameAnomaly {
|
||||
for _, arg := range fn.Args {
|
||||
if arg.Name == "seasonality" {
|
||||
if seasonalityStr, ok := arg.Value.(string); ok {
|
||||
switch seasonalityStr {
|
||||
case "weekly":
|
||||
return anomalyV2.SeasonalityWeekly
|
||||
case "hourly":
|
||||
return anomalyV2.SeasonalityHourly
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return anomalyV2.SeasonalityDaily // default
|
||||
}
|
||||
|
||||
func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
|
||||
switch seasonality {
|
||||
case anomalyV2.SeasonalityWeekly:
|
||||
return anomalyV2.NewWeeklyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
case anomalyV2.SeasonalityHourly:
|
||||
return anomalyV2.NewHourlyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
default:
|
||||
return anomalyV2.NewDailyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
|
||||
seasonality := extractSeasonality(anomalyQuery)
|
||||
provider := createAnomalyProvider(aH, seasonality)
|
||||
|
||||
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
|
||||
}
|
||||
|
||||
func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
|
||||
|
||||
bodyBytes, err := io.ReadAll(req.Body)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
|
||||
return
|
||||
}
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
|
||||
ctx := req.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
stackTrace := string(debug.Stack())
|
||||
|
||||
queryJSON, _ := json.Marshal(queryRangeRequest)
|
||||
|
||||
aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
|
||||
"error", r,
|
||||
"user", claims.UserID,
|
||||
"payload", string(queryJSON),
|
||||
"stacktrace", stackTrace,
|
||||
)
|
||||
|
||||
render.Error(rw, errors.NewInternalf(
|
||||
errors.CodeInternal,
|
||||
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
|
||||
))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := queryRangeRequest.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
|
||||
anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
results := []any{}
|
||||
for _, item := range anomalies.Results {
|
||||
results = append(results, item)
|
||||
}
|
||||
|
||||
// Build step intervals from the anomaly query
|
||||
stepIntervals := make(map[string]uint64)
|
||||
if anomalyQuery.StepInterval.Duration > 0 {
|
||||
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
|
||||
}
|
||||
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: qbtypes.QueryData{
|
||||
Results: results,
|
||||
},
|
||||
Meta: qbtypes.ExecStats{
|
||||
StepIntervals: stepIntervals,
|
||||
},
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, finalResp)
|
||||
return
|
||||
} else {
|
||||
// regular query range request, let the querier handle it
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
aH.QuerierAPI.QueryRange(rw, req)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/gorilla/handlers"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/rules"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
@@ -71,6 +72,11 @@ type Server struct {
|
||||
|
||||
// NewServer creates and initializes Server
|
||||
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
|
||||
Provider: "memory",
|
||||
Memory: cache.Memory{
|
||||
@@ -164,6 +170,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
FluxInterval: config.Querier.FluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: config.Gateway.URL.String(),
|
||||
GlobalConfig: config.Global,
|
||||
}
|
||||
@@ -233,6 +240,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
apiHandler.RegisterInfraMetricsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV4Routes(r, am)
|
||||
apiHandler.RegisterQueryRangeV5Routes(r, am)
|
||||
apiHandler.RegisterWebSocketPaths(r, am)
|
||||
apiHandler.RegisterMessagingQueuesRoutes(r, am)
|
||||
apiHandler.RegisterThirdPartyApiRoutes(r, am)
|
||||
|
||||
9
ee/query-service/integrations/gateway/noop.go
Normal file
9
ee/query-service/integrations/gateway/noop.go
Normal file
@@ -0,0 +1,9 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"net/http/httputil"
|
||||
)
|
||||
|
||||
func NewNoopProxy() (*httputil.ReverseProxy, error) {
|
||||
return &httputil.ReverseProxy{}, nil
|
||||
}
|
||||
66
ee/query-service/integrations/gateway/proxy.go
Normal file
66
ee/query-service/integrations/gateway/proxy.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
RoutePrefix string = "/api/gateway"
|
||||
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
|
||||
)
|
||||
|
||||
type proxy struct {
|
||||
url *url.URL
|
||||
stripPath string
|
||||
}
|
||||
|
||||
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
|
||||
url, err := url.Parse(u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
proxy := &proxy{url: url, stripPath: stripPath}
|
||||
|
||||
return &httputil.ReverseProxy{
|
||||
Rewrite: proxy.rewrite,
|
||||
ModifyResponse: proxy.modifyResponse,
|
||||
ErrorHandler: proxy.errorHandler,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
|
||||
pr.SetURL(p.url)
|
||||
pr.SetXForwarded()
|
||||
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
|
||||
}
|
||||
|
||||
func (p *proxy) modifyResponse(res *http.Response) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
|
||||
rw.WriteHeader(http.StatusBadGateway)
|
||||
}
|
||||
|
||||
func cleanPath(p string) string {
|
||||
if p == "" {
|
||||
return "/"
|
||||
}
|
||||
if p[0] != '/' {
|
||||
p = "/" + p
|
||||
}
|
||||
np := path.Clean(p)
|
||||
if p[len(p)-1] == '/' && np != "/" {
|
||||
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
|
||||
np = p
|
||||
} else {
|
||||
np += "/"
|
||||
}
|
||||
}
|
||||
return np
|
||||
}
|
||||
61
ee/query-service/integrations/gateway/proxy_test.go
Normal file
61
ee/query-service/integrations/gateway/proxy_test.go
Normal file
@@ -0,0 +1,61 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestProxyRewrite(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
url *url.URL
|
||||
stripPath string
|
||||
in *url.URL
|
||||
expected *url.URL
|
||||
}{
|
||||
{
|
||||
name: "SamePathAdded",
|
||||
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
|
||||
stripPath: "/strip",
|
||||
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
|
||||
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
|
||||
},
|
||||
{
|
||||
name: "NoStripPathInput",
|
||||
url: &url.URL{Scheme: "http", Host: "backend"},
|
||||
stripPath: "",
|
||||
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
|
||||
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
|
||||
},
|
||||
{
|
||||
name: "NoStripPathPresentInReq",
|
||||
url: &url.URL{Scheme: "http", Host: "backend"},
|
||||
stripPath: "/not-found",
|
||||
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
|
||||
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
|
||||
require.NoError(t, err)
|
||||
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
|
||||
require.NoError(t, err)
|
||||
proxyReq := &httputil.ProxyRequest{
|
||||
In: inReq,
|
||||
Out: inReq.Clone(context.Background()),
|
||||
}
|
||||
proxy.Rewrite(proxyReq)
|
||||
|
||||
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
|
||||
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
|
||||
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
|
||||
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ package httpzeus
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
@@ -11,7 +10,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
@@ -121,13 +119,8 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
|
||||
body, err := json.Marshal(profile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(
|
||||
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
|
||||
_, err := provider.do(
|
||||
ctx,
|
||||
provider.config.URL.JoinPath("/v2/profiles/me"),
|
||||
http.MethodPut,
|
||||
@@ -138,15 +131,10 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, profile *z
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
|
||||
body, err := json.Marshal(host)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(
|
||||
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
|
||||
_, err := provider.do(
|
||||
ctx,
|
||||
provider.config.URL.JoinPath("/v2/deployments/me/host"),
|
||||
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
|
||||
http.MethodPut,
|
||||
key,
|
||||
body,
|
||||
@@ -181,28 +169,21 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
|
||||
return body, nil
|
||||
}
|
||||
|
||||
errorMessage := gjson.GetBytes(body, "error").String()
|
||||
if errorMessage == "" {
|
||||
errorMessage = "an unknown error occurred"
|
||||
}
|
||||
|
||||
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
|
||||
return nil, provider.errFromStatusCode(response.StatusCode)
|
||||
}
|
||||
|
||||
// This can be taken down to the client package
|
||||
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
|
||||
func (provider *Provider) errFromStatusCode(statusCode int) error {
|
||||
switch statusCode {
|
||||
case http.StatusBadRequest:
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
|
||||
case http.StatusUnauthorized:
|
||||
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
|
||||
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
|
||||
case http.StatusForbidden:
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
|
||||
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
|
||||
case http.StatusNotFound:
|
||||
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
|
||||
case http.StatusConflict:
|
||||
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
|
||||
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
|
||||
}
|
||||
|
||||
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
|
||||
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
|
||||
}
|
||||
|
||||
@@ -58,7 +58,6 @@
|
||||
"@signozhq/radio-group": "0.0.2",
|
||||
"@signozhq/resizable": "0.0.0",
|
||||
"@signozhq/sonner": "0.1.0",
|
||||
"@signozhq/switch": "0.0.2",
|
||||
"@signozhq/table": "0.3.7",
|
||||
"@signozhq/tooltip": "0.0.2",
|
||||
"@tanstack/react-table": "8.20.6",
|
||||
|
||||
@@ -12,6 +12,5 @@
|
||||
"pipeline": "Pipeline",
|
||||
"pipelines": "Pipelines",
|
||||
"archives": "Archives",
|
||||
"logs_to_metrics": "Logs To Metrics",
|
||||
"roles": "Roles"
|
||||
"logs_to_metrics": "Logs To Metrics"
|
||||
}
|
||||
|
||||
@@ -12,6 +12,5 @@
|
||||
"pipeline": "Pipeline",
|
||||
"pipelines": "Pipelines",
|
||||
"archives": "Archives",
|
||||
"logs_to_metrics": "Logs To Metrics",
|
||||
"roles": "Roles"
|
||||
"logs_to_metrics": "Logs To Metrics"
|
||||
}
|
||||
|
||||
@@ -73,6 +73,5 @@
|
||||
"API_MONITORING": "SigNoz | External APIs",
|
||||
"METER_EXPLORER": "SigNoz | Meter Explorer",
|
||||
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
||||
"METER": "SigNoz | Meter",
|
||||
"ROLES_SETTINGS": "SigNoz | Roles"
|
||||
"METER": "SigNoz | Meter"
|
||||
}
|
||||
|
||||
29
frontend/src/api/IngestionKeys/createIngestionKey.ts
Normal file
29
frontend/src/api/IngestionKeys/createIngestionKey.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { GatewayApiV1Instance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import {
|
||||
CreateIngestionKeyProps,
|
||||
IngestionKeyProps,
|
||||
} from 'types/api/ingestionKeys/types';
|
||||
|
||||
const createIngestionKey = async (
|
||||
props: CreateIngestionKeyProps,
|
||||
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
|
||||
...props,
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default createIngestionKey;
|
||||
26
frontend/src/api/IngestionKeys/deleteIngestionKey.ts
Normal file
26
frontend/src/api/IngestionKeys/deleteIngestionKey.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { GatewayApiV1Instance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
|
||||
|
||||
const deleteIngestionKey = async (
|
||||
id: string,
|
||||
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await GatewayApiV1Instance.delete(
|
||||
`/workspaces/me/keys/${id}`,
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default deleteIngestionKey;
|
||||
21
frontend/src/api/IngestionKeys/getAllIngestionKeys.ts
Normal file
21
frontend/src/api/IngestionKeys/getAllIngestionKeys.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { GatewayApiV1Instance } from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import {
|
||||
AllIngestionKeyProps,
|
||||
GetIngestionKeyProps,
|
||||
} from 'types/api/ingestionKeys/types';
|
||||
|
||||
export const getAllIngestionKeys = (
|
||||
props: GetIngestionKeyProps,
|
||||
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
const { search, per_page, page } = props;
|
||||
|
||||
const BASE_URL = '/workspaces/me/keys';
|
||||
const URL_QUERY_PARAMS =
|
||||
search && search.length > 0
|
||||
? `/search?name=${search}&page=1&per_page=100`
|
||||
: `?page=${page}&per_page=${per_page}`;
|
||||
|
||||
return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
|
||||
};
|
||||
65
frontend/src/api/IngestionKeys/limits/createLimitsForKey.ts
Normal file
65
frontend/src/api/IngestionKeys/limits/createLimitsForKey.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
/* eslint-disable @typescript-eslint/no-throw-literal */
|
||||
import { GatewayApiV1Instance } from 'api';
|
||||
import axios from 'axios';
|
||||
import {
|
||||
AddLimitProps,
|
||||
LimitSuccessProps,
|
||||
} from 'types/api/ingestionKeys/limits/types';
|
||||
|
||||
interface SuccessResponse<T> {
|
||||
statusCode: number;
|
||||
error: null;
|
||||
message: string;
|
||||
payload: T;
|
||||
}
|
||||
|
||||
interface ErrorResponse {
|
||||
statusCode: number;
|
||||
error: string;
|
||||
message: string;
|
||||
payload: null;
|
||||
}
|
||||
|
||||
const createLimitForIngestionKey = async (
|
||||
props: AddLimitProps,
|
||||
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await GatewayApiV1Instance.post(
|
||||
`/workspaces/me/keys/${props.keyID}/limits`,
|
||||
{
|
||||
...props,
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
// Axios error
|
||||
const errResponse: ErrorResponse = {
|
||||
statusCode: error.response?.status || 500,
|
||||
error: error.response?.data?.error,
|
||||
message: error.response?.data?.status || 'An error occurred',
|
||||
payload: null,
|
||||
};
|
||||
|
||||
throw errResponse;
|
||||
} else {
|
||||
// Non-Axios error
|
||||
const errResponse: ErrorResponse = {
|
||||
statusCode: 500,
|
||||
error: 'Unknown error',
|
||||
message: 'An unknown error occurred',
|
||||
payload: null,
|
||||
};
|
||||
|
||||
throw errResponse;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export default createLimitForIngestionKey;
|
||||
@@ -0,0 +1,26 @@
|
||||
import { GatewayApiV1Instance } from 'api';
|
||||
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
|
||||
|
||||
const deleteLimitsForIngestionKey = async (
|
||||
id: string,
|
||||
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await GatewayApiV1Instance.delete(
|
||||
`/workspaces/me/limits/${id}`,
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
return ErrorResponseHandler(error as AxiosError);
|
||||
}
|
||||
};
|
||||
|
||||
export default deleteLimitsForIngestionKey;
|
||||
@@ -0,0 +1,65 @@
|
||||
/* eslint-disable @typescript-eslint/no-throw-literal */
|
||||
import { GatewayApiV1Instance } from 'api';
|
||||
import axios from 'axios';
|
||||
import {
|
||||
LimitSuccessProps,
|
||||
UpdateLimitProps,
|
||||
} from 'types/api/ingestionKeys/limits/types';
|
||||
|
||||
interface SuccessResponse<T> {
|
||||
statusCode: number;
|
||||
error: null;
|
||||
message: string;
|
||||
payload: T;
|
||||
}
|
||||
|
||||
interface ErrorResponse {
|
||||
statusCode: number;
|
||||
error: string;
|
||||
message: string;
|
||||
payload: null;
|
||||
}
|
||||
|
||||
const updateLimitForIngestionKey = async (
|
||||
props: UpdateLimitProps,
|
||||
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
|
||||
try {
|
||||
const response = await GatewayApiV1Instance.patch(
|
||||
`/workspaces/me/limits/${props.limitID}`,
|
||||
{
|
||||
config: props.config,
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
error: null,
|
||||
message: response.data.status,
|
||||
payload: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
// Axios error
|
||||
const errResponse: ErrorResponse = {
|
||||
statusCode: error.response?.status || 500,
|
||||
error: error.response?.data?.error,
|
||||
message: error.response?.data?.status || 'An error occurred',
|
||||
payload: null,
|
||||
};
|
||||
|
||||
throw errResponse;
|
||||
} else {
|
||||
// Non-Axios error
|
||||
const errResponse: ErrorResponse = {
|
||||
statusCode: 500,
|
||||
error: 'Unknown error',
|
||||
message: 'An unknown error occurred',
|
||||
payload: null,
|
||||
};
|
||||
|
||||
throw errResponse;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export default updateLimitForIngestionKey;
|
||||
32 frontend/src/api/IngestionKeys/updateIngestionKey.ts Normal file
@@ -0,0 +1,32 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IngestionKeysPayloadProps,
UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const updateIngestionKey = async (
props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/keys/${props.id}`,
{
...props.data,
},
);

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

export default updateIngestionKey;
@@ -4,6 +4,8 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';

export default apiV1;
7 frontend/src/api/customDomain/getDeploymentsData.ts Normal file
@@ -0,0 +1,7 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const getDeploymentsData = (): Promise<
AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);
16 frontend/src/api/customDomain/updateSubDomain.ts Normal file
@@ -0,0 +1,16 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
PayloadProps,
UpdateCustomDomainProps,
} from 'types/api/customDomain/types';

const updateSubDomainAPI = async (
props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
axios.put(`/deployments/me/host`, {
...props.data,
});

export default updateSubDomainAPI;
@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
* @summary Search ingestion keys for workspace
*/
export const searchIngestionKeys = (
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
*/
export const invalidateSearchIngestionKeys = async (
queryClient: QueryClient,
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
@@ -16,7 +16,6 @@ import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
ReplaceVariables200,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;
@@ -106,86 +105,3 @@ export const useQueryRangeV5 = <

return useMutation(mutationOptions);
};
/**
* Replace variables in a query
* @summary Replace variables
*/
export const replaceVariables = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ReplaceVariables200>({
url: `/api/v5/substitute_vars`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};

export const getReplaceVariablesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['replaceVariables'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof replaceVariables>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};

return replaceVariables(data);
};

return { mutationFn, ...mutationOptions };
};

export type ReplaceVariablesMutationResult = NonNullable<
Awaited<ReturnType<typeof replaceVariables>>
>;
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;

/**
* @summary Replace variables
*/
export const useReplaceVariables = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getReplaceVariablesMutationOptions(options);

return useMutation(mutationOptions);
};
@@ -453,18 +453,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
/**
* @type string
*/
id: string;
id?: string;
/**
* @type string
*/
value: string;
value?: string;
}

export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
/**
* @type string
*/
id: string;
id?: string;
}

export interface GatewaytypesGettableIngestionKeysDTO {
@@ -616,7 +616,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
/**
* @type string
*/
name: string;
name?: string;
/**
* @type array
* @nullable true
@@ -638,7 +638,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
}

export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
config: GatewaytypesLimitConfigDTO;
config?: GatewaytypesLimitConfigDTO;
/**
* @type array
* @nullable true
@@ -2414,91 +2414,6 @@ export interface TypesUserDTO {
updatedAt?: Date;
}

export interface ZeustypesGettableHostDTO {
/**
* @type array
* @nullable true
*/
hosts: ZeustypesHostDTO[] | null;
/**
* @type string
*/
name: string;
/**
* @type string
*/
state: string;
/**
* @type string
*/
tier: string;
}

export interface ZeustypesHostDTO {
/**
* @type boolean
*/
is_default: boolean;
/**
* @type string
*/
name: string;
/**
* @type string
*/
url: string;
}

export interface ZeustypesPostableHostDTO {
/**
* @type string
*/
name: string;
}

export interface ZeustypesPostableProfileDTO {
/**
* @type string
*/
existing_observability_tool: string;
/**
* @type boolean
*/
has_existing_observability_tool: boolean;
/**
* @type integer
* @format int64
*/
logs_scale_per_day_in_gb: number;
/**
* @type integer
* @format int64
*/
number_of_hosts: number;
/**
* @type integer
* @format int64
*/
number_of_services: number;
/**
* @type array
* @nullable true
*/
reasons_for_interest_in_signoz: string[] | null;
/**
* @type string
*/
timeline_for_migrating_to_signoz: string;
/**
* @type boolean
*/
uses_otel: boolean;
/**
* @type string
*/
where_did_you_discover_signoz: string;
}

export type ChangePasswordPathParameters = {
id: string;
};
@@ -3039,7 +2954,7 @@ export type SearchIngestionKeysParams = {
* @type string
* @description undefined
*/
name: string;
name?: string;
/**
* @type integer
* @description undefined
@@ -3214,14 +3129,6 @@ export type RotateSession200 = {
status?: string;
};

export type GetHosts200 = {
data?: ZeustypesGettableHostDTO;
/**
* @type string
*/
status?: string;
};

export type QueryRangeV5200 = {
data?: Querybuildertypesv5QueryRangeResponseDTO;
/**
@@ -3229,11 +3136,3 @@ export type QueryRangeV5200 = {
*/
status?: string;
};

export type ReplaceVariables200 = {
data?: Querybuildertypesv5QueryRangeRequestDTO;
/**
* @type string
*/
status?: string;
};
@@ -1,269 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
MutationFunction,
QueryClient,
QueryFunction,
QueryKey,
UseMutationOptions,
UseMutationResult,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
GetHosts200,
RenderErrorResponseDTO,
ZeustypesPostableHostDTO,
ZeustypesPostableProfileDTO,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;

/**
* This endpoint gets the host info from zeus.
* @summary Get host info from Zeus.
*/
export const getHosts = (signal?: AbortSignal) => {
return GeneratedAPIInstance<GetHosts200>({
url: `/api/v2/zeus/hosts`,
method: 'GET',
signal,
});
};

export const getGetHostsQueryKey = () => {
return ['getHosts'] as const;
};

export const getGetHostsQueryOptions = <
TData = Awaited<ReturnType<typeof getHosts>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};

const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();

const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
signal,
}) => getHosts(signal);

return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getHosts>>,
TError,
TData
> & { queryKey: QueryKey };
};

export type GetHostsQueryResult = NonNullable<
Awaited<ReturnType<typeof getHosts>>
>;
export type GetHostsQueryError = RenderErrorResponseDTO;

/**
* @summary Get host info from Zeus.
*/

export function useGetHosts<
TData = Awaited<ReturnType<typeof getHosts>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetHostsQueryOptions(options);

const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};

query.queryKey = queryOptions.queryKey;

return query;
}

/**
* @summary Get host info from Zeus.
*/
export const invalidateGetHosts = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetHostsQueryKey() },
options,
);

return queryClient;
};

/**
* This endpoint saves the host of a deployment to zeus.
* @summary Put host in Zeus for a deployment.
*/
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
return GeneratedAPIInstance<void>({
url: `/api/v2/zeus/hosts`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: zeustypesPostableHostDTO,
});
};

export const getPutHostMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
> => {
const mutationKey = ['putHost'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof putHost>>,
{ data: ZeustypesPostableHostDTO }
> = (props) => {
const { data } = props ?? {};

return putHost(data);
};

return { mutationFn, ...mutationOptions };
};

export type PutHostMutationResult = NonNullable<
Awaited<ReturnType<typeof putHost>>
>;
export type PutHostMutationBody = ZeustypesPostableHostDTO;
export type PutHostMutationError = RenderErrorResponseDTO;

/**
* @summary Put host in Zeus for a deployment.
*/
export const usePutHost = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
> => {
const mutationOptions = getPutHostMutationOptions(options);

return useMutation(mutationOptions);
};
/**
* This endpoint saves the profile of a deployment to zeus.
* @summary Put profile in Zeus for a deployment.
*/
export const putProfile = (
zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
) => {
return GeneratedAPIInstance<void>({
url: `/api/v2/zeus/profiles`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: zeustypesPostableProfileDTO,
});
};

export const getPutProfileMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
> => {
const mutationKey = ['putProfile'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof putProfile>>,
{ data: ZeustypesPostableProfileDTO }
> = (props) => {
const { data } = props ?? {};

return putProfile(data);
};

return { mutationFn, ...mutationOptions };
};

export type PutProfileMutationResult = NonNullable<
Awaited<ReturnType<typeof putProfile>>
>;
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
export type PutProfileMutationError = RenderErrorResponseDTO;

/**
* @summary Put profile in Zeus for a deployment.
*/
export const usePutProfile = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
> => {
const mutationOptions = getPutProfileMutationOptions(options);

return useMutation(mutationOptions);
};
@@ -15,7 +15,15 @@ import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { eventEmitter } from 'utils/getEventEmitter';

import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import apiV1, {
apiAlertManager,
apiV2,
apiV3,
apiV4,
apiV5,
gatewayApiV1,
gatewayApiV2,
} from './apiV1';
import { Logout } from './utils';

const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -203,6 +211,24 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});

GatewayApiV1Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);

GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V2
export const GatewayApiV2Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});

// generated API Instance
export const GeneratedAPIInstance = axios.create({
baseURL: ENVIRONMENT.baseURL,
@@ -214,6 +240,14 @@ GeneratedAPIInstance.interceptors.response.use(
interceptorRejected,
);

GatewayApiV2Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);

GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//

AxiosAlertManagerInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
20 frontend/src/api/onboarding/updateProfile.ts Normal file
@@ -0,0 +1,20 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';

const updateProfile = async (
props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
const response = await GatewayApiV2Instance.put('/profiles/me', {
...props,
});

return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};

export default updateProfile;
19 frontend/src/api/v1/domains/id/delete.ts Normal file
@@ -0,0 +1,19 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';

const deleteDomain = async (id: string): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete<null>(`/domains/${id}`);

return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default deleteDomain;
25 frontend/src/api/v1/domains/id/put.ts Normal file
@@ -0,0 +1,25 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { UpdatableAuthDomain } from 'types/api/v1/domains/put';

const put = async (
props: UpdatableAuthDomain,
): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put<RawSuccessResponse<null>>(
`/domains/${props.id}`,
{ config: props.config },
);

return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default put;
24 frontend/src/api/v1/domains/list.ts Normal file
@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';

const listAllDomain = async (): Promise<
SuccessResponseV2<GettableAuthDomain[]>
> => {
try {
const response = await axios.get<RawSuccessResponse<GettableAuthDomain[]>>(
`/domains`,
);

return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default listAllDomain;
26 frontend/src/api/v1/domains/post.ts Normal file
@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';

const post = async (
props: PostableAuthDomain,
): Promise<SuccessResponseV2<GettableAuthDomain>> => {
try {
const response = await axios.post<RawSuccessResponse<GettableAuthDomain>>(
`/domains`,
props,
);

return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};

export default post;
1 frontend/src/auto-import-registry.d.ts vendored
@@ -24,6 +24,5 @@ import '@signozhq/popover';
import '@signozhq/radio-group';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/switch';
import '@signozhq/table';
import '@signozhq/tooltip';
@@ -42,7 +42,6 @@
display: flex;
flex-direction: column;
padding: 16px;
padding-bottom: 0;
}

.title {
@@ -191,7 +190,7 @@
padding: 0px;
}
.log-detail-drawer__footer-hint {
position: sticky;
position: absolute;
bottom: 0;
left: 0;
right: 0;
@@ -367,7 +366,7 @@
}

.log-detail-drawer__footer-hint {
position: sticky;
position: absolute;
bottom: 0;
left: 0;
right: 0;
@@ -21,7 +21,7 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties {

export const defaultTableStyle: CSSProperties = {
minWidth: '40rem',
maxWidth: '90rem',
maxWidth: '60rem',
};

export const defaultListViewPanelStyle: CSSProperties = {
@@ -86,6 +86,7 @@ interface QuerySearchProps {
signalSource?: string;
hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
onRun?: (query: string) => void;
showFilterSuggestionsWithoutMetric?: boolean;
}

function QuerySearch({
@@ -96,6 +97,7 @@ function QuerySearch({
onRun,
signalSource,
hardcodedAttributeKeys,
showFilterSuggestionsWithoutMetric,
}: QuerySearchProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const [valueSuggestions, setValueSuggestions] = useState<any[]>([]);
@@ -252,7 +254,8 @@ function QuerySearch({
async (searchText?: string): Promise<void> => {
if (
dataSource === DataSource.METRICS &&
!queryData.aggregateAttribute?.key
!queryData.aggregateAttribute?.key &&
!showFilterSuggestionsWithoutMetric
) {
setKeySuggestions([]);
return;
@@ -301,6 +304,7 @@ function QuerySearch({
queryData.aggregateAttribute?.key,
signalSource,
hardcodedAttributeKeys,
showFilterSuggestionsWithoutMetric,
],
);

@@ -1562,6 +1566,7 @@ QuerySearch.defaultProps = {
hardcodedAttributeKeys: undefined,
placeholder:
"Enter your filter query (e.g., http.status_code >= 500 AND service.name = 'frontend')",
showFilterSuggestionsWithoutMetric: false,
};

export default QuerySearch;
@@ -55,7 +55,6 @@ const ROUTES = {
LOGS_INDEX_FIELDS: '/logs-explorer/index-fields',
TRACE_EXPLORER: '/trace-explorer',
BILLING: '/settings/billing',
ROLES_SETTINGS: '/settings/roles',
SUPPORT: '/support',
LOGS_SAVE_VIEWS: '/logs/saved-views',
TRACES_SAVE_VIEWS: '/traces/saved-views',
@@ -202,7 +202,7 @@ function AllEndPoints({

const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
op: 'AND',
});
setParams({
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
selectedView: VIEWS.ENDPOINT_STATS,
endPointDetailsLocalFilters: {
items: initialItems,
@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';

const httpUrlKey = {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
};
@@ -93,7 +93,7 @@ function EndPointDetails({
return currentFilters; // No change needed, prevents loop
}

// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
const otherFilters = currentFilters?.items?.filter(
(item) => item.key?.key !== httpUrlKey.key,
);
@@ -125,7 +125,7 @@ function EndPointDetails({
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
// Filter out http_url filter before saving to params
// Filter out http.url filter before saving to params
const filteredNewFilters = {
op: 'AND',
items:
@@ -299,6 +299,7 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
filters={filters}
timeRange={timeRange}
onDragSelect={onDragSelect}
@@ -56,15 +56,15 @@ function TopErrors({
{
items: endPointName
? [
// Remove any existing http_url filters from initialFilters to avoid duplicates
// Remove any existing http.url filters from initialFilters to avoid duplicates
...(initialFilters?.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
) || []),
{
id: '92b8a1c1',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',
@@ -9,7 +9,6 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

import { SPAN_ATTRIBUTES } from '../constants';
import DomainMetrics from './DomainMetrics';

// Mock the API call
@@ -127,9 +126,11 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'count()',
);
// Verify exact domain filter expression structure
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryA.filter.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryA.filter.expression).toContain(
'url.full EXISTS OR http.url EXISTS',
);

// Verify Query B - p99 latency
@@ -141,13 +142,17 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'p99(duration_nano)',
);
// Verify exact domain filter expression structure
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryB.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);

// Verify Query C - error count (disabled)
const queryC = queryData.find((q: any) => q.queryName === 'C');
expect(queryC).toBeDefined();
expect(queryC.disabled).toBe(true);
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryC.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryC.aggregations?.[0]).toBeDefined();
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count()',
@@ -164,7 +169,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'max(timestamp)',
);
// Verify exact domain filter expression structure
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryD.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);

// Verify Formula F1 - error rate calculation
const formulas = payload.query.builder.queryFormulas;
@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryA.filter) {
expect(queryA.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryB.filter) {
expect(queryB.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(queryC.aggregateOperator).toBe('count');
if (queryC.filter) {
expect(queryC.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryD.filter) {
expect(queryD.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
}
if (queryE.filter) {
expect(queryE.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(query.filter.expression).toContain('staging');
// Also verify domain filter is still present
expect(query.filter.expression).toContain(
"http_host = 'api.internal.com'",
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
);
// Verify client kind filter is present
expect(query.filter.expression).toContain("kind_string = 'Client'");
@@ -34,6 +34,7 @@ function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
filters,
timeRange,
onDragSelect,
@@ -47,6 +48,7 @@ function StatusCodeBarCharts({
unknown
>;
domainName: string;
endPointName: string;
filters: IBuilderQuery['filters'];
timeRange: {
startTime: number;
@@ -142,11 +144,11 @@ function StatusCodeBarCharts({

const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, {
getStatusCodeBarChartWidgetData(domainName, endPointName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, filters],
[domainName, endPointName, filters],
);

const graphClickHandler = useCallback(
@@ -164,7 +166,6 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);

handleGraphClick({
xValue,
yValue,
@@ -12,7 +12,7 @@ export const VIEW_TYPES = {

// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
HTTP_URL: 'http_url',
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
const endpointFilter = result?.items?.find(
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
item.value === endPointName,
);
expect(endpointFilter).toBeDefined();
@@ -344,12 +344,13 @@ describe('API Monitoring Utils', () => {
describe('getFormattedEndPointDropDownData', () => {
it('should format endpoint dropdown data correctly', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const mockData = [
{
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/users',
'url.full': 'http://example.com/api/users',
A: 150, // count or other metric
},
},
@@ -357,6 +358,7 @@ describe('API Monitoring Utils', () => {
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/orders',
'url.full': 'http://example.com/api/orders',
A: 75,
},
},
@@ -404,7 +406,7 @@ describe('API Monitoring Utils', () => {

it('should handle items without URL path', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
type MockDataType = {
data: {
[key: string]: string | number;
@@ -710,11 +712,13 @@ describe('API Monitoring Utils', () => {
it('should generate widget configuration for status code bar chart', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const filters = { items: [], op: 'AND' };

// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);

@@ -737,11 +741,21 @@ describe('API Monitoring Utils', () => {
if (domainFilter) {
expect(domainFilter.value).toBe(domainName);
}

// Should have endpoint filter if provided
const endpointFilter = queryData.filters?.items?.find(
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
);
expect(endpointFilter).toBeDefined();
if (endpointFilter) {
expect(endpointFilter.value).toBe(endPointName);
}
});

it('should include custom filters in the widget configuration', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const customFilter = {
id: 'custom-filter',
key: {
@@ -757,6 +771,7 @@ describe('API Monitoring Utils', () => {
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);

@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
}
>
Click Row
@@ -6,10 +6,10 @@
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: http_host = '${domainName}'
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,8 +18,6 @@ import {
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';

describe('AllEndpointsWidget - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const emptyFilters: IBuilderQuery['filters'] = {
@@ -94,28 +92,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {

const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;

const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;

// Queries A, B, C have identical base filter
expect(queryA.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryB.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryC.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);

// Query D has additional has_error filter
expect(queryD.filter?.expression).toBe(
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
);
});
});
describe('2. GroupBy Structure', () => {
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
it('default groupBy includes both http.url and url.full with type attribute', () => {
const widget = getAllEndpointsWidgetData(
emptyGroupBy,
mockDomainName,
@@ -126,13 +124,23 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {

// All queries should have the same default groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(1);
expect(query.groupBy).toHaveLength(2);

// http.url
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
type: 'attribute',
});

// url.full
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
});
});
@@ -162,18 +170,19 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {

// All queries should have defaults + custom groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom

// First two should be defaults (http_url)
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
// First two should be defaults (http.url, url.full)
expect(query.groupBy[0].key).toBe('http.url');
expect(query.groupBy[1].key).toBe('url.full');

// Last two should be custom (matching subset of properties)
expect(query.groupBy[1]).toMatchObject({
expect(query.groupBy[2]).toMatchObject({
dataType: DataTypes.String,
key: 'service.name',
type: 'resource',
});
expect(query.groupBy[2]).toMatchObject({
expect(query.groupBy[3]).toMatchObject({
dataType: DataTypes.String,
key: 'deployment.environment',
type: 'resource',
@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {

it('formats data using the utility function', () => {
const mockRows = [
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
];

const dataProps = {
@@ -6,18 +6,15 @@
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: http_host = '${domainName}'
* - Domain handling: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Existence check: http_url EXISTS
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Aggregation: count() expression
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';

describe('EndpointDropdown - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
@@ -46,9 +43,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters');

// Base filter 1: Domain http_host = '${domainName}'
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);

// Base filter 2: Kind
@@ -56,7 +53,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {

// Base filter 3: Existence check
expect(queryA.filter?.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
'(http.url EXISTS OR url.full EXISTS)',
);

// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -67,11 +64,16 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
});
expect(queryA).not.toHaveProperty('aggregateAttribute');

// GroupBy: http_url
expect(queryA.groupBy).toHaveLength(1);
// GroupBy: Both http.url and url.full
expect(queryA.groupBy).toHaveLength(2);
expect(queryA.groupBy).toContainEqual({
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
key: 'http.url',
dataType: 'string',
type: 'attribute',
});
expect(queryA.groupBy).toContainEqual({
key: 'url.full',
dataType: 'string',
type: 'attribute',
});
});
@@ -118,7 +120,53 @@ describe('EndpointDropdown - V5 Migration Validation', () => {

// Exact filter expression with custom filters merged
expect(expression).toBe(
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
);
});
});

describe('3. HTTP URL Filter Special Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};

const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);

const dropdownQuery = payload[2];
const expression =
dropdownQuery.query.builder.queryData[0].filter?.expression;

// CRITICAL: Exact filter expression with http.url converted to OR logic
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
);
});
});
@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});

it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getRateOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {

// Verify EXACT new filter format with OR operator
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);

// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {

// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);

// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});

it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getLatencyOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData.filter).toBeDefined();
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);

// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {

// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}' service.name = 'user-service'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
);
});
});
@@ -142,6 +142,7 @@ describe('StatusCodeBarCharts', () => {
endTime: 1609545600000,
};
const mockDomainName = 'test-domain';
const mockEndPointName = '/api/test';
const onDragSelectMock = jest.fn();
const refetchFn = jest.fn();

@@ -231,6 +232,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -266,6 +268,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -308,6 +311,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -352,6 +356,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -399,6 +404,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -413,6 +419,7 @@ describe('StatusCodeBarCharts', () => {
// but we've confirmed the function is mocked and ready to be tested
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: [],
op: 'AND',
@@ -460,6 +467,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockCustomFilters as IBuilderQuery['filters']}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -469,6 +477,7 @@ describe('StatusCodeBarCharts', () => {
// Assert widget creation was called with the correct parameters
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({ id: 'custom-filter' }),
@@ -10,7 +10,7 @@
*
* V5 Changes:
* - Filter format change: filters.items[] → filter.expression
* - Domain filter: (http_host)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - stepInterval: 60 → null
* - Grouped by response_status_code
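To make the format change above concrete, a minimal before/after sketch in TypeScript (not part of the diff; the key/value shapes follow the test fixtures elsewhere in this diff, and the domain value is illustrative):

// V4: structured filter items attached to the query
const v4Filters = {
	items: [
		{
			id: 'domain-filter',
			key: { key: 'net.peer.name', dataType: 'string', type: 'tag' },
			op: '=',
			value: 'api.internal',
		},
	],
	op: 'AND',
};

// V5: a single expression string; the domain is matched with an OR across
// net.peer.name and server.address, and the span kind is pinned to Client
const v5Filter = {
	expression:
		"(net.peer.name = 'api.internal' OR server.address = 'api.internal') AND kind_string = 'Client'",
};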
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');

// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);

// Base filter 2: Kind
|
||||
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (http_host)
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`http_host = '${mockDomainName}'`,
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(callsExpression).toBe(latencyExpression);
|
||||
|
||||
// Verify base filters
|
||||
expect(callsExpression).toContain('http_host');
|
||||
expect(callsExpression).toContain('net.peer.name');
|
||||
expect(callsExpression).toContain("kind_string = 'Client'");
|
||||
|
||||
// Verify custom filters are merged
|
||||
@@ -187,4 +187,51 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
|
||||
expect(callsExpression).toContain('production');
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. HTTP URL Filter Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/metrics',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const callsChartQuery = payload[4];
|
||||
const latencyChartQuery = payload[5];
|
||||
|
||||
const callsExpression =
|
||||
callsChartQuery.query.builder.queryData[0].filter?.expression;
|
||||
const latencyExpression =
|
||||
latencyChartQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: http.url converted to OR logic
|
||||
expect(callsExpression).toContain(
|
||||
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
|
||||
);
|
||||
expect(latencyExpression).toContain(
|
||||
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
|
||||
);
|
||||
|
||||
// Base filters still present
|
||||
expect(callsExpression).toContain('net.peer.name');
|
||||
expect(callsExpression).toContain("kind_string = 'Client'");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Kind filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
expect(typeof queryA.filter?.expression).toBe('string');
|
||||
expect(queryA).not.toHaveProperty('filters.items');
|
||||
|
||||
// Base filter 1: Domain (http_host)
|
||||
// Base filter 1: Domain (net.peer.name OR server.address)
|
||||
expect(queryA.filter?.expression).toContain(
|
||||
`http_host = '${mockDomainName}'`,
|
||||
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
|
||||
);
|
||||
|
||||
// Base filter 2: Kind
|
||||
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
statusCodeQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// Base filters present
|
||||
expect(expression).toContain('http_host');
|
||||
expect(expression).toContain('net.peer.name');
|
||||
expect(expression).toContain("kind_string = 'Client'");
|
||||
expect(expression).toContain('response_status_code EXISTS');
|
||||
|
||||
@@ -165,4 +165,62 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
|
||||
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
|
||||
});
|
||||
});
|
||||
|
||||
describe('4. HTTP URL Filter Handling', () => {
|
||||
it('converts http.url filter to (http.url OR url.full) expression', () => {
|
||||
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'http-url-filter',
|
||||
key: {
|
||||
key: 'http.url',
|
||||
dataType: 'string' as any,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: '/api/users',
|
||||
},
|
||||
{
|
||||
id: 'service-filter',
|
||||
key: {
|
||||
key: 'service.name',
|
||||
dataType: 'string' as any,
|
||||
type: 'resource',
|
||||
},
|
||||
op: '=',
|
||||
value: 'user-service',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const payload = getEndPointDetailsQueryPayload(
|
||||
mockDomainName,
|
||||
mockStartTime,
|
||||
mockEndTime,
|
||||
filtersWithHttpUrl,
|
||||
);
|
||||
|
||||
const statusCodeQuery = payload[1];
|
||||
const expression =
|
||||
statusCodeQuery.query.builder.queryData[0].filter?.expression;
|
||||
|
||||
// CRITICAL: http.url converted to OR logic
|
||||
expect(expression).toContain(
|
||||
"(http.url = '/api/users' OR url.full = '/api/users')",
|
||||
);
|
||||
|
||||
// Other filters still present
|
||||
expect(expression).toContain('service.name');
|
||||
expect(expression).toContain('user-service');
|
||||
|
||||
// Base filters present
|
||||
expect(expression).toContain('net.peer.name');
|
||||
expect(expression).toContain("kind_string = 'Client'");
|
||||
expect(expression).toContain('response_status_code EXISTS');
|
||||
|
||||
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
|
||||
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -84,7 +84,7 @@ describe('TopErrors', () => {
|
||||
{
|
||||
columns: [
|
||||
{
|
||||
name: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
name: 'http.url',
|
||||
fieldDataType: 'string',
|
||||
fieldContext: 'attribute',
|
||||
},
|
||||
@@ -124,7 +124,7 @@ describe('TopErrors', () => {
|
||||
table: {
|
||||
rows: [
|
||||
{
|
||||
http_url: '/api/test',
|
||||
'http.url': '/api/test',
|
||||
A: 100,
|
||||
},
|
||||
],
|
||||
@@ -206,7 +206,7 @@ describe('TopErrors', () => {
|
||||
expect(navigateMock).toHaveBeenCalledWith({
|
||||
filters: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
|
||||
key: expect.objectContaining({ key: 'http.url' }),
|
||||
op: '=',
|
||||
value: '/api/test',
|
||||
}),
|
||||
@@ -335,7 +335,7 @@ describe('TopErrors', () => {
|
||||
|
||||
// Verify all required filters are present
|
||||
expect(filterExpression).toContain(
|
||||
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
|
||||
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -15,6 +15,7 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
|
||||
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
|
||||
import dayjs from 'dayjs';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
|
||||
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
|
||||
@@ -56,12 +57,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);

export const getDomainNameFilterExpression = (domainName: string): string =>
`http_host = '${domainName}'`;
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;

export const clientKindExpression = `kind_string = 'Client'`;

/**
* Converts filters to expression
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
@@ -74,6 +75,34 @@ export const convertFiltersWithUrlHandling = (
return baseExpression;
}

// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
const httpUrlFilter = filters.items?.find(
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
);

// If http.url filter exists, create modified filters with (http.url OR url.full)
if (httpUrlFilter && httpUrlFilter.value) {
// Remove ALL http.url filters from items (guards against duplicates)
const otherFilters = filters.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
);

// Convert to expression first with other filters
const {
filter: intermediateFilter,
} = convertFiltersToExpressionWithExistingQuery(
{ ...filters, items: otherFilters || [] },
baseExpression,
);

// Add the OR condition for http.url and url.full
const urlValue = httpUrlFilter.value;
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
return intermediateFilter.expression.trim()
? `${intermediateFilter.expression} AND ${urlCondition}`
: urlCondition;
}

const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
baseExpression,
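A minimal usage sketch of the updated helper (not part of the diff). The import path, the filter shapes, and the exact ordering of the merged expression are assumptions; the ordering in particular depends on convertFiltersToExpressionWithExistingQuery:

// Hypothetical import path — adjust to the actual utils module in the repo.
import { convertFiltersWithUrlHandling } from './utils';

const expression = convertFiltersWithUrlHandling(
	{
		items: [
			{
				id: 'url-filter',
				key: { key: 'http.url', dataType: 'string', type: 'tag' },
				op: '=',
				value: '/api/users',
			},
			{
				id: 'service-filter',
				key: { key: 'service.name', dataType: 'string', type: 'resource' },
				op: '=',
				value: 'user-service',
			},
		],
		op: 'AND',
	},
	`(net.peer.name = 'my-domain' OR server.address = 'my-domain') AND kind_string = 'Client'`,
);

// Per the tests in this diff, the result should contain the base expression,
// the remaining custom filters, and the OR condition for the URL, e.g.:
// (net.peer.name = 'my-domain' OR server.address = 'my-domain') AND kind_string = 'Client'
//   AND service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')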
@@ -342,7 +371,7 @@ export const formatDataForTable = (
|
||||
});
|
||||
};
|
||||
|
||||
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
|
||||
const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
|
||||
|
||||
export const getDomainMetricsQueryPayload = (
|
||||
domainName: string,
|
||||
@@ -559,7 +588,14 @@ const defaultGroupBy = [
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
dataType: DataTypes.String,
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
key: 'url.full',
|
||||
type: 'attribute',
|
||||
},
|
||||
// {
|
||||
@@ -831,8 +867,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`has_error = true`,
];
if (showStatusCodeErrors) {
@@ -874,7 +910,12 @@ export const getTopErrorsQueryPayload = (
filter: { expression: filterExpression },
groupBy: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
fieldDataType: 'string',
fieldContext: 'attribute',
},
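As a reference point, the base parts introduced in the buildFilterExpression hunk above join with AND into the expression asserted by the TopErrors test earlier in this diff; a sketch with domainName set to 'test-domain':

// Expression asserted in the TopErrors test for domainName = 'test-domain':
const expectedTopErrorsExpression =
	"kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND " +
	"(net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true";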
@@ -1093,11 +1134,11 @@ export const formatEndPointsDataForTable = (
|
||||
if (!isGroupedByAttribute) {
|
||||
formattedData = data?.map((endpoint) => {
|
||||
const { port } = extractPortAndEndpoint(
|
||||
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
|
||||
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
|
||||
);
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
|
||||
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
|
||||
port,
|
||||
callCount:
|
||||
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
|
||||
@@ -1221,7 +1262,9 @@ export const formatTopErrorsDataForTable = (
|
||||
|
||||
return {
|
||||
key: v4(),
|
||||
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
|
||||
endpointName: getDisplayValue(
|
||||
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
|
||||
),
|
||||
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
|
||||
statusMessage: getDisplayValue(rowObj.status_message),
|
||||
count: getDisplayValue(rowObj.__result_0),
|
||||
@@ -1238,10 +1281,10 @@ export const getTopErrorsCoRelationQueryFilters = (
|
||||
{
|
||||
id: 'ea16470b',
|
||||
key: {
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
key: 'http.url',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
|
||||
id: 'http.url--string--tag--false',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
@@ -1738,7 +1781,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
filters || { items: [], op: 'AND' },
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
),
|
||||
},
|
||||
expression: 'A',
|
||||
@@ -1750,7 +1793,12 @@ export const getEndPointDetailsQueryPayload = (
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
},
|
||||
{
|
||||
key: 'url.full',
|
||||
dataType: DataTypes.String,
|
||||
type: 'attribute',
|
||||
},
|
||||
@@ -2177,7 +2225,7 @@ export const getEndPointZeroStateQueryPayload = (
|
||||
orderBy: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: SPAN_ATTRIBUTES.HTTP_URL,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
},
|
||||
@@ -2371,7 +2419,8 @@ export const statusCodeWidgetInfo = [
|
||||
|
||||
interface EndPointDropDownResponseRow {
|
||||
data: {
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: string;
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: string;
|
||||
'url.full': string;
|
||||
A: number;
|
||||
};
|
||||
}
|
||||
@@ -2390,8 +2439,8 @@ export const getFormattedEndPointDropDownData = (
|
||||
}
|
||||
return data.map((row) => ({
|
||||
key: v4(),
|
||||
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
|
||||
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
|
||||
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
|
||||
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
|
||||
}));
|
||||
};
|
||||
|
||||
@@ -2720,6 +2769,7 @@ export const groupStatusCodes = (
|
||||
|
||||
export const getStatusCodeBarChartWidgetData = (
|
||||
domainName: string,
|
||||
endPointName: string,
|
||||
filters: IBuilderQuery['filters'],
|
||||
): Widgets => ({
|
||||
query: {
|
||||
@@ -2748,6 +2798,20 @@ export const getStatusCodeBarChartWidgetData = (
|
||||
op: '=',
|
||||
value: domainName,
|
||||
},
|
||||
...(endPointName
|
||||
? [
|
||||
{
|
||||
id: '8b1be6f0',
|
||||
key: {
|
||||
dataType: DataTypes.String,
|
||||
key: SPAN_ATTRIBUTES.URL_PATH,
|
||||
type: 'tag',
|
||||
},
|
||||
op: '=',
|
||||
value: endPointName,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(filters?.items || []),
|
||||
],
|
||||
op: 'AND',
|
||||
@@ -2869,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2901,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2933,7 +2997,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND http_url EXISTS`,
|
||||
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2965,7 +3029,7 @@ export const getAllEndpointsWidgetData = (
|
||||
filters,
|
||||
`${getDomainNameFilterExpression(
|
||||
domainName,
|
||||
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
|
||||
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -2996,12 +3060,24 @@ export const getAllEndpointsWidgetData = (
|
||||
);
|
||||
|
||||
widget.renderColumnCell = {
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
|
||||
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: (
|
||||
url: string | number,
|
||||
record?: RowData,
|
||||
): ReactNode => {
|
||||
// First try to use the url from the column value
|
||||
let urlValue = url;
|
||||
|
||||
// If url is empty/null and we have the record, fallback to url.full
|
||||
if (isEmptyFilterValue(url) && record) {
|
||||
const { 'url.full': urlFull } = record;
|
||||
urlValue = urlFull;
|
||||
}
|
||||
|
||||
if (!urlValue || urlValue === 'n/a') {
|
||||
return <span>-</span>;
|
||||
}
|
||||
|
||||
const { endpoint } = extractPortAndEndpoint(String(url));
|
||||
const { endpoint } = extractPortAndEndpoint(String(urlValue));
|
||||
return <span>{getDisplayValue(endpoint)}</span>;
|
||||
},
|
||||
A: (numOfCalls: any): ReactNode => (
|
||||
@@ -3056,8 +3132,8 @@ export const getAllEndpointsWidgetData = (
|
||||
};
|
||||
|
||||
widget.customColTitles = {
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
|
||||
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
|
||||
'net.peer.port': 'Port',
|
||||
};
|
||||
|
||||
widget.title = (
|
||||
@@ -3082,10 +3158,12 @@ export const getAllEndpointsWidgetData = (
|
||||
</div>
|
||||
);
|
||||
|
||||
widget.hiddenColumns = ['url.full'];
|
||||
|
||||
return widget;
|
||||
};
|
||||
|
||||
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
|
||||
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
|
||||
|
||||
export const getGroupByFiltersFromGroupByValues = (
|
||||
rowData: any,
|
||||
@@ -3143,7 +3221,7 @@ export const getRateOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`http_host = '${domainName}'`,
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3194,7 +3272,7 @@ export const getLatencyOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`http_host = '${domainName}'`,
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
/* eslint-disable jsx-a11y/no-static-element-interactions */
|
||||
/* eslint-disable jsx-a11y/click-events-have-key-events */
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import {
|
||||
@@ -14,16 +15,14 @@ import {
|
||||
Tag,
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import {
|
||||
RenderErrorResponseDTO,
|
||||
ZeustypesHostDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
|
||||
import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
|
||||
import { AxiosError } from 'axios';
|
||||
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
|
||||
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { InfoIcon, Link2, Pencil } from 'lucide-react';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { HostsProps } from 'types/api/customDomain/types';
|
||||
|
||||
import './CustomDomainSettings.styles.scss';
|
||||
|
||||
@@ -36,7 +35,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
const { notifications } = useNotifications();
|
||||
const [isEditModalOpen, setIsEditModalOpen] = useState(false);
|
||||
const [isPollingEnabled, setIsPollingEnabled] = useState(false);
|
||||
const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);
|
||||
const [hosts, setHosts] = useState<HostsProps[] | null>(null);
|
||||
|
||||
const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
|
||||
null,
|
||||
@@ -58,37 +57,36 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
};
|
||||
|
||||
const {
|
||||
data: hostsData,
|
||||
isLoading: isLoadingHosts,
|
||||
isFetching: isFetchingHosts,
|
||||
refetch: refetchHosts,
|
||||
} = useGetHosts();
|
||||
data: deploymentsData,
|
||||
isLoading: isLoadingDeploymentsData,
|
||||
isFetching: isFetchingDeploymentsData,
|
||||
refetch: refetchDeploymentsData,
|
||||
} = useGetDeploymentsData(true);
|
||||
|
||||
const {
|
||||
mutate: updateSubDomain,
|
||||
isLoading: isLoadingUpdateCustomDomain,
|
||||
} = usePutHost<AxiosError<RenderErrorResponseDTO>>();
|
||||
|
||||
const stripProtocol = (url: string): string => {
|
||||
return url?.split('://')[1] ?? url;
|
||||
};
|
||||
|
||||
const dnsSuffix = useMemo(() => {
|
||||
const defaultHost = hosts?.find((h) => h.is_default);
|
||||
return defaultHost?.url && defaultHost?.name
|
||||
? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
|
||||
: '';
|
||||
}, [hosts]);
|
||||
} = useMutation(updateSubDomainAPI, {
|
||||
onSuccess: () => {
|
||||
setIsPollingEnabled(true);
|
||||
refetchDeploymentsData();
|
||||
setIsEditModalOpen(false);
|
||||
},
|
||||
onError: (error: AxiosError) => {
|
||||
setUpdateDomainError(error);
|
||||
setIsPollingEnabled(false);
|
||||
},
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (isFetchingHosts) {
|
||||
if (isFetchingDeploymentsData) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (hostsData?.data?.status === 'success') {
|
||||
setHosts(hostsData?.data?.data?.hosts ?? null);
|
||||
if (deploymentsData?.data?.status === 'success') {
|
||||
setHosts(deploymentsData.data.data.hosts);
|
||||
|
||||
const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
|
||||
const activeCustomDomain = deploymentsData.data.data.hosts.find(
|
||||
(host) => !host.is_default,
|
||||
);
|
||||
|
||||
@@ -99,36 +97,32 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
}
|
||||
}
|
||||
|
||||
if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
|
||||
if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
|
||||
setTimeout(() => {
|
||||
refetchHosts();
|
||||
refetchDeploymentsData();
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
if (hostsData?.data?.data?.state === 'HEALTHY') {
|
||||
if (deploymentsData?.data?.data.state === 'HEALTHY') {
|
||||
setIsPollingEnabled(false);
|
||||
}
|
||||
}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);
|
||||
}, [
|
||||
deploymentsData,
|
||||
refetchDeploymentsData,
|
||||
isPollingEnabled,
|
||||
isFetchingDeploymentsData,
|
||||
]);
|
||||
|
||||
const onUpdateCustomDomainSettings = (): void => {
|
||||
editForm
|
||||
.validateFields()
|
||||
.then((values) => {
|
||||
if (values.subdomain) {
|
||||
updateSubDomain(
|
||||
{ data: { name: values.subdomain } },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setIsPollingEnabled(true);
|
||||
refetchHosts();
|
||||
setIsEditModalOpen(false);
|
||||
},
|
||||
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
|
||||
setUpdateDomainError(error as AxiosError);
|
||||
setIsPollingEnabled(false);
|
||||
},
|
||||
updateSubDomain({
|
||||
data: {
|
||||
name: values.subdomain,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
setCustomDomainDetails({
|
||||
subdomain: values.subdomain,
|
||||
@@ -140,8 +134,10 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
});
|
||||
};
|
||||
|
||||
const onCopyUrlHandler = (url: string): void => {
|
||||
setCopyUrl(stripProtocol(url));
|
||||
const onCopyUrlHandler = (host: string): void => {
|
||||
const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
|
||||
|
||||
setCopyUrl(url);
|
||||
notifications.success({
|
||||
message: 'Copied to clipboard',
|
||||
});
|
||||
@@ -161,7 +157,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
</div>
|
||||
|
||||
<div className="custom-domain-settings-content">
|
||||
{!isLoadingHosts && (
|
||||
{!isLoadingDeploymentsData && (
|
||||
<Card className="custom-domain-settings-card">
|
||||
<div className="custom-domain-settings-content-header">
|
||||
Team {org?.[0]?.displayName} Information
|
||||
@@ -173,9 +169,10 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
<div
|
||||
className="custom-domain-url"
|
||||
key={host.name}
|
||||
onClick={(): void => onCopyUrlHandler(host.url || '')}
|
||||
onClick={(): void => onCopyUrlHandler(host.name)}
|
||||
>
|
||||
<Link2 size={12} /> {stripProtocol(host.url || '')}
|
||||
<Link2 size={12} /> {host.name}.
|
||||
{deploymentsData?.data.data.cluster.region.dns}
|
||||
{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
|
||||
</div>
|
||||
))}
|
||||
@@ -184,7 +181,11 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
<div className="custom-domain-url-edit-btn">
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
|
||||
disabled={
|
||||
isLoadingDeploymentsData ||
|
||||
isFetchingDeploymentsData ||
|
||||
isPollingEnabled
|
||||
}
|
||||
type="default"
|
||||
icon={<Pencil size={10} />}
|
||||
onClick={(): void => setIsEditModalOpen(true)}
|
||||
@@ -197,7 +198,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
{isPollingEnabled && (
|
||||
<Alert
|
||||
className="custom-domain-update-status"
|
||||
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
|
||||
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
|
||||
type="info"
|
||||
icon={<InfoIcon size={12} />}
|
||||
/>
|
||||
@@ -205,7 +206,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{isLoadingHosts && (
|
||||
{isLoadingDeploymentsData && (
|
||||
<Card className="custom-domain-settings-card">
|
||||
<Skeleton
|
||||
className="custom-domain-settings-skeleton"
|
||||
@@ -254,7 +255,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
|
||||
placeholder="Enter Domain"
|
||||
onChange={(): void => setUpdateDomainError(null)}
|
||||
addonAfter={dnsSuffix}
|
||||
addonAfter={deploymentsData?.data.data.cluster.region.dns}
|
||||
autoFocus
|
||||
/>
|
||||
</Form.Item>
|
||||
@@ -266,8 +267,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
{updateDomainError.status === 409 ? (
|
||||
<Alert
|
||||
message={
|
||||
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
|
||||
?.message ||
|
||||
(updateDomainError?.response?.data as { error?: string })?.error ||
|
||||
'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
|
||||
}
|
||||
type="warning"
|
||||
@@ -275,10 +275,7 @@ export default function CustomDomainSettings(): JSX.Element {
|
||||
/>
|
||||
) : (
|
||||
<Typography.Text type="danger">
|
||||
{
|
||||
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
|
||||
?.message
|
||||
}
|
||||
{(updateDomainError.response?.data as { error: string })?.error}
|
||||
</Typography.Text>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,128 +0,0 @@
|
||||
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import CustomDomainSettings from '../CustomDomainSettings';
|
||||
|
||||
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
|
||||
|
||||
const mockHostsResponse: GetHosts200 = {
|
||||
status: 'success',
|
||||
data: {
|
||||
name: 'accepted-starfish',
|
||||
state: 'HEALTHY',
|
||||
tier: 'PREMIUM',
|
||||
hosts: [
|
||||
{
|
||||
name: 'accepted-starfish',
|
||||
is_default: true,
|
||||
url: 'https://accepted-starfish.test.cloud',
|
||||
},
|
||||
{
|
||||
name: 'custom-host',
|
||||
is_default: false,
|
||||
url: 'https://custom-host.test.cloud',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
describe('CustomDomainSettings', () => {
|
||||
afterEach(() => server.resetHandlers());
|
||||
|
||||
it('renders host URLs with protocol stripped and marks the default host', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await screen.findByText(/custom-host\.test\.cloud/i);
|
||||
expect(screen.getByText('Default')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens edit modal with DNS suffix derived from the default host', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
|
||||
expect(
|
||||
screen.getByRole('dialog', { name: /customize your team['’]s url/i }),
|
||||
).toBeInTheDocument();
|
||||
// DNS suffix is the part of the default host URL after the name prefix
|
||||
expect(screen.getByText('test.cloud')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
|
||||
let capturedBody: Record<string, unknown> = {};
|
||||
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
|
||||
capturedBody = await req.json<Record<string, unknown>>();
|
||||
return res(ctx.status(200), ctx.json({}));
|
||||
}),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
|
||||
const input = screen.getByPlaceholderText(/enter domain/i);
|
||||
await user.clear(input);
|
||||
await user.type(input, 'myteam');
|
||||
await user.click(screen.getByRole('button', { name: /apply changes/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(capturedBody).toEqual({ name: 'myteam' });
|
||||
});
|
||||
});
|
||||
|
||||
it('shows contact support option when domain update returns 409', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(
|
||||
ctx.status(409),
|
||||
ctx.json({ error: { message: 'Already updated today' } }),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(<CustomDomainSettings />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
await user.click(
|
||||
screen.getByRole('button', { name: /customize team['’]s url/i }),
|
||||
);
|
||||
await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
|
||||
await user.click(screen.getByRole('button', { name: /apply changes/i }));
|
||||
|
||||
expect(
|
||||
await screen.findByRole('button', { name: /contact support/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -34,9 +34,6 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
const sortedVariablesArray = useDashboardVariablesSelector(
|
||||
(state) => state.sortedVariablesArray,
|
||||
);
|
||||
const dynamicVariableOrder = useDashboardVariablesSelector(
|
||||
(state) => state.dynamicVariableOrder,
|
||||
);
|
||||
const dependencyData = useDashboardVariablesSelector(
|
||||
(state) => state.dependencyData,
|
||||
);
|
||||
@@ -55,11 +52,10 @@ function DashboardVariableSelection(): JSX.Element | null {
|
||||
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
|
||||
|
||||
// Memoize the order key to avoid unnecessary triggers
const variableOrderKey = useMemo(() => {
const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
const dynamicVariableOrderKey = dynamicVariableOrder?.join(',') ?? '';
return `${queryVariableOrderKey}|${dynamicVariableOrderKey}`;
}, [dependencyData?.order, dynamicVariableOrder]);
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);

// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
@@ -70,7 +66,7 @@ function DashboardVariableSelection(): JSX.Element | null {
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableOrderKey, minTime, maxTime]);
}, [dependencyOrderKey, minTime, maxTime]);

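A small illustration (not from the repo) of the memoized key that now drives this effect:

// With dependencyData.order = ['env', 'service', 'pod'], the memoized key is:
const dependencyOrderKey = ['env', 'service', 'pod'].join(','); // 'env,service,pod'
// The effect above re-runs only when this string, minTime, or maxTime changes,
// so changes that only affect dynamicVariableOrder appear to no longer restart
// the fetch cycle.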
// Performance optimization: For dynamic variables with allSelected=true, we don't store
|
||||
// individual values in localStorage since we can always derive them from available options.
|
||||
|
||||
@@ -1,203 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { act, render } from '@testing-library/react';
|
||||
import {
|
||||
dashboardVariablesStore,
|
||||
setDashboardVariablesStore,
|
||||
updateDashboardVariablesStore,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
|
||||
import {
|
||||
IDashboardVariables,
|
||||
IDashboardVariablesStoreState,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import {
|
||||
enqueueFetchOfAllVariables,
|
||||
initializeVariableFetchStore,
|
||||
} from 'providers/Dashboard/store/variableFetchStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import DashboardVariableSelection from '../DashboardVariableSelection';
|
||||
|
||||
// Mock providers/Dashboard/Dashboard
|
||||
const mockSetSelectedDashboard = jest.fn();
|
||||
const mockUpdateLocalStorageDashboardVariables = jest.fn();
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): Record<string, unknown> => ({
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock hooks/dashboard/useVariablesFromUrl
|
||||
const mockUpdateUrlVariable = jest.fn();
|
||||
const mockGetUrlVariables = jest.fn().mockReturnValue({});
|
||||
jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
|
||||
__esModule: true,
|
||||
default: (): Record<string, unknown> => ({
|
||||
updateUrlVariable: mockUpdateUrlVariable,
|
||||
getUrlVariables: mockGetUrlVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock variableFetchStore functions
|
||||
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
|
||||
initializeVariableFetchStore: jest.fn(),
|
||||
enqueueFetchOfAllVariables: jest.fn(),
|
||||
enqueueDescendantsOfVariable: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock initializeDefaultVariables
|
||||
jest.mock('providers/Dashboard/initializeDefaultVariables', () => ({
|
||||
initializeDefaultVariables: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock react-redux useSelector for globalTime
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
|
||||
}));
|
||||
|
||||
// Mock VariableItem to avoid rendering complexity
|
||||
jest.mock('../VariableItem', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div data-testid="variable-item" />,
|
||||
}));
|
||||
|
||||
function createVariable(
|
||||
overrides: Partial<IDashboardVariable> = {},
|
||||
): IDashboardVariable {
|
||||
return {
|
||||
id: 'test-id',
|
||||
name: 'test-var',
|
||||
description: '',
|
||||
type: 'QUERY',
|
||||
sort: 'DISABLED',
|
||||
showALLOption: false,
|
||||
multiSelect: false,
|
||||
order: 0,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function resetStore(): void {
|
||||
dashboardVariablesStore.set(() => ({
|
||||
dashboardId: '',
|
||||
variables: {},
|
||||
sortedVariablesArray: [],
|
||||
dependencyData: null,
|
||||
variableTypes: {},
|
||||
dynamicVariableOrder: [],
|
||||
}));
|
||||
}
|
||||
|
||||
describe('DashboardVariableSelection', () => {
|
||||
beforeEach(() => {
|
||||
resetStore();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should call initializeVariableFetchStore and enqueueFetchOfAllVariables on mount', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when dynamicVariableOrder changes', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
// Clear mocks after initial render
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Add a DYNAMIC variable which changes dynamicVariableOrder
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(
|
||||
expect.arrayContaining(['env', 'dyn1']),
|
||||
);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when a dynamic variable is removed', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 2 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Remove dyn2, changing dynamicVariableOrder from ['dyn1','dyn2'] to ['dyn1']
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env', 'dyn1']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should NOT re-trigger fetch cycle when dynamicVariableOrder stays the same', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Update a non-dynamic variable's selectedValue — dynamicVariableOrder unchanged
|
||||
act(() => {
|
||||
const snapshot = dashboardVariablesStore.getSnapshot();
|
||||
dashboardVariablesStore.set(
|
||||
(): IDashboardVariablesStoreState => ({
|
||||
...snapshot,
|
||||
variables: {
|
||||
...snapshot.variables,
|
||||
env: {
|
||||
...snapshot.variables.env,
|
||||
selectedValue: 'production',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).not.toHaveBeenCalled();
|
||||
expect(enqueueFetchOfAllVariables).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react';
|
||||
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
|
||||
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
|
||||
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
|
||||
import {
|
||||
HistogramTooltipProps,
|
||||
TooltipRenderArgs,
|
||||
@@ -21,11 +22,21 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
|
||||
if (customRenderTooltip) {
|
||||
return customRenderTooltip(props);
|
||||
}
|
||||
const content = buildTooltipContent({
|
||||
data: props.uPlotInstance.data,
|
||||
series: props.uPlotInstance.series,
|
||||
dataIndexes: props.dataIndexes,
|
||||
activeSeriesIndex: props.seriesIndex,
|
||||
uPlotInstance: props.uPlotInstance,
|
||||
yAxisUnit: rest.yAxisUnit ?? '',
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
});
|
||||
const tooltipProps: HistogramTooltipProps = {
|
||||
...props,
|
||||
timezone: rest.timezone,
|
||||
yAxisUnit: rest.yAxisUnit,
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
content,
|
||||
};
|
||||
return <HistogramTooltip {...tooltipProps} />;
|
||||
},
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react';
|
||||
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
|
||||
import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
|
||||
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
|
||||
import {
|
||||
TimeSeriesTooltipProps,
|
||||
TooltipRenderArgs,
|
||||
@@ -16,11 +17,21 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
|
||||
if (customRenderTooltip) {
|
||||
return customRenderTooltip(props);
|
||||
}
|
||||
const content = buildTooltipContent({
|
||||
data: props.uPlotInstance.data,
|
||||
series: props.uPlotInstance.series,
|
||||
dataIndexes: props.dataIndexes,
|
||||
activeSeriesIndex: props.seriesIndex,
|
||||
uPlotInstance: props.uPlotInstance,
|
||||
yAxisUnit: rest.yAxisUnit ?? '',
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
});
|
||||
const tooltipProps: TimeSeriesTooltipProps = {
|
||||
...props,
|
||||
timezone: rest.timezone,
|
||||
yAxisUnit: rest.yAxisUnit,
|
||||
decimalPrecision: rest.decimalPrecision,
|
||||
content,
|
||||
};
|
||||
return <TimeSeriesTooltip {...tooltipProps} />;
|
||||
},
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { ExecStats } from 'api/v5/v5';
|
||||
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { getInitialStackedBands } from 'container/DashboardContainer/visualization/charts/utils/stackSeriesUtils';
|
||||
@@ -55,13 +54,6 @@ export function prepareBarPanelConfig({
|
||||
minTimeScale?: number;
|
||||
maxTimeScale?: number;
|
||||
}): UPlotConfigBuilder {
|
||||
const stepIntervals: ExecStats['stepIntervals'] = get(
|
||||
apiResponse,
|
||||
'data.newResult.meta.stepIntervals',
|
||||
{},
|
||||
);
|
||||
const minStepInterval = Math.min(...Object.values(stepIntervals));
|
||||
|
||||
const builder = buildBaseConfig({
|
||||
widget,
|
||||
isDarkMode,
|
||||
@@ -73,7 +65,12 @@ export function prepareBarPanelConfig({
|
||||
panelType: PANEL_TYPES.BAR,
|
||||
minTimeScale,
|
||||
maxTimeScale,
|
||||
stepInterval: minStepInterval,
|
||||
});
|
||||
|
||||
builder.setCursor({
|
||||
focus: {
|
||||
prox: 1e3,
|
||||
},
|
||||
});
|
||||
|
||||
if (widget.stackedBarChart) {
|
||||
@@ -81,6 +78,12 @@ export function prepareBarPanelConfig({
|
||||
builder.setBands(getInitialStackedBands(seriesCount));
|
||||
}
|
||||
|
||||
const stepIntervals: Record<string, number> = get(
|
||||
apiResponse,
|
||||
'data.newResult.meta.stepIntervals',
|
||||
{},
|
||||
);
|
||||
|
||||
const seriesList: QueryData[] = apiResponse?.data?.result || [];
|
||||
seriesList.forEach((series) => {
|
||||
const baseLabelName = getLabelName(
|
||||
|
||||
@@ -1,325 +0,0 @@
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import {
|
||||
MetricRangePayloadProps,
|
||||
MetricRangePayloadV3,
|
||||
} from 'types/api/metrics/getQueryRange';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { PanelMode } from '../../types';
|
||||
import { prepareChartData, prepareUPlotConfig } from '../utils';
|
||||
|
||||
jest.mock(
|
||||
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
|
||||
() => ({
|
||||
getStoredSeriesVisibility: jest.fn(),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
|
||||
}));
|
||||
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
getLegend: jest.fn(
|
||||
(_queryData: unknown, _query: unknown, labelName: string) =>
|
||||
`legend-${labelName}`,
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('lib/getLabelName', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(
|
||||
(_metric: unknown, _queryName: string, _legend: string) => 'baseLabel',
|
||||
),
|
||||
}));
|
||||
|
||||
const getLegendMock = jest.requireMock('lib/dashboard/getQueryResults')
|
||||
.getLegend as jest.Mock;
|
||||
const getLabelNameMock = jest.requireMock('lib/getLabelName')
|
||||
.default as jest.Mock;
|
||||
|
||||
const createApiResponse = (
|
||||
result: MetricRangePayloadProps['data']['result'] = [],
|
||||
): MetricRangePayloadProps => ({
|
||||
data: {
|
||||
result,
|
||||
resultType: 'matrix',
|
||||
newResult: (null as unknown) as MetricRangePayloadV3,
|
||||
},
|
||||
});
|
||||
|
||||
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
|
||||
({
|
||||
id: 'widget-1',
|
||||
yAxisUnit: 'ms',
|
||||
isLogScale: false,
|
||||
thresholds: [],
|
||||
customLegendColors: {},
|
||||
...overrides,
|
||||
} as Widgets);
|
||||
|
||||
const defaultTimezone = {
|
||||
name: 'UTC',
|
||||
value: 'UTC',
|
||||
offset: 'UTC',
|
||||
searchIndex: 'UTC',
|
||||
};
|
||||
|
||||
describe('TimeSeriesPanel utils', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
getLabelNameMock.mockReturnValue('baseLabel');
|
||||
getLegendMock.mockImplementation(
|
||||
(_queryData: unknown, _query: unknown, labelName: string) =>
|
||||
`legend-${labelName}`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('prepareChartData', () => {
|
||||
it('returns aligned data with timestamps and empty series when result is empty', () => {
|
||||
const apiResponse = createApiResponse([]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data).toHaveLength(1);
|
||||
expect(data[0]).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns timestamps and one series of y values for single series', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
legend: 'Series A',
|
||||
values: [
|
||||
[1000, '10'],
|
||||
[2000, '20'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data).toHaveLength(2);
|
||||
expect(data[0]).toEqual([1000, 2000]);
|
||||
expect(data[1]).toEqual([10, 20]);
|
||||
});
|
||||
|
||||
it('merges timestamps and fills missing values with null for multiple series', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[3000, '3'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q2',
|
||||
values: [
|
||||
[1000, '10'],
|
||||
[2000, '20'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data[0]).toEqual([1000, 2000, 3000]);
|
||||
// First series: 1, null, 3
|
||||
expect(data[1]).toEqual([1, null, 3]);
|
||||
// Second series: 10, 20, null
|
||||
expect(data[2]).toEqual([10, 20, null]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('prepareUPlotConfig', () => {
|
||||
const baseParams = {
|
||||
widget: createWidget(),
|
||||
isDarkMode: true,
|
||||
currentQuery: {} as Query,
|
||||
onClick: jest.fn(),
|
||||
onDragSelect: jest.fn(),
|
||||
apiResponse: createApiResponse(),
|
||||
timezone: defaultTimezone,
|
||||
panelMode: PanelMode.DASHBOARD_VIEW,
|
||||
};
|
||||
|
||||
it('adds no series when apiResponse has empty result', () => {
|
||||
const builder = prepareUPlotConfig(baseParams);
|
||||
|
||||
const config = builder.getConfig();
|
||||
// Base series (timestamp) only
|
||||
expect(config.series).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('adds one series per result item with label from getLabelName when no currentQuery', () => {
|
||||
getLegendMock.mockReset();
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: { __name__: 'cpu' },
|
||||
queryName: 'Q1',
|
||||
legend: 'CPU',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, '2'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: (null as unknown) as Query,
|
||||
});
|
||||
|
||||
expect(getLabelNameMock).toHaveBeenCalled();
|
||||
expect(getLegendMock).not.toHaveBeenCalled();
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.series).toHaveLength(2);
|
||||
expect(config.series?.[1]).toMatchObject({
|
||||
label: 'baseLabel',
|
||||
scale: 'y',
|
||||
});
|
||||
});
|
||||
|
||||
it('uses getLegend for label when currentQuery is provided', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
legend: 'L1',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: {} as Query,
|
||||
});
|
||||
|
||||
expect(getLegendMock).toHaveBeenCalledWith(
|
||||
{
|
||||
legend: 'L1',
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [[1000, '1']],
|
||||
},
|
||||
{},
|
||||
'baseLabel',
|
||||
);
|
||||
|
||||
const config = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: {} as Query,
|
||||
}).getConfig();
|
||||
expect(config.series?.[1]).toMatchObject({
|
||||
label: 'legend-baseLabel',
|
||||
});
|
||||
});
|
||||
|
||||
it('uses DrawStyle.Line and VisibilityMode.Never when series has multiple valid points', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, '2'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
const series = config.series?.[1];
|
||||
|
||||
expect(config.series).toHaveLength(2);
|
||||
// Line style and points never for multi-point series (checked via builder API)
|
||||
const legendItems = builder.getLegendItems();
|
||||
expect(Object.keys(legendItems)).toHaveLength(1);
|
||||
// multi-point series → points hidden
|
||||
expect(series).toBeDefined();
|
||||
expect(series!.points?.show).toBe(false);
|
||||
});
|
||||
|
||||
it('uses DrawStyle.Points and shows points when series has only one valid point', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, 'NaN'],
|
||||
[3000, 'invalid'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
|
||||
expect(config.series).toHaveLength(2);
|
||||
const seriesConfig = config.series?.[1];
|
||||
expect(seriesConfig).toBeDefined();
|
||||
// Single valid point -> Points draw style (asserted via series config)
|
||||
expect(seriesConfig).toMatchObject({
|
||||
scale: 'y',
|
||||
spanGaps: true,
|
||||
});
|
||||
// single-point series → points shown
|
||||
expect(seriesConfig).toBeDefined();
|
||||
expect(seriesConfig!.points?.show).toBe(true);
|
||||
});
|
||||
|
||||
it('uses widget customLegendColors to set series stroke color', () => {
|
||||
const widget = createWidget({
|
||||
customLegendColors: { 'legend-baseLabel': '#ff0000' },
|
||||
});
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
widget,
|
||||
apiResponse,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
const seriesConfig = config.series?.[1];
|
||||
expect(seriesConfig).toBeDefined();
|
||||
expect(seriesConfig!.stroke).toBe('#ff0000');
|
||||
});
|
||||
|
||||
it('adds multiple series when result has multiple items', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q2',
|
||||
values: [[1000, '2']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
|
||||
expect(config.series).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,4 +1,3 @@
import { ExecStats } from 'api/v5/v5';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
@@ -15,12 +14,9 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { isInvalidPlotValue } from 'lib/uPlotV2/utils/dataUtils';
import get from 'lodash-es/get';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { QueryData } from 'types/api/widgets/getQuery';

import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';
@@ -35,22 +31,6 @@ export const prepareChartData = (
return [timestampArr, ...yAxisValuesArr];
};

function hasSingleVisiblePointForSeries(series: QueryData): boolean {
const rawValues = series.values ?? [];
let validPointCount = 0;

for (const [, rawValue] of rawValues) {
if (!isInvalidPlotValue(rawValue)) {
validPointCount += 1;
if (validPointCount > 1) {
return false;
}
}
}

return true;
}

export const prepareUPlotConfig = ({
widget,
isDarkMode,
@@ -74,13 +54,6 @@ export const prepareUPlotConfig = ({
minTimeScale?: number;
maxTimeScale?: number;
}): UPlotConfigBuilder => {
const stepIntervals: ExecStats['stepIntervals'] = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const minStepInterval = Math.min(...Object.values(stepIntervals));

const builder = buildBaseConfig({
widget,
isDarkMode,
@@ -92,11 +65,9 @@ export const prepareUPlotConfig = ({
panelType: PANEL_TYPES.TIME_SERIES,
minTimeScale,
maxTimeScale,
stepInterval: minStepInterval,
});

apiResponse.data?.result?.forEach((series) => {
const hasSingleValidPoint = hasSingleVisiblePointForSeries(series);
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
@@ -109,15 +80,13 @@ export const prepareUPlotConfig = ({

builder.addSeries({
scaleKey: 'y',
drawStyle: hasSingleValidPoint ? DrawStyle.Points : DrawStyle.Line,
drawStyle: DrawStyle.Line,
label: label,
colorMapping: widget.customLegendColors ?? {},
spanGaps: true,
lineStyle: LineStyle.Solid,
lineInterpolation: LineInterpolation.Spline,
showPoints: hasSingleValidPoint
? VisibilityMode.Always
: VisibilityMode.Never,
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
panelType: PANEL_TYPES.TIME_SERIES,

@@ -1,233 +0,0 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { STEP_INTERVAL_MULTIPLIER } from 'lib/uPlotV2/constants';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import uPlot from 'uplot';
|
||||
|
||||
import { PanelMode } from '../../types';
|
||||
import { buildBaseConfig } from '../baseConfigBuilder';
|
||||
|
||||
jest.mock(
|
||||
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
|
||||
() => ({
|
||||
getStoredSeriesVisibility: jest.fn(),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock('lib/uPlotV2/utils', () => ({
|
||||
calculateWidthBasedOnStepInterval: jest.fn(),
|
||||
}));
|
||||
|
||||
const calculateWidthBasedOnStepIntervalMock = jest.requireMock(
|
||||
'lib/uPlotV2/utils',
|
||||
).calculateWidthBasedOnStepInterval as jest.Mock;
|
||||
|
||||
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
|
||||
}));
|
||||
|
||||
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
|
||||
({
|
||||
id: 'widget-1',
|
||||
yAxisUnit: 'ms',
|
||||
isLogScale: false,
|
||||
softMin: undefined,
|
||||
softMax: undefined,
|
||||
thresholds: [],
|
||||
...overrides,
|
||||
} as Widgets);
|
||||
|
||||
const createApiResponse = (
|
||||
overrides: Partial<MetricRangePayloadProps> = {},
|
||||
): MetricRangePayloadProps =>
|
||||
({
|
||||
data: { result: [], resultType: 'matrix', newResult: null },
|
||||
...overrides,
|
||||
} as MetricRangePayloadProps);
|
||||
|
||||
const baseProps = {
|
||||
widget: createWidget(),
|
||||
apiResponse: createApiResponse(),
|
||||
isDarkMode: true,
|
||||
panelMode: PanelMode.DASHBOARD_VIEW,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
};
|
||||
|
||||
describe('buildBaseConfig', () => {
|
||||
it('returns a UPlotConfigBuilder instance', () => {
|
||||
const builder = buildBaseConfig(baseProps);
|
||||
|
||||
expect(builder).toBeDefined();
|
||||
expect(typeof builder.getConfig).toBe('function');
|
||||
expect(typeof builder.getLegendItems).toBe('function');
|
||||
});
|
||||
|
||||
it('configures builder with widgetId and DASHBOARD_VIEW preferences', () => {
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
panelMode: PanelMode.DASHBOARD_VIEW,
|
||||
widget: createWidget({ id: 'my-widget' }),
|
||||
});
|
||||
|
||||
expect(builder.getWidgetId()).toBe('my-widget');
|
||||
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
|
||||
});
|
||||
|
||||
it('configures builder with IN_MEMORY selection when panelMode is DASHBOARD_EDIT', () => {
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
panelMode: PanelMode.DASHBOARD_EDIT,
|
||||
});
|
||||
|
||||
expect(builder.getShouldSaveSelectionPreference()).toBe(false);
|
||||
const config = builder.getConfig();
|
||||
expect(config.series).toBeDefined();
|
||||
});
|
||||
|
||||
it('passes stepInterval to builder and cursor prox uses width * multiplier', () => {
|
||||
const stepInterval = 60;
|
||||
const mockWidth = 100;
|
||||
calculateWidthBasedOnStepIntervalMock.mockReturnValue(mockWidth);
|
||||
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
stepInterval,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
const prox = config.cursor?.hover?.prox;
|
||||
expect(typeof prox).toBe('function');
|
||||
|
||||
const uPlotInstance = {} as uPlot;
|
||||
const proxResult = (prox as (u: uPlot) => number)(uPlotInstance);
|
||||
|
||||
expect(calculateWidthBasedOnStepIntervalMock).toHaveBeenCalledWith({
|
||||
uPlotInstance,
|
||||
stepInterval,
|
||||
});
|
||||
expect(proxResult).toBe(mockWidth * STEP_INTERVAL_MULTIPLIER);
|
||||
});
|
||||
|
||||
it('adds x scale with time config and min/max when provided', () => {
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
minTimeScale: 1000,
|
||||
maxTimeScale: 2000,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.scales?.x).toBeDefined();
|
||||
expect(config.scales?.x?.time).toBe(true);
|
||||
const range = config.scales?.x?.range;
|
||||
expect(Array.isArray(range)).toBe(true);
|
||||
expect((range as [number, number])[0]).toBe(1000);
|
||||
});
|
||||
|
||||
it('configures log scale on y axis when widget.isLogScale is true', () => {
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
widget: createWidget({ isLogScale: true }),
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.scales?.y).toBeDefined();
|
||||
expect(config.scales?.y?.log).toBe(10);
|
||||
});
|
||||
|
||||
it('adds onClick plugin when onClick is a function', () => {
|
||||
const onClickPlugin = jest.requireMock('lib/uPlotLib/plugins/onClickPlugin')
|
||||
.default;
|
||||
const onClick = jest.fn();
|
||||
|
||||
buildBaseConfig({
|
||||
...baseProps,
|
||||
onClick,
|
||||
apiResponse: createApiResponse(),
|
||||
});
|
||||
|
||||
expect(onClickPlugin).toHaveBeenCalledWith({
|
||||
onClick,
|
||||
apiResponse: expect.any(Object),
|
||||
});
|
||||
});
|
||||
|
||||
it('does not add onClick plugin when onClick is not a function', () => {
|
||||
const onClickPlugin = jest.requireMock('lib/uPlotLib/plugins/onClickPlugin')
|
||||
.default;
|
||||
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
const plugins = config.plugins ?? [];
|
||||
expect(
|
||||
plugins.some((p) => (p as { name?: string }).name === 'onClickPlugin'),
|
||||
).toBe(false);
|
||||
expect(onClickPlugin).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('adds thresholds from widget', () => {
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
widget: createWidget({
|
||||
thresholds: [
|
||||
{
|
||||
thresholdValue: 80,
|
||||
thresholdColor: '#ff0000',
|
||||
thresholdUnit: 'ms',
|
||||
thresholdLabel: 'High',
|
||||
},
|
||||
] as Widgets['thresholds'],
|
||||
}),
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
const drawHooks = config.hooks?.draw ?? [];
|
||||
expect(drawHooks.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('adds x and y axes with correct scaleKeys and panelType', () => {
|
||||
const builder = buildBaseConfig(baseProps);
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.axes).toHaveLength(2);
|
||||
expect(config.axes?.[0].scale).toBe('x');
|
||||
expect(config.axes?.[1].scale).toBe('y');
|
||||
});
|
||||
|
||||
it('sets tzDate when timezone is provided', () => {
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
timezone: {
|
||||
name: 'America/New_York',
|
||||
value: 'America/New_York',
|
||||
offset: 'UTC-5',
|
||||
searchIndex: 'America/New_York',
|
||||
},
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.tzDate).toBeDefined();
|
||||
expect(typeof config.tzDate).toBe('function');
|
||||
});
|
||||
|
||||
it('leaves tzDate undefined when timezone is not provided', () => {
|
||||
const builder = buildBaseConfig(baseProps);
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.tzDate).toBeUndefined();
|
||||
});
|
||||
|
||||
it('registers setSelect hook when onDragSelect is provided', () => {
|
||||
const onDragSelect = jest.fn();
|
||||
const builder = buildBaseConfig({
|
||||
...baseProps,
|
||||
onDragSelect,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.hooks?.setSelect).toBeDefined();
|
||||
});
|
||||
});
|
||||
@@ -26,7 +26,6 @@ export interface BaseConfigBuilderProps {
panelType: PANEL_TYPES;
minTimeScale?: number;
maxTimeScale?: number;
stepInterval?: number;
}

export function buildBaseConfig({
@@ -40,7 +39,6 @@ export function buildBaseConfig({
panelType,
minTimeScale,
maxTimeScale,
stepInterval,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
const tzDate = timezone
? (timestamp: number): Date =>
@@ -58,7 +56,6 @@ export function buildBaseConfig({
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
stepInterval,
});

const thresholdOptions: ThresholdsDrawHookOptions = {

@@ -2,10 +2,10 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { Button, Skeleton, Tag, Typography } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { useGetHosts } from 'api/generated/services/zeus';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
|
||||
import history from 'lib/history';
|
||||
import { Link2 } from 'lucide-react';
|
||||
import { Globe, Link2 } from 'lucide-react';
|
||||
import Card from 'periscope/components/Card/Card';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { LicensePlatform } from 'types/api/licensesV3/getActive';
|
||||
@@ -26,21 +26,36 @@ function DataSourceInfo({
|
||||
const isEnabled =
|
||||
activeLicense && activeLicense.platform === LicensePlatform.CLOUD;
|
||||
|
||||
const { data: hostsData, isError } = useGetHosts({
|
||||
query: { enabled: isEnabled || false },
|
||||
});
|
||||
const {
|
||||
data: deploymentsData,
|
||||
isError: isErrorDeploymentsData,
|
||||
} = useGetDeploymentsData(isEnabled || false);
|
||||
|
||||
const [region, setRegion] = useState<string>('');
|
||||
const [url, setUrl] = useState<string>('');
|
||||
|
||||
useEffect(() => {
|
||||
if (hostsData) {
|
||||
const defaultHost = hostsData?.data?.data?.hosts?.find((h) => h.is_default);
|
||||
if (defaultHost?.url) {
|
||||
const url = defaultHost?.url?.split('://')[1] ?? '';
|
||||
setUrl(url);
|
||||
if (deploymentsData) {
|
||||
switch (deploymentsData?.data.data.cluster.region.name) {
|
||||
case 'in':
|
||||
setRegion('India');
|
||||
break;
|
||||
case 'us':
|
||||
setRegion('United States');
|
||||
break;
|
||||
case 'eu':
|
||||
setRegion('Europe');
|
||||
break;
|
||||
default:
|
||||
setRegion(deploymentsData?.data.data.cluster.region.name);
|
||||
break;
|
||||
}
|
||||
|
||||
setUrl(
|
||||
`${deploymentsData?.data.data.name}.${deploymentsData?.data.data.cluster.region.dns}`,
|
||||
);
|
||||
}
|
||||
}, [hostsData]);
|
||||
}, [deploymentsData]);
|
||||
|
||||
const renderNotSendingData = (): JSX.Element => (
|
||||
<>
|
||||
@@ -108,8 +123,14 @@ function DataSourceInfo({
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{!isError && hostsData && (
|
||||
{!isErrorDeploymentsData && deploymentsData && (
|
||||
<div className="workspace-details">
|
||||
<div className="workspace-region">
|
||||
<Globe size={10} />
|
||||
|
||||
<Typography>{region}</Typography>
|
||||
</div>
|
||||
|
||||
<div className="workspace-url">
|
||||
<Link2 size={12} />
|
||||
|
||||
@@ -135,11 +156,17 @@ function DataSourceInfo({
|
||||
Hello there, Welcome to your SigNoz workspace
|
||||
</Typography>
|
||||
|
||||
{!isError && hostsData && (
|
||||
{!isErrorDeploymentsData && deploymentsData && (
|
||||
<Card className="welcome-card">
|
||||
<Card.Content>
|
||||
<div className="workspace-ready-container">
|
||||
<div className="workspace-details">
|
||||
<div className="workspace-region">
|
||||
<Globe size={10} />
|
||||
|
||||
<Typography>{region}</Typography>
|
||||
</div>
|
||||
|
||||
<div className="workspace-url">
|
||||
<Link2 size={12} />
|
||||
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
|
||||
import DataSourceInfo from '../DataSourceInfo';
|
||||
|
||||
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
|
||||
|
||||
const mockHostsResponse: GetHosts200 = {
|
||||
status: 'success',
|
||||
data: {
|
||||
name: 'accepted-starfish',
|
||||
state: 'HEALTHY',
|
||||
tier: 'PREMIUM',
|
||||
hosts: [
|
||||
{
|
||||
name: 'accepted-starfish',
|
||||
is_default: true,
|
||||
url: 'https://accepted-starfish.test.cloud',
|
||||
},
|
||||
{
|
||||
name: 'custom-host',
|
||||
is_default: false,
|
||||
url: 'https://custom-host.test.cloud',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
describe('DataSourceInfo', () => {
|
||||
afterEach(() => server.resetHandlers());
|
||||
|
||||
it('renders the default workspace URL with protocol stripped', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
});
|
||||
|
||||
it('does not render workspace URL when GET /zeus/hosts fails', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(500), ctx.json({})),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/Your workspace is ready/i);
|
||||
expect(screen.queryByText(/signoz\.cloud/i)).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders workspace URL in the data-received view when telemetry is flowing', async () => {
|
||||
server.use(
|
||||
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockHostsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<DataSourceInfo dataSentToSigNoz={true} isLoading={false} />);
|
||||
|
||||
await screen.findByText(/accepted-starfish\.test\.cloud/i);
|
||||
});
|
||||
});
|
||||
@@ -36,6 +36,24 @@ jest.mock('react-router-dom', () => {
};
});

// Mock deployments data hook to avoid unrelated network calls in this page
jest.mock(
'hooks/CustomDomain/useGetDeploymentsData',
(): Record<string, unknown> => ({
useGetDeploymentsData: (): {
data: undefined;
isLoading: boolean;
isFetching: boolean;
isError: boolean;
} => ({
data: undefined,
isLoading: false,
isFetching: false,
isError: false,
}),
}),
);

const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
const TEST_WORKSPACE_ID = 'w1';

@@ -1,7 +1,6 @@
import { ReactNode, useCallback, useState } from 'react';
import { ReactNode, useState } from 'react';
import MEditor, { EditorProps, Monaco } from '@monaco-editor/react';
import { Color } from '@signozhq/design-tokens';
import type { InputRef } from 'antd';
import {
Button,
Collapse,
@@ -47,18 +46,12 @@ function Overview({
handleChangeSelectedView,
}: Props): JSX.Element {
const [isWrapWord, setIsWrapWord] = useState<boolean>(true);
const [isSearchVisible, setIsSearchVisible] = useState<boolean>(true);
const [isSearchVisible, setIsSearchVisible] = useState<boolean>(false);
const [isAttributesExpanded, setIsAttributesExpanded] = useState<boolean>(
true,
);
const [fieldSearchInput, setFieldSearchInput] = useState<string>('');

const searchInputRef = useCallback((node: InputRef | null) => {
if (node) {
setTimeout(() => node.focus(), 100);
}
}, []);

const isDarkMode = useIsDarkMode();

const options: EditorProps['options'] = {
@@ -203,7 +196,7 @@ function Overview({
<>
{isSearchVisible && (
<Input
ref={searchInputRef}
autoFocus
placeholder="Search for a field..."
className="search-input"
value={fieldSearchInput}

@@ -111,19 +111,23 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
);

const itemContent = useCallback(
(index: number, log: Record<string, unknown>): JSX.Element => (
<TableRow
tableColumns={tableColumns}
index={index}
log={log}
logs={tableViewProps.logs}
hasActions
fontSize={tableViewProps.fontSize}
onShowLogDetails={onSetActiveLog}
isActiveLog={activeLog?.id === log.id}
onClearActiveLog={onCloseActiveLog}
/>
),
(index: number, log: Record<string, unknown>): JSX.Element => {
return (
<div key={log.id as string}>
<TableRow
tableColumns={tableColumns}
index={index}
log={log}
logs={tableViewProps.logs}
hasActions
fontSize={tableViewProps.fontSize}
onShowLogDetails={onSetActiveLog}
isActiveLog={activeLog?.id === log.id}
onClearActiveLog={onCloseActiveLog}
/>
</div>
);
},
[
tableColumns,
onSetActiveLog,
@@ -139,8 +143,7 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
{tableColumns
.filter((column) => column.key)
.map((column) => {
const isDragColumn =
column.key !== 'expand' && column.key !== 'state-indicator';
const isDragColumn = column.key !== 'expand';

return (
<TableHeaderCellStyled

@@ -21,7 +21,7 @@ import {
METRIC_TYPE_LABEL_MAP,
METRIC_TYPE_VALUES_MAP,
} from '../Summary/constants';
import { MetricTypeRenderer } from '../Summary/utils';
import MetricTypeRenderer from '../Summary/MetricTypeRenderer';
import { METRIC_METADATA_KEYS } from './constants';
import { MetadataProps } from './types';
import { determineIsMonotonic } from './utils';

@@ -1,5 +1,4 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import {
Button,
Empty,
@@ -10,22 +9,24 @@ import {
Popover,
Spin,
} from 'antd';
import { Filter } from 'api/v5/v5';
import {
convertExpressionToFilters,
convertFiltersToExpression,
} from 'components/QueryBuilderV2/utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetMetricsListFilterValues } from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { Search } from 'lucide-react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { SUMMARY_FILTERS_KEY } from './constants';

function MetricNameSearch({
queryFilters,
queryFilterExpression,
onFilterChange,
}: {
queryFilters: TagFilter;
queryFilterExpression: Filter;
onFilterChange: (value: string) => void;
}): JSX.Element {
const [searchParams, setSearchParams] = useSearchParams();

const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
const [searchString, setSearchString] = useState<string>('');
const [debouncedSearchString, setDebouncedSearchString] = useState<string>('');
@@ -67,9 +68,12 @@ function MetricNameSearch({

const handleSelect = useCallback(
(selectedMetricName: string): void => {
const queryFilters = convertExpressionToFilters(
queryFilterExpression.expression,
);
const newFilters = {
items: [
...queryFilters.items,
...queryFilters,
{
id: 'metric_name',
op: 'CONTAINS',
@@ -83,13 +87,11 @@ function MetricNameSearch({
],
op: 'and',
};
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
});
const newFilterExpression = convertFiltersToExpression(newFilters);
onFilterChange(newFilterExpression.expression);
setIsPopoverOpen(false);
},
[queryFilters.items, setSearchParams, searchParams],
[queryFilterExpression, onFilterChange],
);

const metricNameFilterValues = useMemo(

@@ -0,0 +1,65 @@
|
||||
import { useMemo } from 'react';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Typography } from 'antd';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import {
|
||||
BarChart,
|
||||
BarChart2,
|
||||
BarChartHorizontal,
|
||||
Diff,
|
||||
Gauge,
|
||||
} from 'lucide-react';
|
||||
|
||||
import { METRIC_TYPE_LABEL_MAP } from './constants';
|
||||
|
||||
function MetricTypeRenderer({ type }: { type: MetricType }): JSX.Element {
|
||||
const [icon, color] = useMemo(() => {
|
||||
switch (type) {
|
||||
case MetricType.SUM:
|
||||
return [
|
||||
<Diff key={type} size={12} color={Color.BG_ROBIN_500} />,
|
||||
Color.BG_ROBIN_500,
|
||||
];
|
||||
case MetricType.GAUGE:
|
||||
return [
|
||||
<Gauge key={type} size={12} color={Color.BG_SAKURA_500} />,
|
||||
Color.BG_SAKURA_500,
|
||||
];
|
||||
case MetricType.HISTOGRAM:
|
||||
return [
|
||||
<BarChart2 key={type} size={12} color={Color.BG_SIENNA_500} />,
|
||||
Color.BG_SIENNA_500,
|
||||
];
|
||||
case MetricType.SUMMARY:
|
||||
return [
|
||||
<BarChartHorizontal key={type} size={12} color={Color.BG_FOREST_500} />,
|
||||
Color.BG_FOREST_500,
|
||||
];
|
||||
case MetricType.EXPONENTIAL_HISTOGRAM:
|
||||
return [
|
||||
<BarChart key={type} size={12} color={Color.BG_AQUA_500} />,
|
||||
Color.BG_AQUA_500,
|
||||
];
|
||||
default:
|
||||
return [null, ''];
|
||||
}
|
||||
}, [type]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="metric-type-renderer"
|
||||
style={{
|
||||
backgroundColor: `${color}33`,
|
||||
border: `1px solid ${color}`,
|
||||
color,
|
||||
}}
|
||||
>
|
||||
{icon}
|
||||
<Typography.Text style={{ color, fontSize: 12 }}>
|
||||
{METRIC_TYPE_LABEL_MAP[type]}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default MetricTypeRenderer;
|
||||
@@ -0,0 +1,69 @@
|
||||
import { useMemo } from 'react';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { Typography } from 'antd';
|
||||
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import {
|
||||
BarChart,
|
||||
BarChart2,
|
||||
BarChartHorizontal,
|
||||
Diff,
|
||||
Gauge,
|
||||
} from 'lucide-react';
|
||||
|
||||
import { METRIC_TYPE_LABEL_MAP_V2 } from './constants';
|
||||
|
||||
export function MetricTypeRendererV2({
|
||||
type,
|
||||
}: {
|
||||
type: MetrictypesTypeDTO;
|
||||
}): JSX.Element {
|
||||
const [icon, color] = useMemo(() => {
|
||||
switch (type) {
|
||||
case MetrictypesTypeDTO.sum:
|
||||
return [
|
||||
<Diff key={type} size={12} color={Color.BG_ROBIN_500} />,
|
||||
Color.BG_ROBIN_500,
|
||||
];
|
||||
case MetrictypesTypeDTO.gauge:
|
||||
return [
|
||||
<Gauge key={type} size={12} color={Color.BG_SAKURA_500} />,
|
||||
Color.BG_SAKURA_500,
|
||||
];
|
||||
case MetrictypesTypeDTO.histogram:
|
||||
return [
|
||||
<BarChart2 key={type} size={12} color={Color.BG_SIENNA_500} />,
|
||||
Color.BG_SIENNA_500,
|
||||
];
|
||||
case MetrictypesTypeDTO.summary:
|
||||
return [
|
||||
<BarChartHorizontal key={type} size={12} color={Color.BG_FOREST_500} />,
|
||||
Color.BG_FOREST_500,
|
||||
];
|
||||
case MetrictypesTypeDTO.exponentialhistogram:
|
||||
return [
|
||||
<BarChart key={type} size={12} color={Color.BG_AQUA_500} />,
|
||||
Color.BG_AQUA_500,
|
||||
];
|
||||
default:
|
||||
return [null, ''];
|
||||
}
|
||||
}, [type]);
|
||||
|
||||
return (
|
||||
<div
|
||||
className="metric-type-renderer"
|
||||
style={{
|
||||
backgroundColor: `${color}33`,
|
||||
border: `1px solid ${color}`,
|
||||
color,
|
||||
}}
|
||||
>
|
||||
{icon}
|
||||
<Typography.Text style={{ color, fontSize: 12 }}>
|
||||
{METRIC_TYPE_LABEL_MAP_V2[type]}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default MetricTypeRendererV2;
|
||||
@@ -1,23 +1,19 @@
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useSearchParams } from 'react-router-dom-v5-compat';
|
||||
import { Button, Menu, Popover, Tooltip } from 'antd';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';
|
||||
import { Search } from 'lucide-react';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import {
|
||||
METRIC_TYPE_LABEL_MAP,
|
||||
METRIC_TYPE_VALUES_MAP,
|
||||
SUMMARY_FILTERS_KEY,
|
||||
} from './constants';
|
||||
import { METRIC_TYPE_LABEL_MAP_V2 } from './constants';
|
||||
|
||||
function MetricTypeSearch({
|
||||
queryFilters,
|
||||
onFilterChange,
|
||||
}: {
|
||||
queryFilters: TagFilter;
|
||||
onFilterChange: (expression: string) => void;
|
||||
}): JSX.Element {
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
|
||||
const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
|
||||
|
||||
const menuItems = useMemo(
|
||||
@@ -26,9 +22,9 @@ function MetricTypeSearch({
|
||||
key: 'all',
|
||||
value: 'All',
|
||||
},
|
||||
...Object.keys(METRIC_TYPE_LABEL_MAP).map((key) => ({
|
||||
key: METRIC_TYPE_VALUES_MAP[key as MetricType],
|
||||
value: METRIC_TYPE_LABEL_MAP[key as MetricType],
|
||||
...Object.keys(METRIC_TYPE_LABEL_MAP_V2).map((key) => ({
|
||||
key: METRIC_TYPE_LABEL_MAP_V2[key as MetrictypesTypeDTO],
|
||||
value: METRIC_TYPE_LABEL_MAP_V2[key as MetrictypesTypeDTO],
|
||||
})),
|
||||
],
|
||||
[],
|
||||
@@ -36,16 +32,17 @@ function MetricTypeSearch({
|
||||
|
||||
const handleSelect = useCallback(
|
||||
(selectedMetricType: string): void => {
|
||||
let newFilters;
|
||||
if (selectedMetricType !== 'all') {
|
||||
const newFilters = {
|
||||
newFilters = {
|
||||
items: [
|
||||
...queryFilters.items,
|
||||
{
|
||||
id: 'metric_type',
|
||||
id: 'type',
|
||||
op: '=',
|
||||
key: {
|
||||
id: 'metric_type',
|
||||
key: 'metric_type',
|
||||
id: 'type',
|
||||
key: 'type',
|
||||
type: 'tag',
|
||||
},
|
||||
value: selectedMetricType,
|
||||
@@ -53,23 +50,17 @@ function MetricTypeSearch({
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
setSearchParams({
|
||||
...Object.fromEntries(searchParams.entries()),
|
||||
[SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
|
||||
});
|
||||
} else {
|
||||
const newFilters = {
|
||||
items: queryFilters.items.filter((item) => item.id !== 'metric_type'),
|
||||
newFilters = {
|
||||
items: queryFilters.items.filter((item) => item.id !== 'type'),
|
||||
op: 'AND',
|
||||
};
|
||||
setSearchParams({
|
||||
...Object.fromEntries(searchParams.entries()),
|
||||
[SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
|
||||
});
|
||||
}
|
||||
const newFilterExpression = convertFiltersToExpression(newFilters);
|
||||
onFilterChange(newFilterExpression.expression);
|
||||
setIsPopoverOpen(false);
|
||||
},
|
||||
[queryFilters.items, setSearchParams, searchParams],
|
||||
[queryFilters.items, onFilterChange],
|
||||
);
|
||||
|
||||
const menu = (
|
||||
|
||||
@@ -1,27 +1,55 @@
import { Tooltip } from 'antd';
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
import { Button, Tooltip } from 'antd';
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { HardHat, Info } from 'lucide-react';
import { Info, Play } from 'lucide-react';
import { DataSource } from 'types/common/queryBuilder';

import { MetricsSearchProps } from './types';

function MetricsSearch({ query, onChange }: MetricsSearchProps): JSX.Element {
function MetricsSearch({
query,
onChange,
currentQueryFilterExpression,
setCurrentQueryFilterExpression,
}: MetricsSearchProps): JSX.Element {
const handleOnChange = (expression: string): void => {
setCurrentQueryFilterExpression(expression);
};

const handleStageAndRunQuery = (): void =>
onChange(currentQueryFilterExpression);

return (
<div className="metrics-search-container">
<div className="qb-search-container">
<div data-testid="qb-search-container" className="qb-search-container">
<Tooltip
title="Use filters to refine metrics based on attributes. Example: service_name=api - Shows all metrics associated with the API service"
placement="right"
>
<Info size={16} />
</Tooltip>
<QueryBuilderSearch
query={query}
onChange={onChange}
suffixIcon={<HardHat size={16} />}
isMetricsExplorer
<QuerySearch
onChange={handleOnChange}
dataSource={DataSource.METRICS}
queryData={{
...query,
filter: {
...query?.filter,
expression: currentQueryFilterExpression,
},
}}
onRun={handleOnChange}
showFilterSuggestionsWithoutMetric
/>
</div>
<Button
type="primary"
onClick={handleStageAndRunQuery}
className="stage-run-query"
icon={<Play size={14} />}
>
Stage & Run Query
</Button>
<div className="metrics-search-options">
<DateTimeSelectionV2
showAutoRefresh={false}

@@ -9,6 +9,7 @@ import {
|
||||
Typography,
|
||||
} from 'antd';
|
||||
import { SorterResult } from 'antd/es/table/interface';
|
||||
import { Querybuildertypesv5OrderDirectionDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { Info } from 'lucide-react';
|
||||
|
||||
import { MetricsListItemRowData, MetricsTableProps } from './types';
|
||||
@@ -24,7 +25,8 @@ function MetricsTable({
|
||||
setOrderBy,
|
||||
totalCount,
|
||||
openMetricDetails,
|
||||
queryFilters,
|
||||
queryFilterExpression,
|
||||
onFilterChange,
|
||||
}: MetricsTableProps): JSX.Element {
|
||||
const handleTableChange: TableProps<MetricsListItemRowData>['onChange'] = useCallback(
|
||||
(
|
||||
@@ -36,13 +38,20 @@ function MetricsTable({
|
||||
): void => {
|
||||
if ('field' in sorter && sorter.order) {
|
||||
setOrderBy({
|
||||
columnName: sorter.field as string,
|
||||
order: sorter.order === 'ascend' ? 'asc' : 'desc',
|
||||
key: {
|
||||
name: sorter.field as string,
|
||||
},
|
||||
direction:
|
||||
sorter.order === 'ascend'
|
||||
? Querybuildertypesv5OrderDirectionDTO.asc
|
||||
: Querybuildertypesv5OrderDirectionDTO.desc,
|
||||
});
|
||||
} else {
|
||||
setOrderBy({
|
||||
columnName: 'samples',
|
||||
order: 'desc',
|
||||
key: {
|
||||
name: 'samples',
|
||||
},
|
||||
direction: Querybuildertypesv5OrderDirectionDTO.desc,
|
||||
});
|
||||
}
|
||||
},
|
||||
@@ -51,19 +60,17 @@ function MetricsTable({
|
||||
|
||||
return (
|
||||
<div className="metrics-table-container">
|
||||
{!isError && !isLoading && (
|
||||
<div className="metrics-table-title" data-testid="metrics-table-title">
|
||||
<Typography.Title level={4} className="metrics-table-title">
|
||||
List View
|
||||
</Typography.Title>
|
||||
<Tooltip
|
||||
title="The table displays all metrics in the selected time range. Each row represents a unique metric, and its metric name, and metadata like description, type, unit, and samples/timeseries cardinality observed in the selected time range."
|
||||
placement="right"
|
||||
>
|
||||
<Info size={16} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
)}
|
||||
<div className="metrics-table-title" data-testid="metrics-table-title">
|
||||
<Typography.Title level={4} className="metrics-table-title">
|
||||
List View
|
||||
</Typography.Title>
|
||||
<Tooltip
|
||||
title="The table displays all metrics in the selected time range. Each row represents a unique metric, and its metric name, and metadata like description, type, unit, and samples/timeseries cardinality observed in the selected time range."
|
||||
placement="right"
|
||||
>
|
||||
<Info size={16} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Table
|
||||
loading={{
|
||||
spinning: isLoading,
|
||||
@@ -75,7 +82,7 @@ function MetricsTable({
|
||||
),
|
||||
}}
|
||||
dataSource={data}
|
||||
columns={getMetricsTableColumns(queryFilters)}
|
||||
columns={getMetricsTableColumns(queryFilterExpression, onFilterChange)}
|
||||
locale={{
|
||||
emptyText: isLoading ? null : (
|
||||
<div
|
||||
|
||||
@@ -3,6 +3,7 @@ import { useWindowSize } from 'react-use';
|
||||
import { Group } from '@visx/group';
|
||||
import { Treemap } from '@visx/hierarchy';
|
||||
import { Empty, Select, Skeleton, Tooltip, Typography } from 'antd';
|
||||
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { stratify, treemapBinary } from 'd3-hierarchy';
|
||||
import { Info } from 'lucide-react';
|
||||
|
||||
@@ -12,21 +13,20 @@ import {
|
||||
TREEMAP_SQUARE_PADDING,
|
||||
TREEMAP_VIEW_OPTIONS,
|
||||
} from './constants';
|
||||
import { MetricsTreemapProps, TreemapTile, TreemapViewType } from './types';
|
||||
import { MetricsTreemapProps, TreemapContentProps, TreemapTile } from './types';
|
||||
import {
|
||||
getTreemapTileStyle,
|
||||
getTreemapTileTextStyle,
|
||||
transformTreemapData,
|
||||
} from './utils';
|
||||
|
||||
function MetricsTreemap({
|
||||
viewType,
|
||||
data,
|
||||
function TreemapContent({
|
||||
isLoading,
|
||||
isError,
|
||||
data,
|
||||
viewType,
|
||||
openMetricDetails,
|
||||
setHeatmapView,
|
||||
}: MetricsTreemapProps): JSX.Element {
|
||||
}: TreemapContentProps): JSX.Element {
|
||||
const { width: windowWidth } = useWindowSize();
|
||||
|
||||
const treemapWidth = useMemo(
|
||||
@@ -40,9 +40,9 @@ function MetricsTreemap({
|
||||
|
||||
const treemapData = useMemo(() => {
|
||||
const extracedTreemapData =
|
||||
(viewType === TreemapViewType.TIMESERIES
|
||||
? data?.data?.[TreemapViewType.TIMESERIES]
|
||||
: data?.data?.[TreemapViewType.SAMPLES]) || [];
|
||||
(viewType === MetricsexplorertypesTreemapModeDTO.timeseries
|
||||
? data?.timeseries
|
||||
: data?.samples) || [];
|
||||
return transformTreemapData(extracedTreemapData, viewType);
|
||||
}, [data, viewType]);
|
||||
|
||||
@@ -57,29 +57,12 @@ function MetricsTreemap({
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div data-testid="metrics-treemap-loading-state">
|
||||
<Skeleton
|
||||
style={{ width: treemapWidth, height: TREEMAP_HEIGHT + 55 }}
|
||||
active
|
||||
/>
|
||||
<Skeleton style={{ width: treemapWidth, height: TREEMAP_HEIGHT }} active />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
!data ||
|
||||
!data.data ||
|
||||
(data?.status === 'success' && !data?.data?.[viewType])
|
||||
) {
|
||||
return (
|
||||
<Empty
|
||||
description="No metrics found"
|
||||
data-testid="metrics-treemap-empty-state"
|
||||
style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
if (data?.status === 'error' || isError) {
|
||||
if (isError) {
|
||||
return (
|
||||
<Empty
|
||||
description="Error fetching metrics. If the problem persists, please contact support."
|
||||
@@ -89,6 +72,88 @@ function MetricsTreemap({
|
||||
);
|
||||
}
|
||||
|
||||
if (!data || !data?.[viewType]?.length) {
|
||||
return (
|
||||
<Empty
|
||||
description="No metrics found"
|
||||
data-testid="metrics-treemap-empty-state"
|
||||
style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<svg width={treemapWidth} height={TREEMAP_HEIGHT} className="metrics-treemap">
|
||||
<rect
|
||||
width={treemapWidth}
|
||||
height={TREEMAP_HEIGHT}
|
||||
rx={14}
|
||||
fill="transparent"
|
||||
/>
|
||||
<Treemap<TreemapTile>
|
||||
top={TREEMAP_MARGINS.TOP}
|
||||
root={transformedTreemapData}
|
||||
size={[xMax, yMax]}
|
||||
tile={treemapBinary}
|
||||
round
|
||||
>
|
||||
{(treemap): JSX.Element => (
|
||||
<Group>
|
||||
{treemap
|
||||
.descendants()
|
||||
.reverse()
|
||||
.map((node, i) => {
|
||||
const nodeWidth = node.x1 - node.x0 - TREEMAP_SQUARE_PADDING;
|
||||
const nodeHeight = node.y1 - node.y0 - TREEMAP_SQUARE_PADDING;
|
||||
if (nodeWidth < 0 || nodeHeight < 0) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<Group
|
||||
// eslint-disable-next-line react/no-array-index-key
|
||||
key={node.data.id || `node-${i}`}
|
||||
top={node.y0 + TREEMAP_MARGINS.TOP}
|
||||
left={node.x0 + TREEMAP_MARGINS.LEFT}
|
||||
>
|
||||
{node.depth > 0 && (
|
||||
<Tooltip
|
||||
title={`${node.data.id}: ${node.data.displayValue}%`}
|
||||
placement="top"
|
||||
>
|
||||
<foreignObject
|
||||
width={nodeWidth}
|
||||
height={nodeHeight}
|
||||
onClick={(): void => openMetricDetails(node.data.id, 'treemap')}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
...getTreemapTileStyle(node.data),
|
||||
...getTreemapTileTextStyle(),
|
||||
}}
|
||||
>
|
||||
{`${node.data.displayValue}%`}
|
||||
</div>
|
||||
</foreignObject>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Group>
|
||||
);
|
||||
})}
|
||||
</Group>
|
||||
)}
|
||||
</Treemap>
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
|
||||
function MetricsTreemap({
|
||||
viewType,
|
||||
data,
|
||||
isLoading,
|
||||
isError,
|
||||
openMetricDetails,
|
||||
setHeatmapView,
|
||||
}: MetricsTreemapProps): JSX.Element {
|
||||
return (
|
||||
<div
|
||||
className="metrics-treemap-container"
|
||||
@@ -108,72 +173,16 @@ function MetricsTreemap({
|
||||
options={TREEMAP_VIEW_OPTIONS}
|
||||
value={viewType}
|
||||
onChange={setHeatmapView}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
</div>
|
||||
<svg
|
||||
width={treemapWidth}
|
||||
height={TREEMAP_HEIGHT}
|
||||
className="metrics-treemap"
|
||||
>
|
||||
<rect
|
||||
width={treemapWidth}
|
||||
height={TREEMAP_HEIGHT}
|
||||
rx={14}
|
||||
fill="transparent"
|
||||
/>
|
||||
<Treemap<TreemapTile>
|
||||
top={TREEMAP_MARGINS.TOP}
|
||||
root={transformedTreemapData}
|
||||
size={[xMax, yMax]}
|
||||
tile={treemapBinary}
|
||||
round
|
||||
>
|
||||
{(treemap): JSX.Element => (
|
||||
<Group>
|
||||
{treemap
|
||||
.descendants()
|
||||
.reverse()
|
||||
.map((node, i) => {
|
||||
const nodeWidth = node.x1 - node.x0 - TREEMAP_SQUARE_PADDING;
|
||||
const nodeHeight = node.y1 - node.y0 - TREEMAP_SQUARE_PADDING;
|
||||
if (nodeWidth < 0 || nodeHeight < 0) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<Group
|
||||
// eslint-disable-next-line react/no-array-index-key
|
||||
key={node.data.id || `node-${i}`}
|
||||
top={node.y0 + TREEMAP_MARGINS.TOP}
|
||||
left={node.x0 + TREEMAP_MARGINS.LEFT}
|
||||
>
|
||||
{node.depth > 0 && (
|
||||
<Tooltip
|
||||
title={`${node.data.id}: ${node.data.displayValue}%`}
|
||||
placement="top"
|
||||
>
|
||||
<foreignObject
|
||||
width={nodeWidth}
|
||||
height={nodeHeight}
|
||||
onClick={(): void => openMetricDetails(node.data.id, 'treemap')}
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
...getTreemapTileStyle(node.data),
|
||||
...getTreemapTileTextStyle(),
|
||||
}}
|
||||
>
|
||||
{`${node.data.displayValue}%`}
|
||||
</div>
|
||||
</foreignObject>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Group>
|
||||
);
|
||||
})}
|
||||
</Group>
|
||||
)}
|
||||
</Treemap>
|
||||
</svg>
|
||||
<TreemapContent
|
||||
isLoading={isLoading}
|
||||
isError={isError}
|
||||
data={data}
|
||||
viewType={viewType}
|
||||
openMetricDetails={openMetricDetails}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -38,6 +38,7 @@
|
||||
.metrics-search-container {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
align-items: center;
|
||||
|
||||
.metrics-search-options {
|
||||
display: flex;
|
||||
|
||||
@@ -4,11 +4,24 @@ import { useSelector } from 'react-redux';
|
||||
import { useSearchParams } from 'react-router-dom-v5-compat';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import {
|
||||
useGetMetricsStats,
|
||||
useGetMetricsTreemap,
|
||||
} from 'api/generated/services/metrics';
|
||||
import {
|
||||
MetricsexplorertypesStatsRequestDTO,
|
||||
MetricsexplorertypesTreemapModeDTO,
|
||||
MetricsexplorertypesTreemapRequestDTO,
|
||||
Querybuildertypesv5OrderByDTO,
|
||||
Querybuildertypesv5OrderDirectionDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import {
|
||||
convertExpressionToFilters,
|
||||
convertFiltersToExpression,
|
||||
} from 'components/QueryBuilderV2/utils';
|
||||
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
|
||||
import NoLogs from 'container/NoLogs/NoLogs';
|
||||
import { useGetMetricsList } from 'hooks/metricsExplorer/useGetMetricsList';
|
||||
import { useGetMetricsTreeMap } from 'hooks/metricsExplorer/useGetMetricsTreeMap';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -23,32 +36,38 @@ import {
|
||||
IS_INSPECT_MODAL_OPEN_KEY,
|
||||
IS_METRIC_DETAILS_OPEN_KEY,
|
||||
SELECTED_METRIC_NAME_KEY,
|
||||
SUMMARY_FILTERS_KEY,
|
||||
} from './constants';
|
||||
import MetricsSearch from './MetricsSearch';
|
||||
import MetricsTable from './MetricsTable';
|
||||
import MetricsTreemap from './MetricsTreemap';
|
||||
import { OrderByPayload, TreemapViewType } from './types';
|
||||
import {
|
||||
convertNanoToMilliseconds,
|
||||
formatDataForMetricsTable,
|
||||
getMetricsListQuery,
|
||||
} from './utils';
|
||||
import { convertNanoToMilliseconds, formatDataForMetricsTable } from './utils';
|
||||
|
||||
import './Summary.styles.scss';
|
||||
|
||||
const DEFAULT_ORDER_BY: OrderByPayload = {
|
||||
columnName: 'samples',
|
||||
order: 'desc',
|
||||
const DEFAULT_ORDER_BY: Querybuildertypesv5OrderByDTO = {
|
||||
key: {
|
||||
name: 'samples',
|
||||
},
|
||||
direction: Querybuildertypesv5OrderDirectionDTO.desc,
|
||||
};
|
||||
|
||||
function Summary(): JSX.Element {
|
||||
const { pageSize, setPageSize } = usePageSize('metricsExplorer');
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
const [orderBy, setOrderBy] = useState<OrderByPayload>(DEFAULT_ORDER_BY);
|
||||
const [heatmapView, setHeatmapView] = useState<TreemapViewType>(
|
||||
TreemapViewType.TIMESERIES,
|
||||
const [orderBy, setOrderBy] = useState<Querybuildertypesv5OrderByDTO>(
|
||||
DEFAULT_ORDER_BY,
|
||||
);
|
||||
const [
|
||||
heatmapView,
|
||||
setHeatmapView,
|
||||
] = useState<MetricsexplorertypesTreemapModeDTO>(
|
||||
MetricsexplorertypesTreemapModeDTO.timeseries,
|
||||
);
|
||||
|
||||
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
||||
const query = useMemo(() => currentQuery?.builder?.queryData[0], [
|
||||
currentQuery,
|
||||
]);
|
||||
|
||||
const [searchParams, setSearchParams] = useSearchParams();
|
||||
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(
|
||||
@@ -65,16 +84,10 @@ function Summary(): JSX.Element {
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
const queryFilters: TagFilter = useMemo(() => {
|
||||
const encodedFilters = searchParams.get(SUMMARY_FILTERS_KEY);
|
||||
if (encodedFilters) {
|
||||
return JSON.parse(encodedFilters);
|
||||
}
|
||||
return {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
};
|
||||
}, [searchParams]);
|
||||
const [
|
||||
currentQueryFilterExpression,
|
||||
setCurrentQueryFilterExpression,
|
||||
] = useState<string>(query?.filter?.expression || '');
|
||||
|
||||
useEffect(() => {
|
||||
logEvent(MetricsExplorerEvents.TabChanged, {
|
||||
@@ -87,105 +100,113 @@ function Summary(): JSX.Element {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
// This is used to avoid the filters from being serialized with the id
|
||||
const queryFiltersWithoutId = useMemo(() => {
|
||||
const filtersWithoutId = {
|
||||
...queryFilters,
|
||||
items: queryFilters.items.map(({ id: _id, ...rest }) => rest),
|
||||
};
|
||||
return JSON.stringify(filtersWithoutId);
|
||||
}, [queryFilters]);
|
||||
const queryFilterExpression = useMemo(() => {
|
||||
const filters = query?.filters || { items: [], op: 'AND' };
|
||||
return convertFiltersToExpression(filters);
|
||||
}, [query?.filters]);
|
||||
|
||||
const metricsListQuery = useMemo(() => {
|
||||
const baseQuery = getMetricsListQuery();
|
||||
const metricsListQuery: MetricsexplorertypesStatsRequestDTO = useMemo(() => {
|
||||
return {
|
||||
...baseQuery,
|
||||
limit: pageSize,
|
||||
offset: (currentPage - 1) * pageSize,
|
||||
filters: queryFilters,
|
||||
start: convertNanoToMilliseconds(minTime),
|
||||
end: convertNanoToMilliseconds(maxTime),
|
||||
limit: pageSize,
|
||||
offset: (currentPage - 1) * pageSize,
|
||||
orderBy,
|
||||
filter: {
|
||||
expression: queryFilterExpression.expression,
|
||||
},
|
||||
};
|
||||
}, [queryFilters, minTime, maxTime, orderBy, pageSize, currentPage]);
|
||||
}, [
|
||||
minTime,
|
||||
maxTime,
|
||||
orderBy,
|
||||
pageSize,
|
||||
currentPage,
|
||||
queryFilterExpression.expression,
|
||||
]);
|
||||
|
||||
const metricsTreemapQuery = useMemo(
|
||||
const metricsTreemapQuery: MetricsexplorertypesTreemapRequestDTO = useMemo(
|
||||
() => ({
|
||||
limit: 100,
|
||||
filters: queryFilters,
|
||||
treemap: heatmapView,
|
||||
start: convertNanoToMilliseconds(minTime),
|
||||
end: convertNanoToMilliseconds(maxTime),
|
||||
mode: heatmapView,
|
||||
filter: {
|
||||
expression: queryFilterExpression.expression,
|
||||
},
|
||||
}),
|
||||
[queryFilters, heatmapView, minTime, maxTime],
|
||||
[heatmapView, minTime, maxTime, queryFilterExpression.expression],
|
||||
);
|
||||
|
||||
const {
|
||||
data: metricsData,
|
||||
isLoading: isMetricsLoading,
|
||||
isFetching: isMetricsFetching,
|
||||
isError: isMetricsError,
|
||||
} = useGetMetricsList(metricsListQuery, {
|
||||
enabled: !!metricsListQuery && !isInspectModalOpen,
|
||||
queryKey: [
|
||||
'metricsList',
|
||||
queryFiltersWithoutId,
|
||||
orderBy,
|
||||
pageSize,
|
||||
currentPage,
|
||||
minTime,
|
||||
maxTime,
|
||||
],
|
||||
});
|
||||
mutate: getMetricsStats,
|
||||
isLoading: isGetMetricsStatsLoading,
|
||||
isError: isGetMetricsStatsError,
|
||||
} = useGetMetricsStats();
|
||||
|
||||
const isListViewError = useMemo(
|
||||
() => isMetricsError || !!(metricsData && metricsData.statusCode !== 200),
|
||||
[isMetricsError, metricsData],
|
||||
() => isGetMetricsStatsError || metricsData?.status !== 200,
|
||||
[isGetMetricsStatsError, metricsData],
|
||||
);
|
||||
|
||||
const {
|
||||
data: treeMapData,
|
||||
isLoading: isTreeMapLoading,
|
||||
isFetching: isTreeMapFetching,
|
||||
isError: isTreeMapError,
|
||||
} = useGetMetricsTreeMap(metricsTreemapQuery, {
|
||||
enabled: !!metricsTreemapQuery && !isInspectModalOpen,
|
||||
queryKey: [
|
||||
'metricsTreemap',
|
||||
queryFiltersWithoutId,
|
||||
heatmapView,
|
||||
minTime,
|
||||
maxTime,
|
||||
],
|
||||
});
|
||||
mutate: getMetricsTreemap,
|
||||
isLoading: isGetMetricsTreemapLoading,
|
||||
isError: isGetMetricsTreemapError,
|
||||
} = useGetMetricsTreemap();
|
||||
|
||||
useEffect(() => {
|
||||
getMetricsStats({
|
||||
data: metricsListQuery,
|
||||
});
|
||||
getMetricsTreemap({
|
||||
data: metricsTreemapQuery,
|
||||
});
|
||||
}, [
|
||||
metricsTreemapQuery,
|
||||
metricsListQuery,
|
||||
getMetricsTreemap,
|
||||
getMetricsStats,
|
||||
]);
|
||||
|
||||
const isProportionViewError = useMemo(
|
||||
() => isTreeMapError || treeMapData?.statusCode !== 200,
|
||||
[isTreeMapError, treeMapData],
|
||||
() => isGetMetricsTreemapError || treeMapData?.status !== 200,
[isGetMetricsTreemapError, treeMapData],
);

const handleFilterChange = useCallback(
(value: TagFilter) => {
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(value),
(expression: string) => {
const newFilters: TagFilter = {
items: convertExpressionToFilters(expression),
op: 'AND',
};
redirectWithQueryBuilderData({
...currentQuery,
builder: {
...currentQuery.builder,
queryData: [
{
...currentQuery.builder.queryData[0],
filters: newFilters,
filter: {
expression,
},
},
],
},
});
setCurrentQueryFilterExpression(expression);
setCurrentPage(1);
if (value.items.length > 0) {
if (expression) {
logEvent(MetricsExplorerEvents.FilterApplied, {
[MetricsExplorerEventKeys.Tab]: 'summary',
});
}
},
[setSearchParams, searchParams],
);

const searchQuery = useMemo(
() => ({
...initialQueriesMap.metrics.builder.queryData[0],
filters: queryFilters,
}),
[queryFilters],
[currentQuery, redirectWithQueryBuilderData],
);

const onPaginationChange = (page: number, pageSize: number): void => {
@@ -202,7 +223,7 @@ function Summary(): JSX.Element {
};

const formattedMetricsData = useMemo(
() => formatDataForMetricsTable(metricsData?.payload?.data?.metrics || []),
() => formatDataForMetricsTable(metricsData?.data?.data?.metrics || []),
[metricsData],
);

@@ -254,7 +275,9 @@ function Summary(): JSX.Element {
});
};

const handleSetHeatmapView = (view: TreemapViewType): void => {
const handleSetHeatmapView = (
view: MetricsexplorertypesTreemapModeDTO,
): void => {
setHeatmapView(view);
logEvent(MetricsExplorerEvents.TreemapViewChanged, {
[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -262,63 +285,62 @@ function Summary(): JSX.Element {
});
};

const handleSetOrderBy = (orderBy: OrderByPayload): void => {
const handleSetOrderBy = (orderBy: Querybuildertypesv5OrderByDTO): void => {
setOrderBy(orderBy);
logEvent(MetricsExplorerEvents.OrderByApplied, {
[MetricsExplorerEventKeys.Tab]: 'summary',
[MetricsExplorerEventKeys.ColumnName]: orderBy.columnName,
[MetricsExplorerEventKeys.Order]: orderBy.order,
[MetricsExplorerEventKeys.ColumnName]: orderBy.key?.name,
[MetricsExplorerEventKeys.Order]: orderBy.direction,
});
};

const isMetricsListDataEmpty = useMemo(
() =>
formattedMetricsData.length === 0 && !isMetricsLoading && !isMetricsFetching,
[formattedMetricsData, isMetricsLoading, isMetricsFetching],
);
const isMetricsListDataEmpty =
formattedMetricsData.length === 0 && !isGetMetricsStatsLoading;

const isMetricsTreeMapDataEmpty = useMemo(
() =>
!treeMapData?.payload?.data[heatmapView]?.length &&
!isTreeMapLoading &&
!isTreeMapFetching,
[
treeMapData?.payload?.data,
heatmapView,
isTreeMapLoading,
isTreeMapFetching,
],
);
const isMetricsTreeMapDataEmpty =
!treeMapData?.data?.data?.[heatmapView]?.length &&
!isGetMetricsTreemapLoading;

const showFullScreenLoading =
(isGetMetricsStatsLoading || isGetMetricsTreemapLoading) &&
formattedMetricsData.length === 0 &&
!treeMapData?.data?.data?.[heatmapView]?.length;

return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-summary-tab">
<MetricsSearch query={searchQuery} onChange={handleFilterChange} />
{isMetricsLoading || isTreeMapLoading ? (
<MetricsSearch
query={query}
onChange={handleFilterChange}
currentQueryFilterExpression={currentQueryFilterExpression}
setCurrentQueryFilterExpression={setCurrentQueryFilterExpression}
/>
{showFullScreenLoading ? (
<MetricsLoading />
) : isMetricsListDataEmpty && isMetricsTreeMapDataEmpty ? (
<NoLogs dataSource={DataSource.METRICS} />
) : (
<>
<MetricsTreemap
data={treeMapData?.payload}
isLoading={isTreeMapLoading || isTreeMapFetching}
data={treeMapData?.data?.data}
isLoading={isGetMetricsTreemapLoading}
isError={isProportionViewError}
viewType={heatmapView}
openMetricDetails={openMetricDetails}
setHeatmapView={handleSetHeatmapView}
/>
<MetricsTable
isLoading={isMetricsLoading || isMetricsFetching}
isLoading={isGetMetricsStatsLoading}
isError={isListViewError}
data={formattedMetricsData}
pageSize={pageSize}
currentPage={currentPage}
onPaginationChange={onPaginationChange}
setOrderBy={handleSetOrderBy}
totalCount={metricsData?.payload?.data?.total || 0}
totalCount={metricsData?.data?.data?.total || 0}
openMetricDetails={openMetricDetails}
queryFilters={queryFilters}
queryFilterExpression={queryFilterExpression}
onFilterChange={handleFilterChange}
/>
</>
)}

@@ -0,0 +1,52 @@
import { Color } from '@signozhq/design-tokens';
import { render } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';

import MetricTypeRenderer from '../MetricTypeRenderer';

describe('MetricTypeRenderer', () => {
it('should render correct icon and color for each metric type', () => {
const types = [
{
type: MetricType.SUM,
color: Color.BG_ROBIN_500,
},
{
type: MetricType.GAUGE,
color: Color.BG_SAKURA_500,
},
{
type: MetricType.HISTOGRAM,
color: Color.BG_SIENNA_500,
},
{
type: MetricType.SUMMARY,
color: Color.BG_FOREST_500,
},
{
type: MetricType.EXPONENTIAL_HISTOGRAM,
color: Color.BG_AQUA_500,
},
];

types.forEach(({ type, color }) => {
const { container } = render(<MetricTypeRenderer type={type} />);
const rendererDiv = container.firstChild as HTMLElement;

expect(rendererDiv).toHaveStyle({
backgroundColor: `${color}33`,
border: `1px solid ${color}`,
color,
});
});
});

it('should return empty icon and color for unknown metric type', () => {
const { container } = render(
<MetricTypeRenderer type={'UNKNOWN' as MetricType} />,
);
const rendererDiv = container.firstChild as HTMLElement;

expect(rendererDiv.querySelector('svg')).toBeNull();
});
});
@@ -1,10 +1,10 @@
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { fireEvent, render, screen } from '@testing-library/react';
import { Filter } from 'api/v5/v5';
import * as useGetMetricsListFilterValues from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import * as useQueryBuilderOperationsHooks from 'hooks/queryBuilder/useQueryBuilderOperations';
import store from 'store';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import MetricsTable from '../MetricsTable';
import { MetricsListItemRowData } from '../types';
@@ -30,9 +30,8 @@ const mockData: MetricsListItemRowData[] = [
},
];

const mockQueryFilters: TagFilter = {
items: [],
op: 'AND',
const mockQueryFilterExpression: Filter = {
expression: '',
};

jest.mock('react-router-dom-v5-compat', () => {
@@ -82,7 +81,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -106,8 +106,9 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
isLoading
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -130,7 +131,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -158,7 +160,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -187,7 +190,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={mockOpenMetricDetails}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -212,7 +216,8 @@ describe('MetricsTable', () => {
setOrderBy={mockSetOrderBy}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -222,8 +227,10 @@ describe('MetricsTable', () => {
fireEvent.click(samplesHeader);

expect(mockSetOrderBy).toHaveBeenCalledWith({
columnName: 'samples',
order: 'asc',
key: {
name: 'samples',
},
direction: 'asc',
});
});
});
@@ -1,10 +1,10 @@
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { render, screen } from '@testing-library/react';
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
import store from 'store';

import MetricsTreemap from '../MetricsTreemap';
import { TreemapViewType } from '../types';

jest.mock('d3-hierarchy', () => ({
stratify: jest.fn().mockReturnValue({
@@ -27,14 +27,14 @@ jest.mock('react-use', () => ({

const mockData = [
{
metric_name: 'Metric 1',
metricName: 'Metric 1',
percentage: 0.5,
total_value: 15,
totalValue: 15,
},
{
metric_name: 'Metric 2',
metricName: 'Metric 2',
percentage: 0.6,
total_value: 10,
totalValue: 10,
},
];

@@ -47,14 +47,11 @@ describe('MetricsTreemap', () => {
isLoading={false}
isError={false}
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -72,14 +69,11 @@ describe('MetricsTreemap', () => {
isLoading
isError={false}
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -99,14 +93,11 @@ describe('MetricsTreemap', () => {
isLoading={false}
isError
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -130,7 +121,7 @@ describe('MetricsTreemap', () => {
isError={false}
data={null}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -1,109 +1,81 @@
import { Color } from '@signozhq/design-tokens';
import { render } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { Filter } from 'api/v5/v5';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { TreemapViewType } from '../types';
import {
formatDataForMetricsTable,
getMetricsTableColumns,
MetricTypeRenderer,
} from '../utils';
import { formatDataForMetricsTable, getMetricsTableColumns } from '../utils';

const mockQueryExpression: Filter = {
expression: '',
};
const mockOnChange = jest.fn();

describe('metricsTableColumns', () => {
const mockQueryFilters: TagFilter = {
items: [],
op: 'AND',
};

it('should have correct column definitions', () => {
expect(getMetricsTableColumns(mockQueryFilters)).toHaveLength(6);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange),
).toHaveLength(6);

// Metric Name column
expect(getMetricsTableColumns(mockQueryFilters)[0].dataIndex).toBe(
'metric_name',
);
expect(getMetricsTableColumns(mockQueryFilters)[0].width).toBe(400);
expect(getMetricsTableColumns(mockQueryFilters)[0].sorter).toBe(false);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].dataIndex,
).toBe('metric_name');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].width,
).toBe(400);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].sorter,
).toBe(false);

// Description column
expect(getMetricsTableColumns(mockQueryFilters)[1].dataIndex).toBe(
'description',
);
expect(getMetricsTableColumns(mockQueryFilters)[1].width).toBe(400);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[1].dataIndex,
).toBe('description');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[1].width,
).toBe(400);

// Type column
expect(getMetricsTableColumns(mockQueryFilters)[2].dataIndex).toBe(
'metric_type',
);
expect(getMetricsTableColumns(mockQueryFilters)[2].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[2].sorter).toBe(false);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].dataIndex,
).toBe('metric_type');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].sorter,
).toBe(false);

// Unit column
expect(getMetricsTableColumns(mockQueryFilters)[3].dataIndex).toBe('unit');
expect(getMetricsTableColumns(mockQueryFilters)[3].width).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[3].dataIndex,
).toBe('unit');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[3].width,
).toBe(150);

// Samples column
expect(getMetricsTableColumns(mockQueryFilters)[4].dataIndex).toBe(
TreemapViewType.SAMPLES,
);
expect(getMetricsTableColumns(mockQueryFilters)[4].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[4].sorter).toBe(true);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].dataIndex,
).toBe(TreemapViewType.SAMPLES);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].sorter,
).toBe(true);

// Time Series column
expect(getMetricsTableColumns(mockQueryFilters)[5].dataIndex).toBe(
TreemapViewType.TIMESERIES,
);
expect(getMetricsTableColumns(mockQueryFilters)[5].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[5].sorter).toBe(true);
});

describe('MetricTypeRenderer', () => {
it('should render correct icon and color for each metric type', () => {
const types = [
{
type: MetricType.SUM,
color: Color.BG_ROBIN_500,
},
{
type: MetricType.GAUGE,
color: Color.BG_SAKURA_500,
},
{
type: MetricType.HISTOGRAM,
color: Color.BG_SIENNA_500,
},
{
type: MetricType.SUMMARY,
color: Color.BG_FOREST_500,
},
{
type: MetricType.EXPONENTIAL_HISTOGRAM,
color: Color.BG_AQUA_500,
},
];

types.forEach(({ type, color }) => {
const { container } = render(<MetricTypeRenderer type={type} />);
const rendererDiv = container.firstChild as HTMLElement;

expect(rendererDiv).toHaveStyle({
backgroundColor: `${color}33`,
border: `1px solid ${color}`,
color,
});
});
});

it('should return empty icon and color for unknown metric type', () => {
const { container } = render(
<MetricTypeRenderer type={'UNKNOWN' as MetricType} />,
);
const rendererDiv = container.firstChild as HTMLElement;

expect(rendererDiv.querySelector('svg')).toBeNull();
});
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].dataIndex,
).toBe(TreemapViewType.TIMESERIES);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].sorter,
).toBe(true);
});
});

@@ -111,12 +83,12 @@ describe('formatDataForMetricsTable', () => {
it('should format metrics data correctly', () => {
const mockData = [
{
metric_name: 'test_metric',
metricName: 'test_metric',
description: 'Test description',
type: MetricType.GAUGE,
type: MetrictypesTypeDTO.gauge,
unit: 'bytes',
[TreemapViewType.SAMPLES]: 1000,
[TreemapViewType.TIMESERIES]: 2000,
samples: 1000,
timeseries: 2000,
lastReceived: '2023-01-01T00:00:00Z',
},
];
@@ -163,12 +135,12 @@ describe('formatDataForMetricsTable', () => {
it('should handle empty/null values', () => {
const mockData = [
{
metric_name: '',
metricName: '',
description: '',
type: MetricType.GAUGE,
type: MetrictypesTypeDTO.gauge,
unit: '',
[TreemapViewType.SAMPLES]: 0,
[TreemapViewType.TIMESERIES]: 0,
samples: 0,
timeseries: 0,
lastReceived: '2023-01-01T00:00:00Z',
},
];
@@ -1,15 +1,17 @@
import {
MetricsexplorertypesTreemapModeDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import { MetricType } from 'api/metricsExplorer/getMetricsList';

import { TreemapViewType } from './types';

export const METRICS_TABLE_PAGE_SIZE = 10;

export const TREEMAP_VIEW_OPTIONS: {
value: TreemapViewType;
value: MetricsexplorertypesTreemapModeDTO;
label: string;
}[] = [
{ value: TreemapViewType.TIMESERIES, label: 'Time Series' },
{ value: TreemapViewType.SAMPLES, label: 'Samples' },
{ value: MetricsexplorertypesTreemapModeDTO.timeseries, label: 'Time Series' },
{ value: MetricsexplorertypesTreemapModeDTO.samples, label: 'Samples' },
];

export const TREEMAP_HEIGHT = 200;
@@ -17,6 +19,7 @@ export const TREEMAP_SQUARE_PADDING = 5;

export const TREEMAP_MARGINS = { TOP: 10, LEFT: 10, RIGHT: 10, BOTTOM: 10 };

// TODO: Remove this once API migration is complete
export const METRIC_TYPE_LABEL_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
@@ -25,6 +28,14 @@ export const METRIC_TYPE_LABEL_MAP = {
[MetricType.EXPONENTIAL_HISTOGRAM]: 'Exp. Histogram',
};

export const METRIC_TYPE_LABEL_MAP_V2 = {
[MetrictypesTypeDTO.sum]: 'Sum',
[MetrictypesTypeDTO.gauge]: 'Gauge',
[MetrictypesTypeDTO.histogram]: 'Histogram',
[MetrictypesTypeDTO.summary]: 'Summary',
[MetrictypesTypeDTO.exponentialhistogram]: 'Exp. Histogram',
};

export const METRIC_TYPE_VALUES_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
@@ -36,4 +47,3 @@ export const METRIC_TYPE_VALUES_MAP = {
export const IS_METRIC_DETAILS_OPEN_KEY = 'isMetricDetailsOpen';
export const IS_INSPECT_MODAL_OPEN_KEY = 'isInspectModalOpen';
export const SELECTED_METRIC_NAME_KEY = 'selectedMetricName';
export const SUMMARY_FILTERS_KEY = 'summaryFilters';
@@ -1,9 +1,11 @@
import React from 'react';
import { MetricsTreeMapResponse } from 'api/metricsExplorer/getMetricsTreeMap';
import {
IBuilderQuery,
TagFilter,
} from 'types/api/queryBuilder/queryBuilderData';
MetricsexplorertypesTreemapModeDTO,
MetricsexplorertypesTreemapResponseDTO,
Querybuildertypesv5OrderByDTO,
} from 'api/generated/services/sigNoz.schemas';
import { Filter } from 'api/v5/v5';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

export interface MetricsTableProps {
isLoading: boolean;
@@ -12,24 +14,35 @@ export interface MetricsTableProps {
pageSize: number;
currentPage: number;
onPaginationChange: (page: number, pageSize: number) => void;
setOrderBy: (orderBy: OrderByPayload) => void;
setOrderBy: (orderBy: Querybuildertypesv5OrderByDTO) => void;
totalCount: number;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
queryFilters: TagFilter;
queryFilterExpression: Filter;
onFilterChange: (expression: string) => void;
}

export interface MetricsSearchProps {
query: IBuilderQuery;
onChange: (value: TagFilter) => void;
onChange: (expression: string) => void;
currentQueryFilterExpression: string;
setCurrentQueryFilterExpression: (expression: string) => void;
}

export interface MetricsTreemapProps {
data: MetricsTreeMapResponse | null | undefined;
data: MetricsexplorertypesTreemapResponseDTO | null | undefined;
isLoading: boolean;
isError: boolean;
viewType: TreemapViewType;
viewType: MetricsexplorertypesTreemapModeDTO;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
setHeatmapView: (value: MetricsexplorertypesTreemapModeDTO) => void;
}

export interface TreemapContentProps {
isLoading: boolean;
isError: boolean;
data: MetricsexplorertypesTreemapResponseDTO | null | undefined;
viewType: MetricsexplorertypesTreemapModeDTO;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
setHeatmapView: (value: TreemapViewType) => void;
}

export interface OrderByPayload {
@@ -1,39 +1,31 @@
import { useMemo } from 'react';
import { Color } from '@signozhq/design-tokens';
import { Tooltip, Typography } from 'antd';
import { Tooltip } from 'antd';
import { ColumnType } from 'antd/es/table';
import {
MetricsListItemData,
MetricsListPayload,
MetricType,
} from 'api/metricsExplorer/getMetricsList';
import {
SamplesData,
TimeseriesData,
} from 'api/metricsExplorer/getMetricsTreeMap';
MetricsexplorertypesStatDTO,
MetricsexplorertypesTreemapEntryDTO,
MetricsexplorertypesTreemapModeDTO,
} from 'api/generated/services/sigNoz.schemas';
import { MetricsListPayload } from 'api/metricsExplorer/getMetricsList';
import { Filter } from 'api/v5/v5';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import {
BarChart,
BarChart2,
BarChartHorizontal,
Diff,
Gauge,
} from 'lucide-react';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { METRIC_TYPE_LABEL_MAP } from './constants';
import MetricNameSearch from './MetricNameSearch';
import MetricTypeSearch from './MetricTypeSearch';
import { MetricsListItemRowData, TreemapTile, TreemapViewType } from './types';
import MetricTypeRendererV2 from './MetricTypeRendererV2';
import { MetricsListItemRowData, TreemapTile } from './types';

export const getMetricsTableColumns = (
queryFilters: TagFilter,
queryFilterExpression: Filter,
onFilterChange: (expression: string) => void,
): ColumnType<MetricsListItemRowData>[] => [
{
title: (
<div className="metric-name-column-header">
<span className="metric-name-column-header-text">METRIC</span>
<MetricNameSearch queryFilters={queryFilters} />
<MetricNameSearch
queryFilterExpression={queryFilterExpression}
onFilterChange={onFilterChange}
/>
</div>
),
dataIndex: 'metric_name',
@@ -55,7 +47,11 @@ export const getMetricsTableColumns = (
title: (
<div className="metric-type-column-header">
<span className="metric-type-column-header-text">TYPE</span>
<MetricTypeSearch queryFilters={queryFilters} />
{/* TODO: @amlannandy: Re-enable once API supports metric type filtering */}
{/* <MetricTypeSearch
queryFilters={queryFilters}
onFilterChange={onFilterChange}
/> */}
</div>
),
dataIndex: 'metric_type',
@@ -69,13 +65,13 @@ export const getMetricsTableColumns = (
},
{
title: 'SAMPLES',
dataIndex: TreemapViewType.SAMPLES,
dataIndex: MetricsexplorertypesTreemapModeDTO.samples,
width: 150,
sorter: true,
},
{
title: 'TIME SERIES',
dataIndex: TreemapViewType.TIMESERIES,
dataIndex: MetricsexplorertypesTreemapModeDTO.timeseries,
width: 150,
sorter: true,
},
@@ -89,60 +85,6 @@ export const getMetricsListQuery = (): MetricsListPayload => ({
orderBy: { columnName: 'metric_name', order: 'asc' },
});

export function MetricTypeRenderer({
type,
}: {
type: MetricType;
}): JSX.Element {
const [icon, color] = useMemo(() => {
switch (type) {
case MetricType.SUM:
return [
<Diff key={type} size={12} color={Color.BG_ROBIN_500} />,
Color.BG_ROBIN_500,
];
case MetricType.GAUGE:
return [
<Gauge key={type} size={12} color={Color.BG_SAKURA_500} />,
Color.BG_SAKURA_500,
];
case MetricType.HISTOGRAM:
return [
<BarChart2 key={type} size={12} color={Color.BG_SIENNA_500} />,
Color.BG_SIENNA_500,
];
case MetricType.SUMMARY:
return [
<BarChartHorizontal key={type} size={12} color={Color.BG_FOREST_500} />,
Color.BG_FOREST_500,
];
case MetricType.EXPONENTIAL_HISTOGRAM:
return [
<BarChart key={type} size={12} color={Color.BG_AQUA_500} />,
Color.BG_AQUA_500,
];
default:
return [null, ''];
}
}, [type]);

return (
<div
className="metric-type-renderer"
style={{
backgroundColor: `${color}33`,
border: `1px solid ${color}`,
color,
}}
>
{icon}
<Typography.Text style={{ color, fontSize: 12 }}>
{METRIC_TYPE_LABEL_MAP[type]}
</Typography.Text>
</div>
);
}

function ValidateRowValueWrapper({
value,
children,
@@ -182,13 +124,13 @@ export const formatNumberIntoHumanReadableFormat = (
};

export const formatDataForMetricsTable = (
data: MetricsListItemData[],
data: MetricsexplorertypesStatDTO[],
): MetricsListItemRowData[] =>
data.map((metric) => ({
key: metric.metric_name,
key: metric.metricName,
metric_name: (
<ValidateRowValueWrapper value={metric.metric_name}>
<Tooltip title={metric.metric_name}>{metric.metric_name}</Tooltip>
<ValidateRowValueWrapper value={metric.metricName}>
<Tooltip title={metric.metricName}>{metric.metricName}</Tooltip>
</ValidateRowValueWrapper>
),
description: (
@@ -198,39 +140,54 @@ export const formatDataForMetricsTable = (
</Tooltip>
</ValidateRowValueWrapper>
),
metric_type: <MetricTypeRenderer type={metric.type} />,
metric_type: <MetricTypeRendererV2 type={metric.type} />,
unit: (
<ValidateRowValueWrapper value={getUniversalNameFromMetricUnit(metric.unit)}>
{getUniversalNameFromMetricUnit(metric.unit)}
</ValidateRowValueWrapper>
),
[TreemapViewType.SAMPLES]: (
<ValidateRowValueWrapper value={metric[TreemapViewType.SAMPLES]}>
<Tooltip title={metric[TreemapViewType.SAMPLES].toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric[TreemapViewType.SAMPLES])}
[MetricsexplorertypesTreemapModeDTO.samples]: (
<ValidateRowValueWrapper
value={metric[MetricsexplorertypesTreemapModeDTO.samples]}
>
<Tooltip
title={metric[MetricsexplorertypesTreemapModeDTO.samples].toLocaleString()}
>
{formatNumberIntoHumanReadableFormat(
metric[MetricsexplorertypesTreemapModeDTO.samples],
)}
</Tooltip>
</ValidateRowValueWrapper>
),
[TreemapViewType.TIMESERIES]: (
<ValidateRowValueWrapper value={metric[TreemapViewType.TIMESERIES]}>
<Tooltip title={metric[TreemapViewType.TIMESERIES].toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric[TreemapViewType.TIMESERIES])}
[MetricsexplorertypesTreemapModeDTO.timeseries]: (
<ValidateRowValueWrapper
value={metric[MetricsexplorertypesTreemapModeDTO.timeseries]}
>
<Tooltip
title={metric[
MetricsexplorertypesTreemapModeDTO.timeseries
].toLocaleString()}
>
{formatNumberIntoHumanReadableFormat(
metric[MetricsexplorertypesTreemapModeDTO.timeseries],
)}
</Tooltip>
</ValidateRowValueWrapper>
),
}));

export const transformTreemapData = (
data: TimeseriesData[] | SamplesData[],
viewType: TreemapViewType,
data: MetricsexplorertypesTreemapEntryDTO[],
viewType: MetricsexplorertypesTreemapModeDTO,
): TreemapTile[] => {
const totalSize = (data as (TimeseriesData | SamplesData)[]).reduce(
(acc: number, item: TimeseriesData | SamplesData) => acc + item.percentage,
const totalSize = data.reduce(
(acc: number, item: MetricsexplorertypesTreemapEntryDTO) =>
acc + item.percentage,
0,
);

const children = data.map((item) => ({
id: item.metric_name,
id: item.metricName,
size: totalSize > 0 ? Number((item.percentage / totalSize).toFixed(2)) : 0,
displayValue: Number(item.percentage).toFixed(2),
parent: viewType,
@@ -51,6 +51,28 @@ function WidgetGraphContainer({
return <Spinner size="large" tip="Loading..." />;
}

if (
selectedGraph !== PANEL_TYPES.LIST &&
selectedGraph !== PANEL_TYPES.VALUE &&
queryResponse.data?.payload.data?.result?.length === 0
) {
return (
<NotFoundContainer>
<Typography>No Data</Typography>
</NotFoundContainer>
);
}
if (
(selectedGraph === PANEL_TYPES.LIST || selectedGraph === PANEL_TYPES.VALUE) &&
queryResponse.data?.payload?.data?.newResult?.data?.result?.length === 0
) {
return (
<NotFoundContainer>
<Typography>No Data</Typography>
</NotFoundContainer>
);
}

if (queryResponse.isIdle) {
return (
<NotFoundContainer>
@@ -26,7 +26,7 @@ jest.mock('lib/history', () => ({
// API Endpoints
const ORG_PREFERENCES_ENDPOINT = '*/api/v1/org/preferences/list';
const UPDATE_ORG_PREFERENCE_ENDPOINT = '*/api/v1/org/preferences/name/update';
const UPDATE_PROFILE_ENDPOINT = '*/api/v2/zeus/profiles';
const UPDATE_PROFILE_ENDPOINT = '*/api/gateway/v2/profiles/me';
const EDIT_ORG_ENDPOINT = '*/api/v2/orgs/me';
const INVITE_USERS_ENDPOINT = '*/api/v1/invite/bulk/create';

@@ -277,46 +277,6 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});

it('fires PUT to /zeus/profiles and advances to step 4 on success', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
let profilePutCalled = false;

server.use(
rest.put(UPDATE_PROFILE_ENDPOINT, (_, res, ctx) => {
profilePutCalled = true;
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
);

render(<OnboardingQuestionaire />);

// Navigate to step 3
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));

await user.type(
await screen.findByPlaceholderText(/e\.g\., googling/i),
'Found via Google',
);
await user.click(screen.getByLabelText(/lowering observability costs/i));
await user.click(screen.getByRole('button', { name: /next/i }));

// Click "I'll do this later" on step 3 — triggers PUT /zeus/profiles
await user.click(
await screen.findByRole('button', { name: /i'll do this later/i }),
);

await waitFor(() => {
expect(profilePutCalled).toBe(true);
// Step 3 content is gone — successfully advanced to step 4
expect(
screen.queryByText(/what does your scale approximately look like/i),
).not.toBeInTheDocument();
});
});

it('shows do later button', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
@@ -1,10 +1,8 @@
import { useEffect, useState } from 'react';
import { useMutation, useQuery } from 'react-query';
import { toast } from '@signozhq/sonner';
import { NotificationInstance } from 'antd/es/notification/interface';
import logEvent from 'api/common/logEvent';
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { usePutProfile } from 'api/generated/services/zeus';
import updateProfileAPI from 'api/onboarding/updateProfile';
import listOrgPreferences from 'api/v1/org/preferences/list';
import updateOrgPreferenceAPI from 'api/v1/org/preferences/name/update';
import { AxiosError } from 'axios';
@@ -123,9 +121,20 @@ function OnboardingQuestionaire(): JSX.Element {
optimiseSignozDetails.hostsPerDay === 0 &&
optimiseSignozDetails.services === 0;

const { mutate: updateProfile, isLoading: isUpdatingProfile } = usePutProfile<
AxiosError<RenderErrorResponseDTO>
>();
const { mutate: updateProfile, isLoading: isUpdatingProfile } = useMutation(
updateProfileAPI,
{
onSuccess: () => {
setCurrentStep(4);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);

// Allow user to proceed even if API fails
setCurrentStep(4);
},
},
);

const { mutate: updateOrgPreference } = useMutation(updateOrgPreferenceAPI, {
onSuccess: () => {
@@ -144,44 +153,29 @@ function OnboardingQuestionaire(): JSX.Element {
nextPageID: 4,
});

updateProfile(
{
data: {
uses_otel: orgDetails?.usesOtel as boolean,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
existing_observability_tool:
orgDetails?.observabilityTool === 'Others'
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
? ([
...(signozDetails?.interestInSignoz?.filter(
(item) => item !== 'Others',
) || []),
signozDetails?.otherInterestInSignoz,
] as string[])
: (signozDetails?.interestInSignoz as string[]),
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
number_of_services: optimiseSignozDetails?.services as number,
},
},
{
onSuccess: () => {
setCurrentStep(4);
},
onError: (error: any) => {
toast.error(error?.message || SOMETHING_WENT_WRONG);

// Allow user to proceed even if API fails
setCurrentStep(4);
},
},
);
updateProfile({
uses_otel: orgDetails?.usesOtel as boolean,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
existing_observability_tool:
orgDetails?.observabilityTool === 'Others'
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
? ([
...(signozDetails?.interestInSignoz?.filter(
(item) => item !== 'Others',
) || []),
signozDetails?.otherInterestInSignoz,
] as string[])
: (signozDetails?.interestInSignoz as string[]),
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
number_of_services: optimiseSignozDetails?.services as number,
});
};

const handleOnboardingComplete = (): void => {
@@ -7,12 +7,6 @@
display: flex;
align-items: center;
justify-content: space-between;

.auth-domain-title {
margin: 0;
font-size: 20px;
font-weight: 600;
}
}
}

@@ -21,36 +15,5 @@
display: flex;
flex-direction: row;
gap: 24px;

.auth-domain-list-action-link {
cursor: pointer;
color: var(--primary);
transition: color 0.3s;
border: none;
background: none;
padding: 0;
font-size: inherit;

&:hover {
opacity: 0.8;
text-decoration: underline;
}

&.delete {
color: var(--destructive);
}
}
}

.auth-domain-list-na {
padding-left: 6px;
color: var(--text-secondary);
}
}

.delete-ingestion-key-modal {
.delete-text {
color: var(--text);
margin: 0;
}
}
@@ -1,38 +1,28 @@
import { useCallback, useState } from 'react';
import { Button } from '@signozhq/button';
import { toast } from '@signozhq/sonner';
import { Form, Modal } from 'antd';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import {
useCreateAuthDomain,
useUpdateAuthDomain,
} from 'api/generated/services/authdomains';
import {
AuthtypesGettableAuthDomainDTO,
AuthtypesGoogleConfigDTO,
AuthtypesRoleMappingDTO,
RenderErrorResponseDTO,
} from 'api/generated/services/sigNoz.schemas';
import { AxiosError } from 'axios';
import { useState } from 'react';
import { Button, Form, Modal } from 'antd';
import put from 'api/v1/domains/id/put';
import post from 'api/v1/domains/post';
import { FeatureKeys } from 'constants/features';
import { defaultTo } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { useErrorModal } from 'providers/ErrorModalProvider';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';

import AuthnProviderSelector from './AuthnProviderSelector';
import {
convertDomainMappingsToRecord,
convertGroupMappingsToRecord,
FormValues,
prepareInitialValues,
} from './CreateEdit.utils';
import ConfigureGoogleAuthAuthnProvider from './Providers/AuthnGoogleAuth';
import ConfigureOIDCAuthnProvider from './Providers/AuthnOIDC';
import ConfigureSAMLAuthnProvider from './Providers/AuthnSAML';

import './CreateEdit.styles.scss';

interface CreateOrEditProps {
isCreate: boolean;
onClose: () => void;
record?: GettableAuthDomain;
}

function configureAuthnProvider(
authnProvider: string,
isCreate: boolean,
@@ -49,186 +39,64 @@ function configureAuthnProvider(
}
}

interface CreateOrEditProps {
isCreate: boolean;
onClose: () => void;
record?: AuthtypesGettableAuthDomainDTO;
}

function CreateOrEdit(props: CreateOrEditProps): JSX.Element {
const { isCreate, record, onClose } = props;
const [form] = Form.useForm<FormValues>();
const [form] = Form.useForm<PostableAuthDomain>();
const [authnProvider, setAuthnProvider] = useState<string>(
record?.ssoType || '',
);

const { showErrorModal } = useErrorModal();
const { featureFlags } = useAppContext();

const handleError = useCallback(
(error: AxiosError<RenderErrorResponseDTO>): void => {
try {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
} catch (apiError) {
showErrorModal(apiError as APIError);
}
},
[showErrorModal],
);
const samlEnabled =
featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false;

const {
mutate: createAuthDomain,
isLoading: isCreating,
} = useCreateAuthDomain<AxiosError<RenderErrorResponseDTO>>();

const {
mutate: updateAuthDomain,
isLoading: isUpdating,
} = useUpdateAuthDomain<AxiosError<RenderErrorResponseDTO>>();

/**
* Prepares Google Auth config for API payload
*/
const getGoogleAuthConfig = useCallback(():
| AuthtypesGoogleConfigDTO
| undefined => {
const config = form.getFieldValue('googleAuthConfig');
if (!config) {
return undefined;
}

const { domainToAdminEmailList, ...rest } = config;
const domainToAdminEmail = convertDomainMappingsToRecord(
domainToAdminEmailList,
);

return {
...rest,
...(domainToAdminEmail && { domainToAdminEmail }),
};
}, [form]);

// Prepares role mapping for API payload
const getRoleMapping = useCallback((): AuthtypesRoleMappingDTO | undefined => {
const roleMapping = form.getFieldValue('roleMapping');
if (!roleMapping) {
return undefined;
}

const { groupMappingsList, ...rest } = roleMapping;
const groupMappings = convertGroupMappingsToRecord(groupMappingsList);

// Only return roleMapping if there's meaningful content
const hasDefaultRole = !!rest.defaultRole;
const hasUseRoleAttribute = rest.useRoleAttribute === true;
const hasGroupMappings =
groupMappings && Object.keys(groupMappings).length > 0;

if (!hasDefaultRole && !hasUseRoleAttribute && !hasGroupMappings) {
return undefined;
}

return {
...rest,
...(groupMappings && { groupMappings }),
};
}, [form]);

const onSubmitHandler = useCallback(async (): Promise<void> => {
try {
await form.validateFields();
} catch {
return;
}

const onSubmitHandler = async (): Promise<void> => {
const name = form.getFieldValue('name');
const googleAuthConfig = getGoogleAuthConfig();
const googleAuthConfig = form.getFieldValue('googleAuthConfig');
const samlConfig = form.getFieldValue('samlConfig');
const oidcConfig = form.getFieldValue('oidcConfig');
const roleMapping = getRoleMapping();

if (isCreate) {
createAuthDomain(
{
data: {
name,
config: {
ssoEnabled: true,
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
roleMapping,
},
try {
if (isCreate) {
await post({
name,
config: {
ssoEnabled: true,
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
},
},
{
onSuccess: () => {
toast.success('Domain created successfully');
onClose();
});
} else {
await put({
id: record?.id || '',
config: {
ssoEnabled: form.getFieldValue('ssoEnabled'),
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
},
onError: handleError,
},
);
} else {
if (!record?.id) {
return;
});
}

updateAuthDomain(
{
pathParams: { id: record.id },
data: {
config: {
ssoEnabled: form.getFieldValue('ssoEnabled'),
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
roleMapping,
},
},
},
{
onSuccess: () => {
toast.success('Domain updated successfully');
onClose();
},
onError: handleError,
},
);
onClose();
} catch (error) {
showErrorModal(error as APIError);
}
}, [
authnProvider,
createAuthDomain,
form,
getGoogleAuthConfig,
getRoleMapping,
handleError,
isCreate,
};

onClose,
record,
updateAuthDomain,
]);

const onBackHandler = useCallback((): void => {
form.resetFields();
const onBackHandler = (): void => {
setAuthnProvider('');
}, [form]);
};

return (
<Modal
open
footer={null}
onCancel={onClose}
width={authnProvider ? 980 : undefined}
>
<Modal open footer={null} onCancel={onClose}>
<Form
name="auth-domain"
initialValues={defaultTo(prepareInitialValues(record), {
initialValues={defaultTo(record, {
name: '',
ssoEnabled: false,
ssoType: '',
@@ -246,22 +114,9 @@ function CreateOrEdit(props: CreateOrEditProps): JSX.Element {
<div className="auth-domain-configure">
{configureAuthnProvider(authnProvider, isCreate)}
<section className="action-buttons">
{isCreate && (
<Button onClick={onBackHandler} variant="solid" color="secondary">
Back
</Button>
)}
{!isCreate && (
<Button onClick={onClose} variant="solid" color="secondary">
Cancel
</Button>
)}
<Button
onClick={onSubmitHandler}
variant="solid"
color="primary"
loading={isCreating || isUpdating}
>
{isCreate && <Button onClick={onBackHandler}>Back</Button>}
{!isCreate && <Button onClick={onClose}>Cancel</Button>}
<Button onClick={onSubmitHandler} type="primary">
Save Changes
</Button>
</section>
Some files were not shown because too many files have changed in this diff