Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-20 15:52:41 +00:00)

Compare commits: fts-body...issue_3017 (62 commits)
Commit SHA1s:

108f03c7bd, ec5738032d, 4352c4de91, 6acbc7156d, 09f9a2d4f2, 6a5354df39, ca9ff25314, 07e66e8c24,
6283c6c26a, 3515e59a39, 7756067914, d3ef59cba7, 81e33d59bb, a05957dc69, 24cf357b04, 91e4da28e6,
4cc727b7f8, 9b24097a61, 3a5d6b4493, d341f1f810, df1b47230a, 6261c9586f, cda48874d2, 277b6de266,
6f87ebe092, 62c70715e0, 585a2b5282, 6ad4c8ad8e, 68df57965d, d155cc6a10, 90a6902093, 2bf92c9c2f,
aa2c1676b6, 239c0f4e2e, 97ecfdea23, 6a02db8685, 9f85dfb307, ebc236857d, 0a1e252bb5, dd696bab13,
7f87103b30, 726bd0ea7a, ab443c2d65, 8be9a79d56, 471ad88971, a5c46beeec, 41f720950d, d9bce4a3c6,
a5ac40c33c, 86b1366d4a, eddb43a901, 505cfe2314, 6e54ee822a, d88cb8aba4, b823b2a1e1, 7cfb7118a3,
59dfe7c0ed, 96b68b91c9, be6ce8d4f1, 1fc58695c6, 43450a187e, f4666d9c97
.github/workflows/integrationci.yaml (vendored, 4 changes)
@@ -53,9 +53,9 @@ jobs:
          - sqlite
        clickhouse-version:
          - 25.5.6
          - 25.10.5
          - 25.10.1
        schema-migrator-version:
          - v0.142.0
          - v0.129.7
        postgres-version:
          - 15
    if: |
@@ -1920,63 +1920,6 @@ components:
|
||||
format: date-time
|
||||
type: string
|
||||
type: object
|
||||
ZeustypesGettableHost:
|
||||
properties:
|
||||
hosts:
|
||||
items:
|
||||
$ref: '#/components/schemas/ZeustypesHost'
|
||||
nullable: true
|
||||
type: array
|
||||
name:
|
||||
type: string
|
||||
state:
|
||||
type: string
|
||||
tier:
|
||||
type: string
|
||||
type: object
|
||||
ZeustypesHost:
|
||||
properties:
|
||||
is_default:
|
||||
type: boolean
|
||||
name:
|
||||
type: string
|
||||
url:
|
||||
type: string
|
||||
type: object
|
||||
ZeustypesPostableHost:
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
required:
|
||||
- name
|
||||
type: object
|
||||
ZeustypesPostableProfile:
|
||||
properties:
|
||||
existing_observability_tool:
|
||||
type: string
|
||||
has_existing_observability_tool:
|
||||
type: boolean
|
||||
logs_scale_per_day_in_gb:
|
||||
format: int64
|
||||
type: integer
|
||||
number_of_hosts:
|
||||
format: int64
|
||||
type: integer
|
||||
number_of_services:
|
||||
format: int64
|
||||
type: integer
|
||||
reasons_for_interest_in_signoz:
|
||||
items:
|
||||
type: string
|
||||
nullable: true
|
||||
type: array
|
||||
timeline_for_migrating_to_signoz:
|
||||
type: string
|
||||
uses_otel:
|
||||
type: boolean
|
||||
where_did_you_discover_signoz:
|
||||
type: string
|
||||
type: object
|
||||
securitySchemes:
|
||||
api_key:
|
||||
description: API Keys
|
||||
@@ -5595,174 +5538,6 @@ paths:
|
||||
summary: Rotate session
|
||||
tags:
|
||||
- sessions
|
||||
/api/v2/zeus/hosts:
|
||||
get:
|
||||
deprecated: false
|
||||
description: This endpoint gets the host info from zeus.
|
||||
operationId: GetHosts
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
properties:
|
||||
data:
|
||||
$ref: '#/components/schemas/ZeustypesGettableHost'
|
||||
status:
|
||||
type: string
|
||||
type: object
|
||||
description: OK
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Get host info from Zeus.
|
||||
tags:
|
||||
- zeus
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint saves the host of a deployment to zeus.
|
||||
operationId: PutHost
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ZeustypesPostableHost'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Put host in Zeus for a deployment.
|
||||
tags:
|
||||
- zeus
|
||||
/api/v2/zeus/profiles:
|
||||
put:
|
||||
deprecated: false
|
||||
description: This endpoint saves the profile of a deployment to zeus.
|
||||
operationId: PutProfile
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ZeustypesPostableProfile'
|
||||
responses:
|
||||
"204":
|
||||
description: No Content
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Bad Request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Unauthorized
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Not Found
|
||||
"409":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Conflict
|
||||
"500":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RenderErrorResponse'
|
||||
description: Internal Server Error
|
||||
security:
|
||||
- api_key:
|
||||
- ADMIN
|
||||
- tokenizer:
|
||||
- ADMIN
|
||||
summary: Put profile in Zeus for a deployment.
|
||||
tags:
|
||||
- zeus
|
||||
/api/v5/query_range:
|
||||
post:
|
||||
deprecated: false
|
||||
|
||||
@@ -155,7 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAPIDef`

- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks such as SAML).
- **RequestExamples**: an array of `handler.OpenAPIExample` values that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
  - `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body. (A sketch combining these fields follows this list.)
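
As a rough illustration of how these fields fit together, here is a minimal sketch of an `OpenAPIDef` for a plain JSON endpoint. The `CreateUser` ID and the `usertypes.PostableUser` / `usertypes.GettableUser` types are hypothetical placeholders; only the field names come from the description above and from the `pkg/querier/openapi.go` example referenced later.

```go
// Hypothetical sketch only: the request/response types below are placeholders,
// not real SigNoz types. The OpenAPIDef fields mirror the bullets above.
var createUserOpenAPIDef = handler.OpenAPIDef{
	ID:                  "CreateUser",
	Tags:                []string{"users"},
	Summary:             "Create user",
	Description:         "Create a user from a JSON payload.",
	Request:             new(usertypes.PostableUser), // Go type describing the JSON request body
	RequestContentType:  "application/json",
	Response:            new(usertypes.GettableUser), // Go type for the successful response payload
	ResponseContentType: "application/json",          // use "" for responses without a body
}
```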

@@ -173,169 +172,8 @@ See existing examples in:

- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)

## OpenAPI schema details for request/response types

The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.

### Adding request examples

Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:

```go
type OpenAPIExample struct {
	Name        string // unique key for the example (e.g. "traces_time_series")
	Summary     string // short description shown in docs (e.g. "Time series: count spans grouped by service")
	Description string // optional longer description
	Value       any    // the example payload, typically map[string]any
}
```

For reference, see `pkg/querier/openapi.go`, which defines some examples for the `/api/v5/query_range` endpoint:

```go
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
	ID:              "QueryRangeV5",
	Tags:            []string{"query"},
	Summary:         "Query range",
	Description:     "Execute a composite query over a time range.",
	Request:         new(qbtypes.QueryRangeRequest),
	RequestExamples: queryRangeV5Examples, // []handler.OpenAPIExample
	// ...
}

var queryRangeV5Examples = []handler.OpenAPIExample{
	{
		Name:    "traces_time_series",
		Summary: "Time series: count spans grouped by service",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							// ...
						},
					},
				},
			},
		},
	},
	// ... more examples
}
```

### `required` tag

Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value: a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.

```go
type ListItem struct {
	...
}

type ListResponse struct {
	List  []ListItem `json:"list" required:"true" nullable:"true"`
	Total uint64     `json:"total" required:"true"`
}
```

In this example, a response like `{"list": null, "total": 0}` is valid: both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.

### `nullable` tag

Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).

Be explicit about the distinction:

- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice.
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.

```go
// Non-nullable: Go code must ensure this is always an initialized slice.
type NonNullableExample struct {
	Items []Item `json:"items" required:"true"`
}
```

When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.
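
For symmetry with the non-nullable example above, here is a minimal sketch of the nullable case; `Item` is the same placeholder element type and the struct name is illustrative.

```go
// Nullable: the Go code may legitimately return a nil slice here,
// so the schema must allow "items": null in addition to an array.
type NullableExample struct {
	Items []Item `json:"items" required:"true" nullable:"true"`
}
```

With this tag, both `{"items": null}` and `{"items": []}` satisfy the schema, whereas the non-nullable variant above only accepts an actual array.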

### `Enum()` method

For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.

```go
type Signal struct {
	valuer.String
}

var (
	SignalTraces  = Signal{valuer.NewString("traces")}
	SignalLogs    = Signal{valuer.NewString("logs")}
	SignalMetrics = Signal{valuer.NewString("metrics")}
)

func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}
```

This produces the following in the generated OpenAPI spec:

```yaml
Signal:
  enum:
    - traces
    - logs
    - metrics
  type: string
```

Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.

### `JSONSchema()` method (custom schema)

For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:

```go
var _ jsonschema.Exposer = Step{}

func (Step) JSONSchema() (jsonschema.Schema, error) {
	s := jsonschema.Schema{}
	s.WithDescription("Step interval. Accepts a duration string or seconds.")

	strSchema := jsonschema.Schema{}
	strSchema.WithType(jsonschema.String.Type())
	strSchema.WithExamples("60s", "5m", "1h")

	numSchema := jsonschema.Schema{}
	numSchema.WithType(jsonschema.Number.Type())
	numSchema.WithExamples(60, 300, 3600)

	s.OneOf = []jsonschema.SchemaOrBool{
		strSchema.ToSchemaOrBool(),
		numSchema.ToSchemaOrBool(),
	}
	return s, nil
}
```
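
For orientation, the resulting schema for `Step` should then come out roughly as follows; the exact shape depends on the schema generator, so treat this as an approximation rather than literal output (the `WithExamples` calls additionally attach example values to each branch).

```yaml
Step:
  description: Step interval. Accepts a duration string or seconds.
  oneOf:
    - type: string
    - type: number
```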

## What should I remember?

- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps: in Go these default to `nil`, which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/querier/openapi.go` for reference.

@@ -3,7 +3,6 @@ package httpzeus
import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"

@@ -11,7 +10,6 @@ import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/SigNoz/signoz/pkg/types/zeustypes"
	"github.com/SigNoz/signoz/pkg/zeus"
	"github.com/tidwall/gjson"
)

@@ -121,13 +119,8 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
	return err
}

func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
	body, err := json.Marshal(profile)
	if err != nil {
		return err
	}

	_, err = provider.do(
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
	_, err := provider.do(
		ctx,
		provider.config.URL.JoinPath("/v2/profiles/me"),
		http.MethodPut,

@@ -138,15 +131,10 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, profile *z
	return err
}

func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
	body, err := json.Marshal(host)
	if err != nil {
		return err
	}

	_, err = provider.do(
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
	_, err := provider.do(
		ctx,
		provider.config.URL.JoinPath("/v2/deployments/me/host"),
		provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
		http.MethodPut,
		key,
		body,

@@ -181,28 +169,21 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
		return body, nil
	}

	errorMessage := gjson.GetBytes(body, "error").String()
	if errorMessage == "" {
		errorMessage = "an unknown error occurred"
	}

	return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
	return nil, provider.errFromStatusCode(response.StatusCode)
}

// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
func (provider *Provider) errFromStatusCode(statusCode int) error {
	switch statusCode {
	case http.StatusBadRequest:
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
	case http.StatusUnauthorized:
		return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
		return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
	case http.StatusForbidden:
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
		return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
	case http.StatusNotFound:
		return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
	case http.StatusConflict:
		return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
		return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
	}

	return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
	return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
}

@@ -58,7 +58,6 @@
    "@signozhq/radio-group": "0.0.2",
    "@signozhq/resizable": "0.0.0",
    "@signozhq/sonner": "0.1.0",
    "@signozhq/switch": "0.0.2",
    "@signozhq/table": "0.3.7",
    "@signozhq/tooltip": "0.0.2",
    "@tanstack/react-table": "8.20.6",

@@ -12,6 +12,5 @@
  "pipeline": "Pipeline",
  "pipelines": "Pipelines",
  "archives": "Archives",
  "logs_to_metrics": "Logs To Metrics",
  "roles": "Roles"
  "logs_to_metrics": "Logs To Metrics"
}

@@ -12,6 +12,5 @@
  "pipeline": "Pipeline",
  "pipelines": "Pipelines",
  "archives": "Archives",
  "logs_to_metrics": "Logs To Metrics",
  "roles": "Roles"
  "logs_to_metrics": "Logs To Metrics"
}

@@ -73,6 +73,5 @@
  "API_MONITORING": "SigNoz | External APIs",
  "METER_EXPLORER": "SigNoz | Meter Explorer",
  "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
  "METER": "SigNoz | Meter",
  "ROLES_SETTINGS": "SigNoz | Roles"
  "METER": "SigNoz | Meter"
}
@@ -2414,91 +2414,6 @@ export interface TypesUserDTO {
|
||||
updatedAt?: Date;
|
||||
}
|
||||
|
||||
export interface ZeustypesGettableHostDTO {
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
hosts?: ZeustypesHostDTO[] | null;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
state?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
tier?: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesHostDTO {
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
is_default?: boolean;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name?: string;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
url?: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesPostableHostDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface ZeustypesPostableProfileDTO {
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
existing_observability_tool?: string;
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
has_existing_observability_tool?: boolean;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
logs_scale_per_day_in_gb?: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
number_of_hosts?: number;
|
||||
/**
|
||||
* @type integer
|
||||
* @format int64
|
||||
*/
|
||||
number_of_services?: number;
|
||||
/**
|
||||
* @type array
|
||||
* @nullable true
|
||||
*/
|
||||
reasons_for_interest_in_signoz?: string[] | null;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
timeline_for_migrating_to_signoz?: string;
|
||||
/**
|
||||
* @type boolean
|
||||
*/
|
||||
uses_otel?: boolean;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
where_did_you_discover_signoz?: string;
|
||||
}
|
||||
|
||||
export type ChangePasswordPathParameters = {
|
||||
id: string;
|
||||
};
|
||||
@@ -3214,14 +3129,6 @@ export type RotateSession200 = {
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type GetHosts200 = {
|
||||
data?: ZeustypesGettableHostDTO;
|
||||
/**
|
||||
* @type string
|
||||
*/
|
||||
status?: string;
|
||||
};
|
||||
|
||||
export type QueryRangeV5200 = {
|
||||
data?: Querybuildertypesv5QueryRangeResponseDTO;
|
||||
/**
|
||||
|
||||
@@ -1,269 +0,0 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
QueryClient,
|
||||
QueryFunction,
|
||||
QueryKey,
|
||||
UseMutationOptions,
|
||||
UseMutationResult,
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetHosts200,
|
||||
RenderErrorResponseDTO,
|
||||
ZeustypesPostableHostDTO,
|
||||
ZeustypesPostableProfileDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoint gets the host info from zeus.
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
export const getHosts = (signal?: AbortSignal) => {
|
||||
return GeneratedAPIInstance<GetHosts200>({
|
||||
url: `/api/v2/zeus/hosts`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetHostsQueryKey = () => {
|
||||
return ['getHosts'] as const;
|
||||
};
|
||||
|
||||
export const getGetHostsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getHosts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
|
||||
}) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
|
||||
signal,
|
||||
}) => getHosts(signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getHosts>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetHostsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getHosts>>
|
||||
>;
|
||||
export type GetHostsQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
|
||||
export function useGetHosts<
|
||||
TData = Awaited<ReturnType<typeof getHosts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetHostsQueryOptions(options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get host info from Zeus.
|
||||
*/
|
||||
export const invalidateGetHosts = async (
|
||||
queryClient: QueryClient,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetHostsQueryKey() },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint saves the host of a deployment to zeus.
|
||||
* @summary Put host in Zeus for a deployment.
|
||||
*/
|
||||
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
|
||||
return GeneratedAPIInstance<void>({
|
||||
url: `/api/v2/zeus/hosts`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: zeustypesPostableHostDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getPutHostMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['putHost'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
{ data: ZeustypesPostableHostDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return putHost(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type PutHostMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof putHost>>
|
||||
>;
|
||||
export type PutHostMutationBody = ZeustypesPostableHostDTO;
|
||||
export type PutHostMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Put host in Zeus for a deployment.
|
||||
*/
|
||||
export const usePutHost = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof putHost>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableHostDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getPutHostMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint saves the profile of a deployment to zeus.
|
||||
* @summary Put profile in Zeus for a deployment.
|
||||
*/
|
||||
export const putProfile = (
|
||||
zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
|
||||
) => {
|
||||
return GeneratedAPIInstance<void>({
|
||||
url: `/api/v2/zeus/profiles`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: zeustypesPostableProfileDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getPutProfileMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['putProfile'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
{ data: ZeustypesPostableProfileDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return putProfile(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type PutProfileMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof putProfile>>
|
||||
>;
|
||||
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
|
||||
export type PutProfileMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Put profile in Zeus for a deployment.
|
||||
*/
|
||||
export const usePutProfile = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof putProfile>>,
|
||||
TError,
|
||||
{ data: ZeustypesPostableProfileDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getPutProfileMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
frontend/src/api/v1/domains/id/delete.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';

const deleteDomain = async (id: string): Promise<SuccessResponseV2<null>> => {
	try {
		const response = await axios.delete<null>(`/domains/${id}`);

		return {
			httpStatusCode: response.status,
			data: null,
		};
	} catch (error) {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default deleteDomain;

frontend/src/api/v1/domains/id/put.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { UpdatableAuthDomain } from 'types/api/v1/domains/put';

const put = async (
	props: UpdatableAuthDomain,
): Promise<SuccessResponseV2<null>> => {
	try {
		const response = await axios.put<RawSuccessResponse<null>>(
			`/domains/${props.id}`,
			{ config: props.config },
		);

		return {
			httpStatusCode: response.status,
			data: response.data.data,
		};
	} catch (error) {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default put;

frontend/src/api/v1/domains/list.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';

const listAllDomain = async (): Promise<
	SuccessResponseV2<GettableAuthDomain[]>
> => {
	try {
		const response = await axios.get<RawSuccessResponse<GettableAuthDomain[]>>(
			`/domains`,
		);

		return {
			httpStatusCode: response.status,
			data: response.data.data,
		};
	} catch (error) {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default listAllDomain;

frontend/src/api/v1/domains/post.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';

const post = async (
	props: PostableAuthDomain,
): Promise<SuccessResponseV2<GettableAuthDomain>> => {
	try {
		const response = await axios.post<RawSuccessResponse<GettableAuthDomain>>(
			`/domains`,
			props,
		);

		return {
			httpStatusCode: response.status,
			data: response.data.data,
		};
	} catch (error) {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	}
};

export default post;

frontend/src/auto-import-registry.d.ts (vendored, 1 change)
@@ -24,6 +24,5 @@ import '@signozhq/popover';
import '@signozhq/radio-group';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/switch';
import '@signozhq/table';
import '@signozhq/tooltip';

@@ -42,7 +42,6 @@
	display: flex;
	flex-direction: column;
	padding: 16px;
	padding-bottom: 0;
}

.title {

@@ -191,7 +190,7 @@
	padding: 0px;
}
.log-detail-drawer__footer-hint {
	position: sticky;
	position: absolute;
	bottom: 0;
	left: 0;
	right: 0;

@@ -367,7 +366,7 @@
}

.log-detail-drawer__footer-hint {
	position: sticky;
	position: absolute;
	bottom: 0;
	left: 0;
	right: 0;

@@ -55,7 +55,6 @@ const ROUTES = {
	LOGS_INDEX_FIELDS: '/logs-explorer/index-fields',
	TRACE_EXPLORER: '/trace-explorer',
	BILLING: '/settings/billing',
	ROLES_SETTINGS: '/settings/roles',
	SUPPORT: '/support',
	LOGS_SAVE_VIEWS: '/logs/saved-views',
	TRACES_SAVE_VIEWS: '/traces/saved-views',

@@ -34,9 +34,6 @@ function DashboardVariableSelection(): JSX.Element | null {
	const sortedVariablesArray = useDashboardVariablesSelector(
		(state) => state.sortedVariablesArray,
	);
	const dynamicVariableOrder = useDashboardVariablesSelector(
		(state) => state.dynamicVariableOrder,
	);
	const dependencyData = useDashboardVariablesSelector(
		(state) => state.dependencyData,
	);

@@ -55,11 +52,10 @@ function DashboardVariableSelection(): JSX.Element | null {
	}, [getUrlVariables, updateUrlVariable, dashboardVariables]);

	// Memoize the order key to avoid unnecessary triggers
	const variableOrderKey = useMemo(() => {
		const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
		const dynamicVariableOrderKey = dynamicVariableOrder?.join(',') ?? '';
		return `${queryVariableOrderKey}|${dynamicVariableOrderKey}`;
	}, [dependencyData?.order, dynamicVariableOrder]);
	const dependencyOrderKey = useMemo(
		() => dependencyData?.order?.join(',') ?? '',
		[dependencyData?.order],
	);

	// Initialize fetch store then start a new fetch cycle.
	// Runs on dependency order changes, and time range changes.

@@ -70,7 +66,7 @@ function DashboardVariableSelection(): JSX.Element | null {
		initializeVariableFetchStore(allVariableNames);
		enqueueFetchOfAllVariables();
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [variableOrderKey, minTime, maxTime]);
	}, [dependencyOrderKey, minTime, maxTime]);

	// Performance optimization: For dynamic variables with allSelected=true, we don't store
	// individual values in localStorage since we can always derive them from available options.
@@ -1,203 +0,0 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { act, render } from '@testing-library/react';
|
||||
import {
|
||||
dashboardVariablesStore,
|
||||
setDashboardVariablesStore,
|
||||
updateDashboardVariablesStore,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
|
||||
import {
|
||||
IDashboardVariables,
|
||||
IDashboardVariablesStoreState,
|
||||
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
|
||||
import {
|
||||
enqueueFetchOfAllVariables,
|
||||
initializeVariableFetchStore,
|
||||
} from 'providers/Dashboard/store/variableFetchStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import DashboardVariableSelection from '../DashboardVariableSelection';
|
||||
|
||||
// Mock providers/Dashboard/Dashboard
|
||||
const mockSetSelectedDashboard = jest.fn();
|
||||
const mockUpdateLocalStorageDashboardVariables = jest.fn();
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): Record<string, unknown> => ({
|
||||
setSelectedDashboard: mockSetSelectedDashboard,
|
||||
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock hooks/dashboard/useVariablesFromUrl
|
||||
const mockUpdateUrlVariable = jest.fn();
|
||||
const mockGetUrlVariables = jest.fn().mockReturnValue({});
|
||||
jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
|
||||
__esModule: true,
|
||||
default: (): Record<string, unknown> => ({
|
||||
updateUrlVariable: mockUpdateUrlVariable,
|
||||
getUrlVariables: mockGetUrlVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock variableFetchStore functions
|
||||
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
|
||||
initializeVariableFetchStore: jest.fn(),
|
||||
enqueueFetchOfAllVariables: jest.fn(),
|
||||
enqueueDescendantsOfVariable: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock initializeDefaultVariables
|
||||
jest.mock('providers/Dashboard/initializeDefaultVariables', () => ({
|
||||
initializeDefaultVariables: jest.fn(),
|
||||
}));
|
||||
|
||||
// Mock react-redux useSelector for globalTime
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
|
||||
}));
|
||||
|
||||
// Mock VariableItem to avoid rendering complexity
|
||||
jest.mock('../VariableItem', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div data-testid="variable-item" />,
|
||||
}));
|
||||
|
||||
function createVariable(
|
||||
overrides: Partial<IDashboardVariable> = {},
|
||||
): IDashboardVariable {
|
||||
return {
|
||||
id: 'test-id',
|
||||
name: 'test-var',
|
||||
description: '',
|
||||
type: 'QUERY',
|
||||
sort: 'DISABLED',
|
||||
showALLOption: false,
|
||||
multiSelect: false,
|
||||
order: 0,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function resetStore(): void {
|
||||
dashboardVariablesStore.set(() => ({
|
||||
dashboardId: '',
|
||||
variables: {},
|
||||
sortedVariablesArray: [],
|
||||
dependencyData: null,
|
||||
variableTypes: {},
|
||||
dynamicVariableOrder: [],
|
||||
}));
|
||||
}
|
||||
|
||||
describe('DashboardVariableSelection', () => {
|
||||
beforeEach(() => {
|
||||
resetStore();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should call initializeVariableFetchStore and enqueueFetchOfAllVariables on mount', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when dynamicVariableOrder changes', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
// Clear mocks after initial render
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Add a DYNAMIC variable which changes dynamicVariableOrder
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(
|
||||
expect.arrayContaining(['env', 'dyn1']),
|
||||
);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-trigger fetch cycle when a dynamic variable is removed', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 2 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Remove dyn2, changing dynamicVariableOrder from ['dyn1','dyn2'] to ['dyn1']
|
||||
act(() => {
|
||||
updateDashboardVariablesStore({
|
||||
dashboardId: 'dash-1',
|
||||
variables: {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env', 'dyn1']);
|
||||
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should NOT re-trigger fetch cycle when dynamicVariableOrder stays the same', () => {
|
||||
const variables: IDashboardVariables = {
|
||||
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
|
||||
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
|
||||
};
|
||||
|
||||
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
|
||||
|
||||
render(<DashboardVariableSelection />);
|
||||
|
||||
(initializeVariableFetchStore as jest.Mock).mockClear();
|
||||
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
|
||||
|
||||
// Update a non-dynamic variable's selectedValue — dynamicVariableOrder unchanged
|
||||
act(() => {
|
||||
const snapshot = dashboardVariablesStore.getSnapshot();
|
||||
dashboardVariablesStore.set(
|
||||
(): IDashboardVariablesStoreState => ({
|
||||
...snapshot,
|
||||
variables: {
|
||||
...snapshot.variables,
|
||||
env: {
|
||||
...snapshot.variables.env,
|
||||
selectedValue: 'production',
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
expect(initializeVariableFetchStore).not.toHaveBeenCalled();
|
||||
expect(enqueueFetchOfAllVariables).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,325 +0,0 @@
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import {
|
||||
MetricRangePayloadProps,
|
||||
MetricRangePayloadV3,
|
||||
} from 'types/api/metrics/getQueryRange';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { PanelMode } from '../../types';
|
||||
import { prepareChartData, prepareUPlotConfig } from '../utils';
|
||||
|
||||
jest.mock(
|
||||
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
|
||||
() => ({
|
||||
getStoredSeriesVisibility: jest.fn(),
|
||||
}),
|
||||
);
|
||||
|
||||
jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
|
||||
}));
|
||||
|
||||
jest.mock('lib/dashboard/getQueryResults', () => ({
|
||||
getLegend: jest.fn(
|
||||
(_queryData: unknown, _query: unknown, labelName: string) =>
|
||||
`legend-${labelName}`,
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('lib/getLabelName', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(
|
||||
(_metric: unknown, _queryName: string, _legend: string) => 'baseLabel',
|
||||
),
|
||||
}));
|
||||
|
||||
const getLegendMock = jest.requireMock('lib/dashboard/getQueryResults')
|
||||
.getLegend as jest.Mock;
|
||||
const getLabelNameMock = jest.requireMock('lib/getLabelName')
|
||||
.default as jest.Mock;
|
||||
|
||||
const createApiResponse = (
|
||||
result: MetricRangePayloadProps['data']['result'] = [],
|
||||
): MetricRangePayloadProps => ({
|
||||
data: {
|
||||
result,
|
||||
resultType: 'matrix',
|
||||
newResult: (null as unknown) as MetricRangePayloadV3,
|
||||
},
|
||||
});
|
||||
|
||||
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
|
||||
({
|
||||
id: 'widget-1',
|
||||
yAxisUnit: 'ms',
|
||||
isLogScale: false,
|
||||
thresholds: [],
|
||||
customLegendColors: {},
|
||||
...overrides,
|
||||
} as Widgets);
|
||||
|
||||
const defaultTimezone = {
|
||||
name: 'UTC',
|
||||
value: 'UTC',
|
||||
offset: 'UTC',
|
||||
searchIndex: 'UTC',
|
||||
};
|
||||
|
||||
describe('TimeSeriesPanel utils', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
getLabelNameMock.mockReturnValue('baseLabel');
|
||||
getLegendMock.mockImplementation(
|
||||
(_queryData: unknown, _query: unknown, labelName: string) =>
|
||||
`legend-${labelName}`,
|
||||
);
|
||||
});
|
||||
|
||||
describe('prepareChartData', () => {
|
||||
it('returns aligned data with timestamps and empty series when result is empty', () => {
|
||||
const apiResponse = createApiResponse([]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data).toHaveLength(1);
|
||||
expect(data[0]).toEqual([]);
|
||||
});
|
||||
|
||||
it('returns timestamps and one series of y values for single series', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
legend: 'Series A',
|
||||
values: [
|
||||
[1000, '10'],
|
||||
[2000, '20'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data).toHaveLength(2);
|
||||
expect(data[0]).toEqual([1000, 2000]);
|
||||
expect(data[1]).toEqual([10, 20]);
|
||||
});
|
||||
|
||||
it('merges timestamps and fills missing values with null for multiple series', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[3000, '3'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q2',
|
||||
values: [
|
||||
[1000, '10'],
|
||||
[2000, '20'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const data = prepareChartData(apiResponse);
|
||||
|
||||
expect(data[0]).toEqual([1000, 2000, 3000]);
|
||||
// First series: 1, null, 3
|
||||
expect(data[1]).toEqual([1, null, 3]);
|
||||
// Second series: 10, 20, null
|
||||
expect(data[2]).toEqual([10, 20, null]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('prepareUPlotConfig', () => {
|
||||
const baseParams = {
|
||||
widget: createWidget(),
|
||||
isDarkMode: true,
|
||||
currentQuery: {} as Query,
|
||||
onClick: jest.fn(),
|
||||
onDragSelect: jest.fn(),
|
||||
apiResponse: createApiResponse(),
|
||||
timezone: defaultTimezone,
|
||||
panelMode: PanelMode.DASHBOARD_VIEW,
|
||||
};
|
||||
|
||||
it('adds no series when apiResponse has empty result', () => {
|
||||
const builder = prepareUPlotConfig(baseParams);
|
||||
|
||||
const config = builder.getConfig();
|
||||
// Base series (timestamp) only
|
||||
expect(config.series).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('adds one series per result item with label from getLabelName when no currentQuery', () => {
|
||||
getLegendMock.mockReset();
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: { __name__: 'cpu' },
|
||||
queryName: 'Q1',
|
||||
legend: 'CPU',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, '2'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: (null as unknown) as Query,
|
||||
});
|
||||
|
||||
expect(getLabelNameMock).toHaveBeenCalled();
|
||||
expect(getLegendMock).not.toHaveBeenCalled();
|
||||
|
||||
const config = builder.getConfig();
|
||||
expect(config.series).toHaveLength(2);
|
||||
expect(config.series?.[1]).toMatchObject({
|
||||
label: 'baseLabel',
|
||||
scale: 'y',
|
||||
});
|
||||
});
|
||||
|
||||
it('uses getLegend for label when currentQuery is provided', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
legend: 'L1',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: {} as Query,
|
||||
});
|
||||
|
||||
expect(getLegendMock).toHaveBeenCalledWith(
|
||||
{
|
||||
legend: 'L1',
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [[1000, '1']],
|
||||
},
|
||||
{},
|
||||
'baseLabel',
|
||||
);
|
||||
|
||||
const config = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
apiResponse,
|
||||
currentQuery: {} as Query,
|
||||
}).getConfig();
|
||||
expect(config.series?.[1]).toMatchObject({
|
||||
label: 'legend-baseLabel',
|
||||
});
|
||||
});
|
||||
|
||||
it('uses DrawStyle.Line and VisibilityMode.Never when series has multiple valid points', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, '2'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
const series = config.series?.[1];
|
||||
|
||||
expect(config.series).toHaveLength(2);
|
||||
// Line style and points never for multi-point series (checked via builder API)
|
||||
const legendItems = builder.getLegendItems();
|
||||
expect(Object.keys(legendItems)).toHaveLength(1);
|
||||
// multi-point series → points hidden
|
||||
expect(series).toBeDefined();
|
||||
expect(series!.points?.show).toBe(false);
|
||||
});
|
||||
|
||||
it('uses DrawStyle.Points and shows points when series has only one valid point', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [
|
||||
[1000, '1'],
|
||||
[2000, 'NaN'],
|
||||
[3000, 'invalid'],
|
||||
],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
|
||||
expect(config.series).toHaveLength(2);
|
||||
const seriesConfig = config.series?.[1];
|
||||
expect(seriesConfig).toBeDefined();
|
||||
// Single valid point -> Points draw style (asserted via series config)
|
||||
expect(seriesConfig).toMatchObject({
|
||||
scale: 'y',
|
||||
spanGaps: true,
|
||||
});
|
||||
// single-point series → points shown
|
||||
expect(seriesConfig).toBeDefined();
|
||||
expect(seriesConfig!.points?.show).toBe(true);
|
||||
});
|
||||
|
||||
it('uses widget customLegendColors to set series stroke color', () => {
|
||||
const widget = createWidget({
|
||||
customLegendColors: { 'legend-baseLabel': '#ff0000' },
|
||||
});
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({
|
||||
...baseParams,
|
||||
widget,
|
||||
apiResponse,
|
||||
});
|
||||
|
||||
const config = builder.getConfig();
|
||||
const seriesConfig = config.series?.[1];
|
||||
expect(seriesConfig).toBeDefined();
|
||||
expect(seriesConfig!.stroke).toBe('#ff0000');
|
||||
});
|
||||
|
||||
it('adds multiple series when result has multiple items', () => {
|
||||
const apiResponse = createApiResponse([
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q1',
|
||||
values: [[1000, '1']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
{
|
||||
metric: {},
|
||||
queryName: 'Q2',
|
||||
values: [[1000, '2']],
|
||||
} as MetricRangePayloadProps['data']['result'][0],
|
||||
]);
|
||||
|
||||
const builder = prepareUPlotConfig({ ...baseParams, apiResponse });
|
||||
const config = builder.getConfig();
|
||||
|
||||
expect(config.series).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||

@@ -15,12 +15,10 @@ import {
	VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { isInvalidPlotValue } from 'lib/uPlotV2/utils/dataUtils';
import get from 'lodash-es/get';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { QueryData } from 'types/api/widgets/getQuery';

import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';

@@ -35,22 +33,6 @@ export const prepareChartData = (
	return [timestampArr, ...yAxisValuesArr];
};

function hasSingleVisiblePointForSeries(series: QueryData): boolean {
	const rawValues = series.values ?? [];
	let validPointCount = 0;

	for (const [, rawValue] of rawValues) {
		if (!isInvalidPlotValue(rawValue)) {
			validPointCount += 1;
			if (validPointCount > 1) {
				return false;
			}
		}
	}

	return true;
}

export const prepareUPlotConfig = ({
	widget,
	isDarkMode,

@@ -95,8 +77,9 @@
		stepInterval: minStepInterval,
	});

	apiResponse.data?.result?.forEach((series) => {
		const hasSingleValidPoint = hasSingleVisiblePointForSeries(series);
	const seriesList = apiResponse.data?.result || [];

	seriesList.forEach((series) => {
		const baseLabelName = getLabelName(
			series.metric,
			series.queryName || '', // query

@@ -109,15 +92,13 @@

		builder.addSeries({
			scaleKey: 'y',
			drawStyle: hasSingleValidPoint ? DrawStyle.Points : DrawStyle.Line,
			drawStyle: DrawStyle.Line,
			label: label,
			colorMapping: widget.customLegendColors ?? {},
			spanGaps: true,
			lineStyle: LineStyle.Solid,
			lineInterpolation: LineInterpolation.Spline,
			showPoints: hasSingleValidPoint
				? VisibilityMode.Always
				: VisibilityMode.Never,
			showPoints: VisibilityMode.Never,
			pointSize: 5,
			isDarkMode,
			panelType: PANEL_TYPES.TIME_SERIES,
@@ -51,6 +51,28 @@ function WidgetGraphContainer({
return <Spinner size="large" tip="Loading..." />;
}

if (
selectedGraph !== PANEL_TYPES.LIST &&
selectedGraph !== PANEL_TYPES.VALUE &&
queryResponse.data?.payload.data?.result?.length === 0
) {
return (
<NotFoundContainer>
<Typography>No Data</Typography>
</NotFoundContainer>
);
}
if (
(selectedGraph === PANEL_TYPES.LIST || selectedGraph === PANEL_TYPES.VALUE) &&
queryResponse.data?.payload?.data?.newResult?.data?.result?.length === 0
) {
return (
<NotFoundContainer>
<Typography>No Data</Typography>
</NotFoundContainer>
);
}

if (queryResponse.isIdle) {
return (
<NotFoundContainer>

@@ -7,12 +7,6 @@
display: flex;
align-items: center;
justify-content: space-between;

.auth-domain-title {
margin: 0;
font-size: 20px;
font-weight: 600;
}
}
}

@@ -21,36 +15,5 @@
display: flex;
flex-direction: row;
gap: 24px;

.auth-domain-list-action-link {
cursor: pointer;
color: var(--primary);
transition: color 0.3s;
border: none;
background: none;
padding: 0;
font-size: inherit;

&:hover {
opacity: 0.8;
text-decoration: underline;
}

&.delete {
color: var(--destructive);
}
}
}

.auth-domain-list-na {
padding-left: 6px;
color: var(--text-secondary);
}
}

.delete-ingestion-key-modal {
.delete-text {
color: var(--text);
margin: 0;
}
}
@@ -1,38 +1,28 @@
import { useCallback, useState } from 'react';
import { Button } from '@signozhq/button';
import { toast } from '@signozhq/sonner';
import { Form, Modal } from 'antd';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import {
useCreateAuthDomain,
useUpdateAuthDomain,
} from 'api/generated/services/authdomains';
import {
AuthtypesGettableAuthDomainDTO,
AuthtypesGoogleConfigDTO,
AuthtypesRoleMappingDTO,
RenderErrorResponseDTO,
} from 'api/generated/services/sigNoz.schemas';
import { AxiosError } from 'axios';
import { useState } from 'react';
import { Button, Form, Modal } from 'antd';
import put from 'api/v1/domains/id/put';
import post from 'api/v1/domains/post';
import { FeatureKeys } from 'constants/features';
import { defaultTo } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { useErrorModal } from 'providers/ErrorModalProvider';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';
import { GettableAuthDomain } from 'types/api/v1/domains/list';
import { PostableAuthDomain } from 'types/api/v1/domains/post';

import AuthnProviderSelector from './AuthnProviderSelector';
import {
convertDomainMappingsToRecord,
convertGroupMappingsToRecord,
FormValues,
prepareInitialValues,
} from './CreateEdit.utils';
import ConfigureGoogleAuthAuthnProvider from './Providers/AuthnGoogleAuth';
import ConfigureOIDCAuthnProvider from './Providers/AuthnOIDC';
import ConfigureSAMLAuthnProvider from './Providers/AuthnSAML';

import './CreateEdit.styles.scss';

interface CreateOrEditProps {
isCreate: boolean;
onClose: () => void;
record?: GettableAuthDomain;
}

function configureAuthnProvider(
authnProvider: string,
isCreate: boolean,
@@ -49,186 +39,64 @@ function configureAuthnProvider(
}
}

interface CreateOrEditProps {
isCreate: boolean;
onClose: () => void;
record?: AuthtypesGettableAuthDomainDTO;
}

function CreateOrEdit(props: CreateOrEditProps): JSX.Element {
const { isCreate, record, onClose } = props;
const [form] = Form.useForm<FormValues>();
const [form] = Form.useForm<PostableAuthDomain>();
const [authnProvider, setAuthnProvider] = useState<string>(
record?.ssoType || '',
);

const { showErrorModal } = useErrorModal();
const { featureFlags } = useAppContext();

const handleError = useCallback(
(error: AxiosError<RenderErrorResponseDTO>): void => {
try {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
} catch (apiError) {
showErrorModal(apiError as APIError);
}
},
[showErrorModal],
);
const samlEnabled =
featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false;

const {
mutate: createAuthDomain,
isLoading: isCreating,
} = useCreateAuthDomain<AxiosError<RenderErrorResponseDTO>>();

const {
mutate: updateAuthDomain,
isLoading: isUpdating,
} = useUpdateAuthDomain<AxiosError<RenderErrorResponseDTO>>();

/**
* Prepares Google Auth config for API payload
*/
const getGoogleAuthConfig = useCallback(():
| AuthtypesGoogleConfigDTO
| undefined => {
const config = form.getFieldValue('googleAuthConfig');
if (!config) {
return undefined;
}

const { domainToAdminEmailList, ...rest } = config;
const domainToAdminEmail = convertDomainMappingsToRecord(
domainToAdminEmailList,
);

return {
...rest,
...(domainToAdminEmail && { domainToAdminEmail }),
};
}, [form]);

// Prepares role mapping for API payload
const getRoleMapping = useCallback((): AuthtypesRoleMappingDTO | undefined => {
const roleMapping = form.getFieldValue('roleMapping');
if (!roleMapping) {
return undefined;
}

const { groupMappingsList, ...rest } = roleMapping;
const groupMappings = convertGroupMappingsToRecord(groupMappingsList);

// Only return roleMapping if there's meaningful content
const hasDefaultRole = !!rest.defaultRole;
const hasUseRoleAttribute = rest.useRoleAttribute === true;
const hasGroupMappings =
groupMappings && Object.keys(groupMappings).length > 0;

if (!hasDefaultRole && !hasUseRoleAttribute && !hasGroupMappings) {
return undefined;
}

return {
...rest,
...(groupMappings && { groupMappings }),
};
}, [form]);

const onSubmitHandler = useCallback(async (): Promise<void> => {
try {
await form.validateFields();
} catch {
return;
}

const onSubmitHandler = async (): Promise<void> => {
const name = form.getFieldValue('name');
const googleAuthConfig = getGoogleAuthConfig();
const googleAuthConfig = form.getFieldValue('googleAuthConfig');
const samlConfig = form.getFieldValue('samlConfig');
const oidcConfig = form.getFieldValue('oidcConfig');
const roleMapping = getRoleMapping();

if (isCreate) {
createAuthDomain(
{
data: {
name,
config: {
ssoEnabled: true,
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
roleMapping,
},
try {
if (isCreate) {
await post({
name,
config: {
ssoEnabled: true,
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
},
},
{
onSuccess: () => {
toast.success('Domain created successfully');
onClose();
});
} else {
await put({
id: record?.id || '',
config: {
ssoEnabled: form.getFieldValue('ssoEnabled'),
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
},
onError: handleError,
},
);
} else {
if (!record?.id) {
return;
});
}

updateAuthDomain(
{
pathParams: { id: record.id },
data: {
config: {
ssoEnabled: form.getFieldValue('ssoEnabled'),
ssoType: authnProvider,
googleAuthConfig,
samlConfig,
oidcConfig,
roleMapping,
},
},
},
{
onSuccess: () => {
toast.success('Domain updated successfully');
onClose();
},
onError: handleError,
},
);
onClose();
} catch (error) {
showErrorModal(error as APIError);
}
}, [
authnProvider,
createAuthDomain,
form,
getGoogleAuthConfig,
getRoleMapping,
handleError,
isCreate,
};

onClose,
record,
updateAuthDomain,
]);

const onBackHandler = useCallback((): void => {
form.resetFields();
const onBackHandler = (): void => {
setAuthnProvider('');
}, [form]);
};

return (
<Modal
open
footer={null}
onCancel={onClose}
width={authnProvider ? 980 : undefined}
>
<Modal open footer={null} onCancel={onClose}>
<Form
name="auth-domain"
initialValues={defaultTo(prepareInitialValues(record), {
initialValues={defaultTo(record, {
name: '',
ssoEnabled: false,
ssoType: '',
@@ -246,22 +114,9 @@ function CreateOrEdit(props: CreateOrEditProps): JSX.Element {
<div className="auth-domain-configure">
{configureAuthnProvider(authnProvider, isCreate)}
<section className="action-buttons">
{isCreate && (
<Button onClick={onBackHandler} variant="solid" color="secondary">
Back
</Button>
)}
{!isCreate && (
<Button onClick={onClose} variant="solid" color="secondary">
Cancel
</Button>
)}
<Button
onClick={onSubmitHandler}
variant="solid"
color="primary"
loading={isCreating || isUpdating}
>
{isCreate && <Button onClick={onBackHandler}>Back</Button>}
{!isCreate && <Button onClick={onClose}>Cancel</Button>}
<Button onClick={onSubmitHandler} type="primary">
Save Changes
</Button>
</section>
@@ -1,134 +0,0 @@
import {
AuthtypesGettableAuthDomainDTO,
AuthtypesGoogleConfigDTO,
AuthtypesOIDCConfigDTO,
AuthtypesRoleMappingDTO,
AuthtypesSamlConfigDTO,
} from 'api/generated/services/sigNoz.schemas';

// Form values interface for internal use (includes array-based fields for UI)
export interface FormValues {
name?: string;
ssoEnabled?: boolean;
ssoType?: string;
googleAuthConfig?: AuthtypesGoogleConfigDTO & {
domainToAdminEmailList?: Array<{ domain?: string; adminEmail?: string }>;
};
samlConfig?: AuthtypesSamlConfigDTO;
oidcConfig?: AuthtypesOIDCConfigDTO;
roleMapping?: AuthtypesRoleMappingDTO & {
groupMappingsList?: Array<{ groupName?: string; role?: string }>;
};
}

/**
* Converts groupMappingsList array to groupMappings Record for API
*/
export function convertGroupMappingsToRecord(
groupMappingsList?: Array<{ groupName?: string; role?: string }>,
): Record<string, string> | undefined {
if (!Array.isArray(groupMappingsList) || groupMappingsList.length === 0) {
return undefined;
}

const groupMappings: Record<string, string> = {};
groupMappingsList.forEach((item) => {
if (item.groupName && item.role) {
groupMappings[item.groupName] = item.role;
}
});

return Object.keys(groupMappings).length > 0 ? groupMappings : undefined;
}

/**
* Converts groupMappings Record to groupMappingsList array for form
*/
export function convertGroupMappingsToList(
groupMappings?: Record<string, string> | null,
): Array<{ groupName: string; role: string }> {
if (!groupMappings) {
return [];
}

return Object.entries(groupMappings).map(([groupName, role]) => ({
groupName,
role,
}));
}

/**
* Converts domainToAdminEmailList array to domainToAdminEmail Record for API
*/
export function convertDomainMappingsToRecord(
domainToAdminEmailList?: Array<{ domain?: string; adminEmail?: string }>,
): Record<string, string> | undefined {
if (
!Array.isArray(domainToAdminEmailList) ||
domainToAdminEmailList.length === 0
) {
return undefined;
}

const domainToAdminEmail: Record<string, string> = {};
domainToAdminEmailList.forEach((item) => {
if (item.domain && item.adminEmail) {
domainToAdminEmail[item.domain] = item.adminEmail;
}
});

return Object.keys(domainToAdminEmail).length > 0
? domainToAdminEmail
: undefined;
}

/**
* Converts domainToAdminEmail Record to domainToAdminEmailList array for form
*/
export function convertDomainMappingsToList(
domainToAdminEmail?: Record<string, string>,
): Array<{ domain: string; adminEmail: string }> {
if (!domainToAdminEmail) {
return [];
}

return Object.entries(domainToAdminEmail).map(([domain, adminEmail]) => ({
domain,
adminEmail,
}));
}

/**
* Prepares initial form values from API record
*/
export function prepareInitialValues(
record?: AuthtypesGettableAuthDomainDTO,
): FormValues {
if (!record) {
return {
name: '',
ssoEnabled: false,
ssoType: '',
};
}

return {
...record,
googleAuthConfig: record.googleAuthConfig
? {
...record.googleAuthConfig,
domainToAdminEmailList: convertDomainMappingsToList(
record.googleAuthConfig.domainToAdminEmail,
),
}
: undefined,
roleMapping: record.roleMapping
? {
...record.roleMapping,
groupMappingsList: convertGroupMappingsToList(
record.roleMapping.groupMappings,
),
}
: undefined,
};
}
@@ -1,67 +1,20 @@
import { useCallback, useState } from 'react';
import { Callout } from '@signozhq/callout';
import { Checkbox } from '@signozhq/checkbox';
import { Color, Style } from '@signozhq/design-tokens';
import {
ChevronDown,
ChevronRight,
CircleHelp,
TriangleAlert,
} from '@signozhq/icons';
import { Input } from '@signozhq/input';
import { Collapse, Form, Tooltip } from 'antd';
import TextArea from 'antd/lib/input/TextArea';
import { useCollapseSectionErrors } from 'hooks/useCollapseSectionErrors';

import DomainMappingList from './components/DomainMappingList';
import EmailTagInput from './components/EmailTagInput';
import RoleMappingSection from './components/RoleMappingSection';
import { Form, Input, Typography } from 'antd';

import './Providers.styles.scss';

type ExpandedSection = 'workspace-groups' | 'role-mapping' | null;

function ConfigureGoogleAuthAuthnProvider({
isCreate,
}: {
isCreate: boolean;
}): JSX.Element {
const form = Form.useFormInstance();
const fetchGroups = Form.useWatch(['googleAuthConfig', 'fetchGroups'], form);

const [expandedSection, setExpandedSection] = useState<ExpandedSection>(null);

const handleWorkspaceGroupsChange = useCallback(
(keys: string | string[]): void => {
const isExpanding = Array.isArray(keys) ? keys.length > 0 : !!keys;
setExpandedSection(isExpanding ? 'workspace-groups' : null);
},
[],
);

const handleRoleMappingChange = useCallback((expanded: boolean): void => {
setExpandedSection(expanded ? 'role-mapping' : null);
}, []);

const {
hasErrors: hasWorkspaceGroupsErrors,
errorMessages: workspaceGroupsErrorMessages,
} = useCollapseSectionErrors(
['googleAuthConfig'],
[
['googleAuthConfig', 'fetchGroups'],
['googleAuthConfig', 'serviceAccountJson'],
['googleAuthConfig', 'domainToAdminEmailList'],
['googleAuthConfig', 'fetchTransitiveGroupMembership'],
['googleAuthConfig', 'allowedGroups'],
],
);

return (
<div className="authn-provider">
<section className="authn-provider__header">
<h3 className="authn-provider__title">Edit Google Authentication</h3>
<p className="authn-provider__description">
<div className="google-auth">
<section className="header">
<Typography.Text className="title">
Edit Google Authentication
</Typography.Text>
<Typography.Paragraph className="description">
Enter OAuth 2.0 credentials obtained from the Google API Console below.
Read the{' '}
<a
@@ -72,247 +25,50 @@ function ConfigureGoogleAuthAuthnProvider({
docs
</a>{' '}
for more information.
</p>
</Typography.Paragraph>
</section>

<div className="authn-provider__columns">
{/* Left Column - Core OAuth Settings */}
<div className="authn-provider__left">
<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="google-domain">
Domain
<Tooltip title="The email domain for users who should use SSO (e.g., `example.com` for users with `@example.com` emails)">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name="name"
className="authn-provider__form-item"
rules={[
{ required: true, message: 'Domain is required', whitespace: true },
]}
>
<Input id="google-domain" disabled={!isCreate} />
</Form.Item>
</div>
<Form.Item
label="Domain"
name="name"
className="field"
tooltip={{
title:
'The email domain for users who should use SSO (e.g., `example.com` for users with `@example.com` emails)',
}}
>
<Input disabled={!isCreate} />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="google-client-id">
Client ID
<Tooltip title="ClientID is the application's ID. For example, 292085223830.apps.googleusercontent.com.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['googleAuthConfig', 'clientId']}
className="authn-provider__form-item"
rules={[
{ required: true, message: 'Client ID is required', whitespace: true },
]}
>
<Input id="google-client-id" />
</Form.Item>
</div>
<Form.Item
label="Client ID"
name={['googleAuthConfig', 'clientId']}
className="field"
tooltip={{
title: `ClientID is the application's ID. For example, 292085223830.apps.googleusercontent.com.`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="google-client-secret">
Client Secret
<Tooltip title="It is the application's secret.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['googleAuthConfig', 'clientSecret']}
className="authn-provider__form-item"
rules={[
{
required: true,
message: 'Client Secret is required',
whitespace: true,
},
]}
>
<Input id="google-client-secret" />
</Form.Item>
</div>
<Form.Item
label="Client Secret"
name={['googleAuthConfig', 'clientSecret']}
className="field"
tooltip={{
title: `It is the application's secret.`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__checkbox-row">
<Form.Item
name={['googleAuthConfig', 'insecureSkipEmailVerified']}
valuePropName="checked"
noStyle
>
<Checkbox
id="google-skip-email-verification"
labelName="Skip Email Verification"
onCheckedChange={(checked: boolean): void => {
form.setFieldValue(
['googleAuthConfig', 'insecureSkipEmailVerified'],
checked,
);
}}
/>
</Form.Item>
<Tooltip title='Whether to skip email verification. Defaults to "false"'>
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</div>

<Callout
type="warning"
size="small"
showIcon
description="Google OAuth2 won't be enabled unless you enter all the attributes above"
className="callout"
/>
</div>

{/* Right Column - Google Workspace Groups (Advanced) */}
<div className="authn-provider__right">
<Collapse
bordered={false}
activeKey={
expandedSection === 'workspace-groups' ? ['workspace-groups'] : []
}
onChange={handleWorkspaceGroupsChange}
className="authn-provider__collapse"
expandIcon={(): null => null}
>
<Collapse.Panel
key="workspace-groups"
header={
<div className="authn-provider__collapse-header">
{expandedSection !== 'workspace-groups' ? (
<ChevronRight size={16} />
) : (
<ChevronDown size={16} />
)}
<div className="authn-provider__collapse-header-text">
<h4 className="authn-provider__section-title">
Google Workspace Groups (Advanced)
</h4>
<p className="authn-provider__section-description">
Enable group fetching to retrieve user groups from Google Workspace.
Requires a Service Account with domain-wide delegation.
</p>
</div>
{expandedSection !== 'workspace-groups' && hasWorkspaceGroupsErrors && (
<Tooltip
title={
<div>
{workspaceGroupsErrorMessages.map((msg) => (
<div key={msg}>{msg}</div>
))}
</div>
}
>
<TriangleAlert size={16} color={Color.BG_CHERRY_500} />
</Tooltip>
)}
</div>
}
>
<div className="authn-provider__group-content">
<div className="authn-provider__checkbox-row">
<Form.Item
name={['googleAuthConfig', 'fetchGroups']}
valuePropName="checked"
noStyle
>
<Checkbox
id="google-fetch-groups"
labelName="Fetch Groups"
onCheckedChange={(checked: boolean): void => {
form.setFieldValue(['googleAuthConfig', 'fetchGroups'], checked);
}}
/>
</Form.Item>
<Tooltip title="Enable fetching Google Workspace groups for the user. Requires service account configuration.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</div>

{fetchGroups && (
<div className="authn-provider__group-fields">
<div className="authn-provider__field-group">
<label
className="authn-provider__label"
htmlFor="google-service-account-json"
>
Service Account JSON
<Tooltip title="The JSON content of the Google Service Account credentials file. Required for group fetching.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['googleAuthConfig', 'serviceAccountJson']}
className="authn-provider__form-item"
>
<TextArea
id="google-service-account-json"
rows={3}
placeholder="Paste service account JSON"
className="authn-provider__textarea"
/>
</Form.Item>
</div>

<DomainMappingList
fieldNamePrefix={['googleAuthConfig', 'domainToAdminEmailList']}
/>

<div className="authn-provider__checkbox-row">
<Form.Item
name={['googleAuthConfig', 'fetchTransitiveGroupMembership']}
valuePropName="checked"
noStyle
>
<Checkbox
id="google-transitive-membership"
labelName="Fetch Transitive Group Membership"
onCheckedChange={(checked: boolean): void => {
form.setFieldValue(
['googleAuthConfig', 'fetchTransitiveGroupMembership'],
checked,
);
}}
/>
</Form.Item>
<Tooltip title="If enabled, recursively fetch groups that contain other groups (transitive membership).">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</div>

<div className="authn-provider__field-group">
<label
className="authn-provider__label"
htmlFor="google-allowed-groups"
>
Allowed Groups
<Tooltip title="Optional list of allowed groups. If configured, only users belonging to one of these groups will be allowed to login.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['googleAuthConfig', 'allowedGroups']}
className="authn-provider__form-item"
>
<EmailTagInput placeholder="Type a group email and press Enter" />
</Form.Item>
</div>
</div>
)}
</div>
</Collapse.Panel>
</Collapse>

<RoleMappingSection
fieldNamePrefix={['roleMapping']}
isExpanded={expandedSection === 'role-mapping'}
onExpandChange={handleRoleMappingChange}
/>
</div>
</div>
<Callout
type="warning"
size="small"
showIcon
description="Google OAuth2 won’t be enabled unless you enter all the attributes above"
className="callout"
/>
</div>
);
}
@@ -1,212 +1,110 @@
import { useCallback, useState } from 'react';
import { Callout } from '@signozhq/callout';
import { Checkbox } from '@signozhq/checkbox';
import { Style } from '@signozhq/design-tokens';
import { CircleHelp } from '@signozhq/icons';
import { Input } from '@signozhq/input';
import { Form, Tooltip } from 'antd';

import ClaimMappingSection from './components/ClaimMappingSection';
import RoleMappingSection from './components/RoleMappingSection';
import { Checkbox, Form, Input, Typography } from 'antd';

import './Providers.styles.scss';

type ExpandedSection = 'claim-mapping' | 'role-mapping' | null;

function ConfigureOIDCAuthnProvider({
isCreate,
}: {
isCreate: boolean;
}): JSX.Element {
const form = Form.useFormInstance();

const [expandedSection, setExpandedSection] = useState<ExpandedSection>(null);

const handleClaimMappingChange = useCallback((expanded: boolean): void => {
setExpandedSection(expanded ? 'claim-mapping' : null);
}, []);

const handleRoleMappingChange = useCallback((expanded: boolean): void => {
setExpandedSection(expanded ? 'role-mapping' : null);
}, []);

return (
<div className="authn-provider">
<section className="authn-provider__header">
<h3 className="authn-provider__title">Edit OIDC Authentication</h3>
<p className="authn-provider__description">
Configure OpenID Connect Single Sign-On with your Identity Provider. Read
the{' '}
<a
href="https://signoz.io/docs/userguide/sso-authentication"
target="_blank"
rel="noreferrer"
>
docs
</a>{' '}
for more information.
</p>
<div className="saml">
<section className="header">
<Typography.Text className="title">
Edit OIDC Authentication
</Typography.Text>
</section>

<div className="authn-provider__columns">
{/* Left Column - Core OIDC Settings */}
<div className="authn-provider__left">
<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="oidc-domain">
Domain
<Tooltip title="The email domain for users who should use SSO (e.g., `example.com` for users with `@example.com` emails)">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name="name"
className="authn-provider__form-item"
rules={[
{ required: true, message: 'Domain is required', whitespace: true },
]}
>
<Input id="oidc-domain" disabled={!isCreate} />
</Form.Item>
</div>
<Form.Item
label="Domain"
name="name"
tooltip={{
title:
'The email domain for users who should use SSO (e.g., `example.com` for users with `@example.com` emails)',
}}
>
<Input disabled={!isCreate} />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="oidc-issuer">
Issuer URL
<Tooltip title='The URL identifier for the OIDC provider. For example: "https://accounts.google.com" or "https://login.salesforce.com".'>
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['oidcConfig', 'issuer']}
className="authn-provider__form-item"
rules={[
{ required: true, message: 'Issuer URL is required', whitespace: true },
]}
>
<Input id="oidc-issuer" />
</Form.Item>
</div>
<Form.Item
label="Issuer URL"
name={['oidcConfig', 'issuer']}
tooltip={{
title: `It is the URL identifier for the service. For example: "https://accounts.google.com" or "https://login.salesforce.com".`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="oidc-issuer-alias">
Issuer Alias
<Tooltip title="Optional: Override the issuer URL from .well-known/openid-configuration for providers like Azure or Oracle IDCS.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['oidcConfig', 'issuerAlias']}
className="authn-provider__form-item"
>
<Input id="oidc-issuer-alias" />
</Form.Item>
</div>
<Form.Item
label="Issuer Alias"
name={['oidcConfig', 'issuerAlias']}
tooltip={{
title: `Some offspec providers like Azure, Oracle IDCS have oidc discovery url different from issuer url which causes issuerValidation to fail.
This provides a way to override the Issuer url from the .well-known/openid-configuration issuer`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="oidc-client-id">
Client ID
<Tooltip title="The application's client ID from your OIDC provider.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['oidcConfig', 'clientId']}
className="authn-provider__form-item"
rules={[
{ required: true, message: 'Client ID is required', whitespace: true },
]}
>
<Input id="oidc-client-id" />
</Form.Item>
</div>
<Form.Item
label="Client ID"
name={['oidcConfig', 'clientId']}
tooltip={{ title: `It is the application's ID.` }}
>
<Input />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="oidc-client-secret">
Client Secret
<Tooltip title="The application's client secret from your OIDC provider.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['oidcConfig', 'clientSecret']}
className="authn-provider__form-item"
rules={[
{
required: true,
message: 'Client Secret is required',
whitespace: true,
},
]}
>
<Input id="oidc-client-secret" />
</Form.Item>
</div>
<Form.Item
label="Client Secret"
name={['oidcConfig', 'clientSecret']}
tooltip={{ title: `It is the application's secret.` }}
>
<Input />
</Form.Item>

<div className="authn-provider__checkbox-row">
<Form.Item
name={['oidcConfig', 'insecureSkipEmailVerified']}
valuePropName="checked"
noStyle
>
<Checkbox
id="oidc-skip-email-verification"
labelName="Skip Email Verification"
onCheckedChange={(checked: boolean): void => {
form.setFieldValue(
['oidcConfig', 'insecureSkipEmailVerified'],
checked,
);
}}
/>
</Form.Item>
<Tooltip title='Whether to skip email verification. Defaults to "false"'>
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</div>
<Form.Item
label="Email Claim Mapping"
name={['oidcConfig', 'claimMapping', 'email']}
tooltip={{
title: `Mapping of email claims to the corresponding email field in the token.`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__checkbox-row">
<Form.Item
name={['oidcConfig', 'getUserInfo']}
valuePropName="checked"
noStyle
>
<Checkbox
id="oidc-get-user-info"
labelName="Get User Info"
onCheckedChange={(checked: boolean): void => {
form.setFieldValue(['oidcConfig', 'getUserInfo'], checked);
}}
/>
</Form.Item>
<Tooltip title="Use the userinfo endpoint to get additional claims. Useful when providers return thin ID tokens.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</div>
<Form.Item
label="Skip Email Verification"
name={['oidcConfig', 'insecureSkipEmailVerified']}
valuePropName="checked"
className="field"
tooltip={{
title: `Whether to skip email verification. Defaults to "false"`,
}}
>
<Checkbox />
</Form.Item>

<Callout
type="warning"
size="small"
showIcon
description="OIDC won't be enabled unless you enter all the attributes above"
className="callout"
/>
</div>
<Form.Item
label="Get User Info"
name={['oidcConfig', 'getUserInfo']}
valuePropName="checked"
className="field"
tooltip={{
title: `Uses the userinfo endpoint to get additional claims for the token. This is especially useful where upstreams return "thin" id tokens`,
}}
>
<Checkbox />
</Form.Item>

{/* Right Column - Advanced Settings */}
<div className="authn-provider__right">
<ClaimMappingSection
fieldNamePrefix={['oidcConfig', 'claimMapping']}
isExpanded={expandedSection === 'claim-mapping'}
onExpandChange={handleClaimMappingChange}
/>

<RoleMappingSection
fieldNamePrefix={['roleMapping']}
isExpanded={expandedSection === 'role-mapping'}
onExpandChange={handleRoleMappingChange}
/>
</div>
</div>
<Callout
type="warning"
size="small"
showIcon
description="OIDC won’t be enabled unless you enter all the attributes above"
className="callout"
/>
</div>
);
}
@@ -1,191 +1,82 @@
import { useCallback, useState } from 'react';
import { Callout } from '@signozhq/callout';
import { Checkbox } from '@signozhq/checkbox';
import { Style } from '@signozhq/design-tokens';
import { CircleHelp } from '@signozhq/icons';
import { Input } from '@signozhq/input';
import { Form, Tooltip } from 'antd';
import TextArea from 'antd/lib/input/TextArea';

import AttributeMappingSection from './components/AttributeMappingSection';
import RoleMappingSection from './components/RoleMappingSection';
import { Checkbox, Form, Input, Typography } from 'antd';

import './Providers.styles.scss';

type ExpandedSection = 'attribute-mapping' | 'role-mapping' | null;

function ConfigureSAMLAuthnProvider({
isCreate,
}: {
isCreate: boolean;
}): JSX.Element {
const form = Form.useFormInstance();

const [expandedSection, setExpandedSection] = useState<ExpandedSection>(null);

const handleAttributeMappingChange = useCallback((expanded: boolean): void => {
setExpandedSection(expanded ? 'attribute-mapping' : null);
}, []);

const handleRoleMappingChange = useCallback((expanded: boolean): void => {
setExpandedSection(expanded ? 'role-mapping' : null);
}, []);

return (
<div className="authn-provider">
<section className="authn-provider__header">
<h3 className="authn-provider__title">Edit SAML Authentication</h3>
<p className="authn-provider__description">
Configure SAML 2.0 Single Sign-On with your Identity Provider. Read the{' '}
<a
href="https://signoz.io/docs/userguide/sso-authentication"
target="_blank"
rel="noreferrer"
>
docs
</a>{' '}
for more information.
</p>
<div className="saml">
<section className="header">
<Typography.Text className="title">
Edit SAML Authentication
</Typography.Text>
</section>

<div className="authn-provider__columns">
{/* Left Column - Core SAML Settings */}
<div className="authn-provider__left">
<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="saml-domain">
Domain
<Tooltip title="The email domain for users who should use SSO (e.g., `example.com` for users with `@example.com` emails)">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name="name"
className="authn-provider__form-item"
rules={[
{ required: true, message: 'Domain is required', whitespace: true },
]}
>
<Input id="saml-domain" disabled={!isCreate} />
</Form.Item>
</div>
<Form.Item
label="Domain"
name="name"
tooltip={{
title:
'The email domain for users who should use SSO (e.g., `example.com` for users with `@example.com` emails)',
}}
>
<Input disabled={!isCreate} />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="saml-acs-url">
SAML ACS URL
<Tooltip title="The SSO endpoint of the SAML identity provider. It can typically be found in the SingleSignOnService element in the SAML metadata of the identity provider.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['samlConfig', 'samlIdp']}
className="authn-provider__form-item"
rules={[
{
required: true,
message: 'SAML ACS URL is required',
whitespace: true,
},
]}
>
<Input id="saml-acs-url" />
</Form.Item>
</div>
<Form.Item
label="SAML ACS URL"
name={['samlConfig', 'samlIdp']}
tooltip={{
title: `The SSO endpoint of the SAML identity provider. It can typically be found in the SingleSignOnService element in the SAML metadata of the identity provider. Example: <md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" Location="{samlIdp}"/>`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="saml-entity-id">
SAML Entity ID
<Tooltip title="The entityID of the SAML identity provider. It can typically be found in the EntityID attribute of the EntityDescriptor element in the SAML metadata.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['samlConfig', 'samlEntity']}
className="authn-provider__form-item"
rules={[
{
required: true,
message: 'SAML Entity ID is required',
whitespace: true,
},
]}
>
<Input id="saml-entity-id" />
</Form.Item>
</div>
<Form.Item
label="SAML Entity ID"
name={['samlConfig', 'samlEntity']}
tooltip={{
title: `The entityID of the SAML identity provider. It can typically be found in the EntityID attribute of the EntityDescriptor element in the SAML metadata of the identity provider. Example: <md:EntityDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata" entityID="{samlEntity}">`,
}}
>
<Input />
</Form.Item>

<div className="authn-provider__field-group">
<label className="authn-provider__label" htmlFor="saml-certificate">
SAML X.509 Certificate
<Tooltip title="The certificate of the SAML identity provider. It can typically be found in the X509Certificate element in the SAML metadata.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={['samlConfig', 'samlCert']}
className="authn-provider__form-item"
rules={[
{
required: true,
message: 'SAML Certificate is required',
whitespace: true,
},
]}
>
<TextArea
id="saml-certificate"
rows={3}
placeholder="Paste X.509 certificate"
className="authn-provider__textarea"
/>
</Form.Item>
</div>
<Form.Item
label="SAML X.509 Certificate"
name={['samlConfig', 'samlCert']}
tooltip={{
title: `The certificate of the SAML identity provider. It can typically be found in the X509Certificate element in the SAML metadata of the identity provider. Example: <ds:X509Certificate><ds:X509Certificate>{samlCert}</ds:X509Certificate></ds:X509Certificate>`,
}}
>
<Input.TextArea rows={4} />
</Form.Item>

<div className="authn-provider__checkbox-row">
<Form.Item
name={['samlConfig', 'insecureSkipAuthNRequestsSigned']}
valuePropName="checked"
noStyle
>
<Checkbox
id="saml-skip-signing"
labelName="Skip Signing AuthN Requests"
onCheckedChange={(checked: boolean): void => {
form.setFieldValue(
['samlConfig', 'insecureSkipAuthNRequestsSigned'],
checked,
);
}}
/>
</Form.Item>
<Tooltip title="Whether to skip signing the SAML requests. For providers like JumpCloud, this should be enabled.">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</div>
<Form.Item
label="Skip Signing AuthN Requests"
name={['samlConfig', 'insecureSkipAuthNRequestsSigned']}
valuePropName="checked"
className="field"
tooltip={{
title: `Whether to skip signing the SAML requests. It can typically be found in the WantAuthnRequestsSigned attribute of the IDPSSODescriptor element in the SAML metadata of the identity provider. Example: <md:IDPSSODescriptor WantAuthnRequestsSigned="false" protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol">
For providers like jumpcloud, this should be set to true.Note: This is the reverse of WantAuthnRequestsSigned. If WantAuthnRequestsSigned is false, then InsecureSkipAuthNRequestsSigned should be true.`,
}}
>
<Checkbox />
</Form.Item>

<Callout
type="warning"
size="small"
showIcon
description="SAML won't be enabled unless you enter all the attributes above"
className="callout"
/>
</div>

{/* Right Column - Advanced Settings */}
<div className="authn-provider__right">
<AttributeMappingSection
fieldNamePrefix={['samlConfig', 'attributeMapping']}
isExpanded={expandedSection === 'attribute-mapping'}
onExpandChange={handleAttributeMappingChange}
/>

<RoleMappingSection
fieldNamePrefix={['roleMapping']}
isExpanded={expandedSection === 'role-mapping'}
onExpandChange={handleRoleMappingChange}
/>
</div>
</div>
<Callout
type="warning"
size="small"
showIcon
description="SAML won’t be enabled unless you enter all the attributes above"
className="callout"
/>
</div>
);
}
@@ -1,240 +1,24 @@
.authn-provider {
.google-auth {
display: flex;
flex-direction: column;

&__header {
display: flex;
flex-direction: column;
gap: 4px;
margin-bottom: 16px;
.ant-form-item {
margin-bottom: 12px !important;
}

&__title {
margin: 0;
color: var(--l1-foreground);
font-family: 'Inter', sans-serif;
font-size: 14px;
font-weight: 600;
line-height: 20px;
}

&__description {
margin: 0;
color: var(--l2-foreground);
font-family: 'Inter', sans-serif;
font-size: 14px;
font-weight: 400;
line-height: 20px;

a {
color: var(--accent-primary);
text-decoration: none;

&:hover {
text-decoration: underline;
}
}
}

&__columns {
display: grid;
grid-template-columns: 0.9fr 1fr;
gap: 24px;
}

&__left {
display: flex;
flex-direction: column;
}

&__right {
border-left: 1px solid var(--l3-border);
padding-left: 24px;
display: flex;
flex-direction: column;
}

&__field-group {
.header {
display: flex;
flex-direction: column;
gap: 4px;
margin-bottom: 12px;
}

&__label {
display: inline-flex;
align-items: center;
gap: 6px;
color: var(--l1-foreground);
}

&__form-item {
margin-bottom: 0 !important;
}

&__checkbox-row {
display: flex;
align-items: center;
gap: 6px;
margin-bottom: 12px;
}

input,
textarea {
height: 32px;
background: var(--l3-background) !important;
border: 1px solid var(--l3-border) !important;
border-radius: 2px;
color: var(--l1-foreground) !important;

&::placeholder {
color: var(--l3-foreground) !important;
opacity: 1;
.title {
font-weight: bold;
}

&:hover {
border-color: var(--l3-border) !important;
.description {
margin-bottom: 0px !important;
}

&:focus,
&:focus-visible {
border-color: var(--primary) !important;
box-shadow: none !important;
outline: none;
}
}

textarea {
height: auto;
}
&__textarea {
min-height: 60px !important;
max-height: 200px;
resize: vertical;
background: var(--l3-background) !important;
border: 1px solid var(--l3-border) !important;
border-radius: 2px;
color: var(--l1-foreground) !important;
font-family: 'SF Mono', monospace;
font-size: 12px;
line-height: 18px;

&::placeholder {
color: var(--l3-foreground) !important;
font-family: Inter, sans-serif;
}

&:hover {
border-color: var(--l3-border) !important;
}

&:focus,
&:focus-visible {
border-color: var(--primary) !important;
box-shadow: none !important;
outline: none;
}
}

button[role='checkbox'] {
border: 1px solid var(--l2-foreground) !important;
border-radius: 2px;

&[data-state='checked'] {
background-color: var(--primary) !important;
border-color: var(--primary) !important;
}
}

&__collapse {
background: transparent !important;

.ant-collapse-item {
border: none !important;
}

.ant-collapse-header {
padding: 0 !important;
}

.ant-collapse-content {
border-top: none !important;
background: transparent !important;
}

.ant-collapse-content-box {
padding: 12px 0 0 24px !important;
}
}

&__collapse-header {
display: flex;
align-items: flex-start;
gap: 8px;
cursor: pointer;
position: relative;
width: 100%;

svg {
margin-top: 2px;
color: var(--l3-foreground);
flex-shrink: 0;
}
}

&__collapse-header-text {
display: flex;
flex-direction: column;
gap: 4px;
}

&__section-title {
margin: 0;
color: var(--l1-foreground);
}

&__section-description {
margin: 0;
color: var(--l3-foreground);
}

&__group-content {
display: flex;
flex-direction: column;
gap: 12px;
}

&__group-fields {
display: flex;
flex-direction: column;
gap: 24px;
max-height: 45vh;
overflow-y: auto;
padding-right: 4px;

.authn-provider__field-group,
.authn-provider__checkbox-row {
margin-bottom: 0;
}
&::-webkit-scrollbar {
width: 4px;
}

&::-webkit-scrollbar-track {
background: transparent;
}

&::-webkit-scrollbar-thumb {
background: var(--l3-foreground);
border-radius: 4px;

&:hover {
background: var(--l2-foreground);
}
}

scrollbar-width: thin;
scrollbar-color: var(--l3-foreground) transparent;
}

.callout {
@@ -1,128 +0,0 @@
.attribute-mapping-section {
display: flex;
flex-direction: column;

&__collapse {
background: transparent !important;

.ant-collapse-item {
border: none !important;
}

.ant-collapse-header {
padding: 0 !important;
}

.ant-collapse-content {
border-top: none !important;
background: transparent !important;
}

.ant-collapse-content-box {
padding: 12px 0 0 24px !important;
}
}

&__collapse-header {
display: flex;
align-items: flex-start;
gap: 8px;
cursor: pointer;
position: relative;
width: 100%;

svg {
margin-top: 2px;
color: var(--l3-foreground);
flex-shrink: 0;
}
}

&__collapse-header-text {
display: flex;
flex-direction: column;
gap: 4px;
}

&__section-title {
margin: 0;
color: var(--l1-foreground);
}

&__section-description {
margin: 0;
color: var(--l3-foreground);
}

&__content {
display: flex;
flex-direction: column;
gap: 16px;
max-height: 45vh;
overflow-y: auto;
padding-right: 4px;

// Thin scrollbar
&::-webkit-scrollbar {
width: 4px;
}

&::-webkit-scrollbar-track {
background: transparent;
}

&::-webkit-scrollbar-thumb {
background: var(--l3-foreground);
border-radius: 4px;

&:hover {
background: var(--l2-foreground);
}
}

scrollbar-width: thin;
scrollbar-color: var(--l3-foreground) transparent;
}

&__field-group {
display: flex;
flex-direction: column;
gap: 4px;
}

&__label {
display: inline-flex;
align-items: center;
gap: 6px;
color: var(--l1-foreground);
}

&__form-item {
margin-bottom: 0 !important;
}

// todo: https://github.com/SigNoz/components/issues/116
input {
height: 32px;
background: var(--l3-background) !important;
border: 1px solid var(--l3-border) !important;
border-radius: 2px;
color: var(--l1-foreground) !important;

&::placeholder {
color: var(--l3-foreground) !important;
opacity: 1;
}

&:hover {
border-color: var(--l3-border) !important;
}

&:focus,
&:focus-visible {
border-color: var(--primary) !important;
box-shadow: none !important;
outline: none;
}
}
}
@@ -1,175 +0,0 @@
import { useCallback, useState } from 'react';
import { Color, Style } from '@signozhq/design-tokens';
import {
ChevronDown,
ChevronRight,
CircleHelp,
TriangleAlert,
} from '@signozhq/icons';
import { Input } from '@signozhq/input';
import { Collapse, Form, Tooltip } from 'antd';
import { useCollapseSectionErrors } from 'hooks/useCollapseSectionErrors';

import './AttributeMappingSection.styles.scss';

interface AttributeMappingSectionProps {
fieldNamePrefix: string[];
isExpanded?: boolean;
onExpandChange?: (expanded: boolean) => void;
}

function AttributeMappingSection({
fieldNamePrefix,
isExpanded,
onExpandChange,
}: AttributeMappingSectionProps): JSX.Element {
// Support both controlled and uncontrolled modes
const [internalExpanded, setInternalExpanded] = useState(false);
const isControlled = isExpanded !== undefined;
const expanded = isControlled ? isExpanded : internalExpanded;

const handleCollapseChange = useCallback(
(keys: string | string[]): void => {
const newExpanded = Array.isArray(keys) ? keys.length > 0 : !!keys;
if (isControlled && onExpandChange) {
onExpandChange(newExpanded);
} else {
setInternalExpanded(newExpanded);
}
},
[isControlled, onExpandChange],
);

const collapseActiveKey = expanded ? ['attribute-mapping'] : [];
const { hasErrors, errorMessages } = useCollapseSectionErrors(fieldNamePrefix);

return (
<div className="attribute-mapping-section">
<Collapse
bordered={false}
activeKey={collapseActiveKey}
onChange={handleCollapseChange}
className="attribute-mapping-section__collapse"
expandIcon={(): null => null}
>
<Collapse.Panel
key="attribute-mapping"
header={
<div
className="attribute-mapping-section__collapse-header"
role="button"
aria-expanded={expanded}
aria-controls="attribute-mapping-content"
>
{!expanded ? <ChevronRight size={16} /> : <ChevronDown size={16} />}
<div className="attribute-mapping-section__collapse-header-text">
<h4 className="attribute-mapping-section__section-title">
Attribute Mapping (Advanced)
</h4>
<p className="attribute-mapping-section__section-description">
Configure how SAML assertion attributes from your Identity Provider map
to SigNoz user attributes. Leave empty to use default values.
</p>
</div>
{!expanded && hasErrors && (
<Tooltip
title={
<>
{errorMessages.map((msg) => (
<div key={msg}>{msg}</div>
))}
</>
}
>
<TriangleAlert size={16} color={Color.BG_CHERRY_500} />
</Tooltip>
)}
</div>
}
>
<div
id="attribute-mapping-content"
className="attribute-mapping-section__content"
>
<div className="attribute-mapping-section__field-group">
<label
className="attribute-mapping-section__label"
htmlFor="email-attribute"
>
Email Attribute
<Tooltip title="The SAML attribute key that contains the user's email. Default: 'email'">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={[...fieldNamePrefix, 'email']}
className="attribute-mapping-section__form-item"
>
<Input id="email-attribute" placeholder="Email" />
</Form.Item>
</div>

{/* Name Attribute */}
<div className="attribute-mapping-section__field-group">
<label
className="attribute-mapping-section__label"
htmlFor="name-attribute"
>
Name Attribute
<Tooltip title="The SAML attribute key that contains the user's display name. Default: 'name'">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={[...fieldNamePrefix, 'name']}
className="attribute-mapping-section__form-item"
>
<Input id="name-attribute" placeholder="Name" />
</Form.Item>
</div>

{/* Groups Attribute */}
<div className="attribute-mapping-section__field-group">
<label
className="attribute-mapping-section__label"
htmlFor="groups-attribute"
>
Groups Attribute
<Tooltip title="The SAML attribute key that contains the user's group memberships. Used for role mapping. Default: 'groups'">
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
</Tooltip>
</label>
<Form.Item
name={[...fieldNamePrefix, 'groups']}
className="attribute-mapping-section__form-item"
>
<Input id="groups-attribute" placeholder="Groups" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
|
||||
{/* Role Attribute */}
|
||||
<div className="attribute-mapping-section__field-group">
|
||||
<label
|
||||
className="attribute-mapping-section__label"
|
||||
htmlFor="role-attribute"
|
||||
>
|
||||
Role Attribute
|
||||
<Tooltip title="The SAML attribute key that contains the user's role directly from the IDP. Default: 'role'">
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</label>
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'role']}
|
||||
className="attribute-mapping-section__form-item"
|
||||
>
|
||||
<Input id="role-attribute" placeholder="Role" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
</Collapse.Panel>
|
||||
</Collapse>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default AttributeMappingSection;
|
||||
@@ -1,128 +0,0 @@
|
||||
.claim-mapping-section {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
&__collapse {
|
||||
background: transparent !important;
|
||||
|
||||
.ant-collapse-item {
|
||||
border: none !important;
|
||||
}
|
||||
|
||||
.ant-collapse-header {
|
||||
padding: 0 !important;
|
||||
}
|
||||
|
||||
.ant-collapse-content {
|
||||
border-top: none !important;
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
.ant-collapse-content-box {
|
||||
padding: 12px 0 0 24px !important;
|
||||
}
|
||||
}
|
||||
|
||||
&__collapse-header {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
width: 100%;
|
||||
|
||||
svg {
|
||||
margin-top: 2px;
|
||||
color: var(--l3-foreground);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
}
|
||||
|
||||
&__collapse-header-text {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
&__section-title {
|
||||
margin: 0;
|
||||
color: var(--l1-foreground);
|
||||
}
|
||||
|
||||
&__section-description {
|
||||
margin: 0;
|
||||
color: var(--l3-foreground);
|
||||
}
|
||||
|
||||
&__content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
max-height: 45vh;
|
||||
overflow-y: auto;
|
||||
padding-right: 4px;
|
||||
|
||||
// Thin scrollbar
|
||||
&::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--l3-foreground);
|
||||
border-radius: 4px;
|
||||
|
||||
&:hover {
|
||||
background: var(--l2-foreground);
|
||||
}
|
||||
}
|
||||
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: var(--l3-foreground) transparent;
|
||||
}
|
||||
|
||||
&__field-group {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
&__label {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
color: var(--l1-foreground);
|
||||
}
|
||||
|
||||
&__form-item {
|
||||
margin-bottom: 0 !important;
|
||||
}
|
||||
|
||||
// todo: https://github.com/SigNoz/components/issues/116
|
||||
input {
|
||||
height: 32px;
|
||||
background: var(--l3-background) !important;
|
||||
border: 1px solid var(--l3-border) !important;
|
||||
border-radius: 2px;
|
||||
color: var(--l1-foreground) !important;
|
||||
|
||||
&::placeholder {
|
||||
color: var(--l3-foreground) !important;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
border-color: var(--l3-border) !important;
|
||||
}
|
||||
|
||||
&:focus,
|
||||
&:focus-visible {
|
||||
border-color: var(--primary) !important;
|
||||
box-shadow: none !important;
|
||||
outline: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,161 +0,0 @@
|
||||
import { useCallback, useState } from 'react';
|
||||
import { Color, Style } from '@signozhq/design-tokens';
|
||||
import {
|
||||
ChevronDown,
|
||||
ChevronRight,
|
||||
CircleHelp,
|
||||
TriangleAlert,
|
||||
} from '@signozhq/icons';
|
||||
import { Input } from '@signozhq/input';
|
||||
import { Collapse, Form, Tooltip } from 'antd';
|
||||
import { useCollapseSectionErrors } from 'hooks/useCollapseSectionErrors';
|
||||
|
||||
import './ClaimMappingSection.styles.scss';
|
||||
|
||||
interface ClaimMappingSectionProps {
|
||||
fieldNamePrefix: string[];
|
||||
isExpanded?: boolean;
|
||||
onExpandChange?: (expanded: boolean) => void;
|
||||
}
|
||||
|
||||
function ClaimMappingSection({
|
||||
fieldNamePrefix,
|
||||
isExpanded,
|
||||
onExpandChange,
|
||||
}: ClaimMappingSectionProps): JSX.Element {
|
||||
// Support both controlled and uncontrolled modes
|
||||
const [internalExpanded, setInternalExpanded] = useState(false);
|
||||
const isControlled = isExpanded !== undefined;
|
||||
const expanded = isControlled ? isExpanded : internalExpanded;
|
||||
|
||||
const handleCollapseChange = useCallback(
|
||||
(keys: string | string[]): void => {
|
||||
const newExpanded = Array.isArray(keys) ? keys.length > 0 : !!keys;
|
||||
if (isControlled && onExpandChange) {
|
||||
onExpandChange(newExpanded);
|
||||
} else {
|
||||
setInternalExpanded(newExpanded);
|
||||
}
|
||||
},
|
||||
[isControlled, onExpandChange],
|
||||
);
|
||||
|
||||
const collapseActiveKey = expanded ? ['claim-mapping'] : [];
|
||||
const { hasErrors, errorMessages } = useCollapseSectionErrors(fieldNamePrefix);
|
||||
|
||||
return (
|
||||
<div className="claim-mapping-section">
|
||||
<Collapse
|
||||
bordered={false}
|
||||
activeKey={collapseActiveKey}
|
||||
onChange={handleCollapseChange}
|
||||
className="claim-mapping-section__collapse"
|
||||
expandIcon={(): null => null}
|
||||
>
|
||||
<Collapse.Panel
|
||||
key="claim-mapping"
|
||||
header={
|
||||
<div
|
||||
className="claim-mapping-section__collapse-header"
|
||||
role="button"
|
||||
aria-expanded={expanded}
|
||||
aria-controls="claim-mapping-content"
|
||||
>
|
||||
{!expanded ? <ChevronRight size={16} /> : <ChevronDown size={16} />}
|
||||
<div className="claim-mapping-section__collapse-header-text">
|
||||
<h4 className="claim-mapping-section__section-title">
|
||||
Claim Mapping (Advanced)
|
||||
</h4>
|
||||
<p className="claim-mapping-section__section-description">
|
||||
Configure how claims from your Identity Provider map to SigNoz user
|
||||
attributes. Leave empty to use default values.
|
||||
</p>
|
||||
</div>
|
||||
{!expanded && hasErrors && (
|
||||
<Tooltip
|
||||
title={
|
||||
<>
|
||||
{errorMessages.map((msg) => (
|
||||
<div key={msg}>{msg}</div>
|
||||
))}
|
||||
</>
|
||||
}
|
||||
>
|
||||
<TriangleAlert size={16} color={Color.BG_CHERRY_500} />
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<div id="claim-mapping-content" className="claim-mapping-section__content">
|
||||
{/* Email Claim */}
|
||||
<div className="claim-mapping-section__field-group">
|
||||
<label className="claim-mapping-section__label" htmlFor="email-claim">
|
||||
Email Claim
|
||||
<Tooltip title="The claim key that contains the user's email address. Default: 'email'">
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</label>
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'email']}
|
||||
className="claim-mapping-section__form-item"
|
||||
>
|
||||
<Input id="email-claim" placeholder="Email" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
|
||||
{/* Name Claim */}
|
||||
<div className="claim-mapping-section__field-group">
|
||||
<label className="claim-mapping-section__label" htmlFor="name-claim">
|
||||
Name Claim
|
||||
<Tooltip title="The claim key that contains the user's display name. Default: 'name'">
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</label>
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'name']}
|
||||
className="claim-mapping-section__form-item"
|
||||
>
|
||||
<Input id="name-claim" placeholder="Name" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
|
||||
{/* Groups Claim */}
|
||||
<div className="claim-mapping-section__field-group">
|
||||
<label className="claim-mapping-section__label" htmlFor="groups-claim">
|
||||
Groups Claim
|
||||
<Tooltip title="The claim key that contains the user's group memberships. Used for role mapping. Default: 'groups'">
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</label>
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'groups']}
|
||||
className="claim-mapping-section__form-item"
|
||||
>
|
||||
<Input id="groups-claim" placeholder="Groups" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
|
||||
{/* Role Claim */}
|
||||
<div className="claim-mapping-section__field-group">
|
||||
<label className="claim-mapping-section__label" htmlFor="role-claim">
|
||||
Role Claim
|
||||
<Tooltip title="The claim key that contains the user's role directly from the IDP. Default: 'role'">
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</label>
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'role']}
|
||||
className="claim-mapping-section__form-item"
|
||||
>
|
||||
<Input id="role-claim" placeholder="Role" />
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
</Collapse.Panel>
|
||||
</Collapse>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default ClaimMappingSection;
|
||||
@@ -1,103 +0,0 @@
|
||||
.domain-mapping-list {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
|
||||
&__header {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
&__title {
|
||||
margin: 0;
|
||||
color: var(--l1-foreground);
|
||||
}
|
||||
|
||||
&__description {
|
||||
margin: 0;
|
||||
color: var(--l3-foreground);
|
||||
}
|
||||
|
||||
&__items {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
&__row {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
|
||||
.ant-form-item {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
&__field {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
&__remove-btn {
|
||||
flex-shrink: 0;
|
||||
width: 32px !important;
|
||||
height: 32px !important;
|
||||
min-width: 32px !important;
|
||||
padding: 0 !important;
|
||||
border: none !important;
|
||||
border-radius: 2px !important;
|
||||
background: transparent !important;
|
||||
color: var(--destructive) !important;
|
||||
opacity: 0.6 !important;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s, opacity 0.2s;
|
||||
box-shadow: none !important;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
svg {
|
||||
color: var(--destructive) !important;
|
||||
width: 12px !important;
|
||||
height: 12px !important;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
background: rgba(229, 72, 77, 0.1) !important;
|
||||
opacity: 0.9 !important;
|
||||
color: var(--destructive) !important;
|
||||
|
||||
svg {
|
||||
color: var(--destructive) !important;
|
||||
}
|
||||
}
|
||||
|
||||
&:active {
|
||||
opacity: 0.7 !important;
|
||||
background: rgba(229, 72, 77, 0.15) !important;
|
||||
}
|
||||
}
|
||||
|
||||
&__add-btn {
|
||||
width: 100%;
|
||||
|
||||
// Ensure icon is visible
|
||||
svg,
|
||||
[class*='icon'] {
|
||||
color: var(--l2-foreground) !important;
|
||||
display: inline-block !important;
|
||||
opacity: 1 !important;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
color: var(--l1-foreground);
|
||||
|
||||
svg,
|
||||
[class*='icon'] {
|
||||
color: var(--l1-foreground) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,87 +0,0 @@
|
||||
import { Button } from '@signozhq/button';
|
||||
import { Plus, Trash2 } from '@signozhq/icons';
|
||||
import { Input } from '@signozhq/input';
|
||||
import { Form } from 'antd';
|
||||
|
||||
import './DomainMappingList.styles.scss';
|
||||
|
||||
const EMAIL_REGEX = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
||||
|
||||
const validateEmail = (_: unknown, value: string): Promise<void> => {
|
||||
if (!value) {
|
||||
return Promise.reject(new Error('Admin email is required'));
|
||||
}
|
||||
if (!EMAIL_REGEX.test(value)) {
|
||||
return Promise.reject(new Error('Please enter a valid email'));
|
||||
}
|
||||
return Promise.resolve();
|
||||
};
|
||||
|
||||
interface DomainMappingListProps {
|
||||
fieldNamePrefix: string[];
|
||||
}
|
||||
|
||||
function DomainMappingList({
|
||||
fieldNamePrefix,
|
||||
}: DomainMappingListProps): JSX.Element {
|
||||
return (
|
||||
<div className="domain-mapping-list">
|
||||
<div className="domain-mapping-list__header">
|
||||
<span className="domain-mapping-list__title">
|
||||
Domain to Admin Email Mapping
|
||||
</span>
|
||||
<p className="domain-mapping-list__description">
|
||||
Map workspace domains to admin emails for service account impersonation.
|
||||
Use "*" as a wildcard for any domain.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<Form.List name={fieldNamePrefix}>
|
||||
{(fields, { add, remove }): JSX.Element => (
|
||||
<div className="domain-mapping-list__items">
|
||||
{fields.map((field) => (
|
||||
<div key={field.key} className="domain-mapping-list__row">
|
||||
<Form.Item
|
||||
name={[field.name, 'domain']}
|
||||
className="domain-mapping-list__field"
|
||||
rules={[{ required: true, message: 'Domain is required' }]}
|
||||
>
|
||||
<Input placeholder="Domain (e.g., example.com or *)" />
|
||||
</Form.Item>
|
||||
|
||||
<Form.Item
|
||||
name={[field.name, 'adminEmail']}
|
||||
className="domain-mapping-list__field"
|
||||
rules={[{ validator: validateEmail }]}
|
||||
>
|
||||
<Input placeholder="Admin Email" />
|
||||
</Form.Item>
|
||||
|
||||
<Button
|
||||
variant="ghost"
|
||||
color="secondary"
|
||||
className="domain-mapping-list__remove-btn"
|
||||
onClick={(): void => remove(field.name)}
|
||||
aria-label="Remove mapping"
|
||||
>
|
||||
<Trash2 size={12} />
|
||||
</Button>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<Button
|
||||
variant="dashed"
|
||||
onClick={(): void => add({ domain: '', adminEmail: '' })}
|
||||
prefixIcon={<Plus size={14} />}
|
||||
className="domain-mapping-list__add-btn"
|
||||
>
|
||||
Add Domain Mapping
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</Form.List>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default DomainMappingList;
|
||||
@@ -1,28 +0,0 @@
|
||||
.email-tag-input {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
|
||||
&__select {
|
||||
width: 100%;
|
||||
|
||||
.ant-select-selector {
|
||||
.ant-select-selection-search {
|
||||
input {
|
||||
height: 32px !important;
|
||||
border: none !important;
|
||||
background: transparent !important;
|
||||
padding: 0 !important;
|
||||
margin: 0 !important;
|
||||
box-shadow: none !important;
|
||||
font-family: inherit !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&__error {
|
||||
margin: 0;
|
||||
color: var(--destructive);
|
||||
}
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
import { useCallback, useState } from 'react';
|
||||
import { Select, Tooltip } from 'antd';
|
||||
|
||||
import './EmailTagInput.styles.scss';
|
||||
|
||||
const EMAIL_REGEX = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
||||
|
||||
interface EmailTagInputProps {
|
||||
value?: string[];
|
||||
onChange?: (value: string[]) => void;
|
||||
placeholder?: string;
|
||||
}
|
||||
|
||||
function EmailTagInput({
|
||||
value = [],
|
||||
onChange,
|
||||
placeholder = 'Type an email and press Enter',
|
||||
}: EmailTagInputProps): JSX.Element {
|
||||
const [validationError, setValidationError] = useState('');
|
||||
|
||||
const handleChange = useCallback(
|
||||
(newValues: string[]): void => {
|
||||
const addedValues = newValues.filter((v) => !value.includes(v));
|
||||
const invalidEmail = addedValues.find((v) => !EMAIL_REGEX.test(v));
|
||||
|
||||
if (invalidEmail) {
|
||||
setValidationError(`"${invalidEmail}" is not a valid email`);
|
||||
return;
|
||||
}
|
||||
setValidationError('');
|
||||
onChange?.(newValues);
|
||||
},
|
||||
[onChange, value],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="email-tag-input">
|
||||
<Tooltip
|
||||
title={validationError}
|
||||
open={!!validationError}
|
||||
placement="topRight"
|
||||
>
|
||||
<Select
|
||||
mode="tags"
|
||||
value={value}
|
||||
onChange={handleChange}
|
||||
placeholder={placeholder}
|
||||
tokenSeparators={[',', ' ']}
|
||||
className="email-tag-input__select"
|
||||
allowClear
|
||||
status={validationError ? 'error' : undefined}
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default EmailTagInput;
|
||||
@@ -1,288 +0,0 @@
|
||||
.role-mapping-section {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin-top: 24px;
|
||||
|
||||
&__collapse {
|
||||
background: transparent !important;
|
||||
|
||||
.ant-collapse-item {
|
||||
border: none !important;
|
||||
}
|
||||
|
||||
.ant-collapse-header {
|
||||
padding: 0 !important;
|
||||
}
|
||||
|
||||
.ant-collapse-content {
|
||||
border-top: none !important;
|
||||
background: transparent !important;
|
||||
}
|
||||
|
||||
.ant-collapse-content-box {
|
||||
padding: 12px 0 0 24px !important;
|
||||
}
|
||||
}
|
||||
|
||||
&__collapse-header {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
cursor: pointer;
|
||||
position: relative;
|
||||
width: 100%;
|
||||
|
||||
svg {
|
||||
margin-top: 2px;
|
||||
color: var(--l3-foreground);
|
||||
flex-shrink: 0;
|
||||
}
|
||||
}
|
||||
|
||||
&__collapse-header-text {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
&__section-title {
|
||||
margin: 0;
|
||||
color: var(--l1-foreground);
|
||||
}
|
||||
|
||||
&__section-description {
|
||||
margin: 0;
|
||||
color: var(--l3-foreground);
|
||||
}
|
||||
|
||||
&__content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 16px;
|
||||
max-height: 45vh;
|
||||
overflow-y: auto;
|
||||
padding-right: 4px;
|
||||
|
||||
// Thin scrollbar
|
||||
&::-webkit-scrollbar {
|
||||
width: 4px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--l3-foreground);
|
||||
border-radius: 4px;
|
||||
|
||||
&:hover {
|
||||
background: var(--l2-foreground);
|
||||
}
|
||||
}
|
||||
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: var(--l3-foreground) transparent;
|
||||
}
|
||||
|
||||
&__field-group {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
&__label {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
color: var(--l1-foreground);
|
||||
}
|
||||
|
||||
&__form-item {
|
||||
margin-bottom: 0 !important;
|
||||
}
|
||||
|
||||
&__checkbox-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
&__select {
|
||||
width: 100%;
|
||||
|
||||
&.ant-select {
|
||||
.ant-select-selector {
|
||||
height: 32px;
|
||||
background: var(--l3-background) !important;
|
||||
border: 1px solid var(--l3-border) !important;
|
||||
border-radius: 2px;
|
||||
color: var(--l1-foreground) !important;
|
||||
|
||||
.ant-select-selection-item {
|
||||
color: var(--l1-foreground) !important;
|
||||
}
|
||||
}
|
||||
|
||||
&:hover .ant-select-selector {
|
||||
border-color: var(--l3-border) !important;
|
||||
}
|
||||
|
||||
&.ant-select-focused .ant-select-selector {
|
||||
border-color: var(--primary) !important;
|
||||
box-shadow: none !important;
|
||||
}
|
||||
|
||||
.ant-select-arrow {
|
||||
color: var(--l3-foreground);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&__group-mappings {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
&__group-header {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 2px;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
&__group-title {
|
||||
margin: 0;
|
||||
color: var(--l1-foreground);
|
||||
}
|
||||
|
||||
&__group-description {
|
||||
margin: 0;
|
||||
color: var(--l3-foreground);
|
||||
}
|
||||
|
||||
&__items {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
&__row {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 8px;
|
||||
|
||||
.ant-form-item {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
&__field {
|
||||
&--group {
|
||||
flex: 2;
|
||||
}
|
||||
|
||||
&--role {
|
||||
flex: 1;
|
||||
min-width: 120px;
|
||||
}
|
||||
}
|
||||
|
||||
&__remove-btn {
|
||||
flex-shrink: 0;
|
||||
width: 32px !important;
|
||||
height: 32px !important;
|
||||
min-width: 32px !important;
|
||||
padding: 0 !important;
|
||||
border: none !important;
|
||||
border-radius: 2px !important;
|
||||
background: transparent !important;
|
||||
color: var(--destructive) !important;
|
||||
opacity: 0.6 !important;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.2s, opacity 0.2s;
|
||||
box-shadow: none !important;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
svg {
|
||||
color: var(--destructive) !important;
|
||||
width: 12px !important;
|
||||
height: 12px !important;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
background: rgba(229, 72, 77, 0.1) !important;
|
||||
opacity: 0.9 !important;
|
||||
color: var(--destructive) !important;
|
||||
|
||||
svg {
|
||||
color: var(--destructive) !important;
|
||||
}
|
||||
}
|
||||
|
||||
&:active {
|
||||
opacity: 0.7 !important;
|
||||
background: rgba(229, 72, 77, 0.15) !important;
|
||||
}
|
||||
}
|
||||
|
||||
&__add-btn {
|
||||
width: 100%;
|
||||
|
||||
// Ensure icon is visible
|
||||
svg,
|
||||
[class*='icon'] {
|
||||
color: var(--l2-foreground) !important;
|
||||
display: inline-block !important;
|
||||
opacity: 1 !important;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
color: var(--l1-foreground);
|
||||
|
||||
svg,
|
||||
[class*='icon'] {
|
||||
color: var(--l1-foreground) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- Checkbox border visibility ---
|
||||
button[role='checkbox'] {
|
||||
border: 1px solid var(--l2-foreground) !important;
|
||||
border-radius: 2px;
|
||||
|
||||
&[data-state='checked'] {
|
||||
background-color: var(--primary) !important;
|
||||
border-color: var(--primary) !important;
|
||||
}
|
||||
}
|
||||
|
||||
// todo: https://github.com/SigNoz/components/issues/116
|
||||
input {
|
||||
height: 32px;
|
||||
background: var(--l3-background) !important;
|
||||
border: 1px solid var(--l3-border) !important;
|
||||
border-radius: 2px;
|
||||
color: var(--l1-foreground) !important;
|
||||
|
||||
&::placeholder {
|
||||
color: var(--l3-foreground) !important;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
border-color: var(--l3-border) !important;
|
||||
}
|
||||
|
||||
&:focus,
|
||||
&:focus-visible {
|
||||
border-color: var(--primary) !important;
|
||||
box-shadow: none !important;
|
||||
outline: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,216 +0,0 @@
|
||||
import { useCallback, useState } from 'react';
|
||||
import { Button } from '@signozhq/button';
|
||||
import { Checkbox } from '@signozhq/checkbox';
|
||||
import { Color, Style } from '@signozhq/design-tokens';
|
||||
import {
|
||||
ChevronDown,
|
||||
ChevronRight,
|
||||
CircleHelp,
|
||||
Plus,
|
||||
Trash2,
|
||||
TriangleAlert,
|
||||
} from '@signozhq/icons';
|
||||
import { Input } from '@signozhq/input';
|
||||
import { Collapse, Form, Select, Tooltip } from 'antd';
|
||||
import { useCollapseSectionErrors } from 'hooks/useCollapseSectionErrors';
|
||||
|
||||
import './RoleMappingSection.styles.scss';
|
||||
|
||||
const ROLE_OPTIONS = [
|
||||
{ value: 'VIEWER', label: 'VIEWER' },
|
||||
{ value: 'EDITOR', label: 'EDITOR' },
|
||||
{ value: 'ADMIN', label: 'ADMIN' },
|
||||
];
|
||||
|
||||
interface RoleMappingSectionProps {
|
||||
fieldNamePrefix: string[];
|
||||
isExpanded?: boolean;
|
||||
onExpandChange?: (expanded: boolean) => void;
|
||||
}
|
||||
|
||||
function RoleMappingSection({
|
||||
fieldNamePrefix,
|
||||
isExpanded,
|
||||
onExpandChange,
|
||||
}: RoleMappingSectionProps): JSX.Element {
|
||||
const form = Form.useFormInstance();
|
||||
const useRoleAttribute = Form.useWatch(
|
||||
[...fieldNamePrefix, 'useRoleAttribute'],
|
||||
form,
|
||||
);
|
||||
|
||||
// Support both controlled and uncontrolled modes
|
||||
const [internalExpanded, setInternalExpanded] = useState(false);
|
||||
const isControlled = isExpanded !== undefined;
|
||||
const expanded = isControlled ? isExpanded : internalExpanded;
|
||||
|
||||
const handleCollapseChange = useCallback(
|
||||
(keys: string | string[]): void => {
|
||||
const newExpanded = Array.isArray(keys) ? keys.length > 0 : !!keys;
|
||||
if (isControlled && onExpandChange) {
|
||||
onExpandChange(newExpanded);
|
||||
} else {
|
||||
setInternalExpanded(newExpanded);
|
||||
}
|
||||
},
|
||||
[isControlled, onExpandChange],
|
||||
);
|
||||
|
||||
const collapseActiveKey = expanded ? ['role-mapping'] : [];
|
||||
const { hasErrors, errorMessages } = useCollapseSectionErrors(fieldNamePrefix);
|
||||
|
||||
return (
|
||||
<div className="role-mapping-section">
|
||||
<Collapse
|
||||
bordered={false}
|
||||
activeKey={collapseActiveKey}
|
||||
onChange={handleCollapseChange}
|
||||
className="role-mapping-section__collapse"
|
||||
expandIcon={(): null => null}
|
||||
>
|
||||
<Collapse.Panel
|
||||
key="role-mapping"
|
||||
header={
|
||||
<div
|
||||
className="role-mapping-section__collapse-header"
|
||||
role="button"
|
||||
aria-expanded={expanded}
|
||||
aria-controls="role-mapping-content"
|
||||
>
|
||||
{!expanded ? <ChevronRight size={16} /> : <ChevronDown size={16} />}
|
||||
<div className="role-mapping-section__collapse-header-text">
|
||||
<h4 className="role-mapping-section__section-title">
|
||||
Role Mapping (Advanced)
|
||||
</h4>
|
||||
<p className="role-mapping-section__section-description">
|
||||
Configure how user roles are determined from your Identity Provider.
|
||||
You can either use a direct role attribute or map IDP groups to SigNoz
|
||||
roles.
|
||||
</p>
|
||||
</div>
|
||||
{!expanded && hasErrors && (
|
||||
<Tooltip
|
||||
title={
|
||||
<>
|
||||
{errorMessages.map((msg) => (
|
||||
<div key={msg}>{msg}</div>
|
||||
))}
|
||||
</>
|
||||
}
|
||||
>
|
||||
<TriangleAlert size={16} color={Color.BG_CHERRY_500} />
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<div id="role-mapping-content" className="role-mapping-section__content">
|
||||
<div className="role-mapping-section__field-group">
|
||||
<label className="role-mapping-section__label" htmlFor="default-role">
|
||||
Default Role
|
||||
<Tooltip title='The default role assigned to new SSO users if no other role mapping applies. Default: "VIEWER"'>
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</label>
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'defaultRole']}
|
||||
className="role-mapping-section__form-item"
|
||||
initialValue="VIEWER"
|
||||
>
|
||||
<Select
|
||||
id="default-role"
|
||||
options={ROLE_OPTIONS}
|
||||
className="role-mapping-section__select"
|
||||
/>
|
||||
</Form.Item>
|
||||
</div>
|
||||
|
||||
<div className="role-mapping-section__checkbox-row">
|
||||
<Form.Item
|
||||
name={[...fieldNamePrefix, 'useRoleAttribute']}
|
||||
valuePropName="checked"
|
||||
noStyle
|
||||
>
|
||||
<Checkbox
|
||||
id="use-role-attribute"
|
||||
labelName="Use Role Attribute Directly"
|
||||
onCheckedChange={(checked: boolean): void => {
|
||||
form.setFieldValue([...fieldNamePrefix, 'useRoleAttribute'], checked);
|
||||
}}
|
||||
/>
|
||||
</Form.Item>
|
||||
<Tooltip title="If enabled, the role claim/attribute from the IDP will be used directly instead of group mappings. The role value must match a SigNoz role (VIEWER, EDITOR, or ADMIN).">
|
||||
<CircleHelp size={14} color={Style.L3_FOREGROUND} cursor="help" />
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
{!useRoleAttribute && (
|
||||
<div className="role-mapping-section__group-mappings">
|
||||
<div className="role-mapping-section__group-header">
|
||||
<span className="role-mapping-section__group-title">
|
||||
Group to Role Mappings
|
||||
</span>
|
||||
<p className="role-mapping-section__group-description">
|
||||
Map IDP group names to SigNoz roles. If a user belongs to multiple
|
||||
groups, the highest privilege role will be assigned.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<Form.List name={[...fieldNamePrefix, 'groupMappingsList']}>
|
||||
{(fields, { add, remove }): JSX.Element => (
|
||||
<div className="role-mapping-section__items">
|
||||
{fields.map((field) => (
|
||||
<div key={field.key} className="role-mapping-section__row">
|
||||
<Form.Item
|
||||
name={[field.name, 'groupName']}
|
||||
className="role-mapping-section__field role-mapping-section__field--group"
|
||||
rules={[{ required: true, message: 'Group name is required' }]}
|
||||
>
|
||||
<Input placeholder="IDP Group Name" />
|
||||
</Form.Item>
|
||||
|
||||
<Form.Item
|
||||
name={[field.name, 'role']}
|
||||
className="role-mapping-section__field role-mapping-section__field--role"
|
||||
rules={[{ required: true, message: 'Role is required' }]}
|
||||
initialValue="VIEWER"
|
||||
>
|
||||
<Select
|
||||
options={ROLE_OPTIONS}
|
||||
className="role-mapping-section__select"
|
||||
/>
|
||||
</Form.Item>
|
||||
|
||||
<Button
|
||||
variant="ghost"
|
||||
color="secondary"
|
||||
className="role-mapping-section__remove-btn"
|
||||
onClick={(): void => remove(field.name)}
|
||||
aria-label="Remove mapping"
|
||||
>
|
||||
<Trash2 size={12} />
|
||||
</Button>
|
||||
</div>
|
||||
))}
|
||||
|
||||
<Button
|
||||
variant="dashed"
|
||||
onClick={(): void => add({ groupName: '', role: 'VIEWER' })}
|
||||
prefixIcon={<Plus size={14} />}
|
||||
className="role-mapping-section__add-btn"
|
||||
>
|
||||
Add Group Mapping
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</Form.List>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Collapse.Panel>
|
||||
</Collapse>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default RoleMappingSection;
|
||||
@@ -1,77 +0,0 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { Switch } from '@signozhq/switch';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { useUpdateAuthDomain } from 'api/generated/services/authdomains';
|
||||
import {
|
||||
AuthtypesGettableAuthDomainDTO,
|
||||
RenderErrorResponseDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { AxiosError } from 'axios';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import { ErrorV2Resp } from 'types/api';
|
||||
import APIError from 'types/api/error';
|
||||
|
||||
interface SSOEnforcementToggleProps {
|
||||
isDefaultChecked: boolean;
|
||||
record: AuthtypesGettableAuthDomainDTO;
|
||||
}
|
||||
|
||||
function SSOEnforcementToggle({
|
||||
isDefaultChecked,
|
||||
record,
|
||||
}: SSOEnforcementToggleProps): JSX.Element {
|
||||
const [isChecked, setIsChecked] = useState<boolean>(isDefaultChecked);
|
||||
const { showErrorModal } = useErrorModal();
|
||||
|
||||
useEffect(() => {
|
||||
setIsChecked(isDefaultChecked);
|
||||
}, [isDefaultChecked]);
|
||||
|
||||
const { mutate: updateAuthDomain, isLoading } = useUpdateAuthDomain<
|
||||
AxiosError<RenderErrorResponseDTO>
|
||||
>();
|
||||
|
||||
const onChangeHandler = (checked: boolean): void => {
|
||||
if (!record.id) {
|
||||
return;
|
||||
}
|
||||
|
||||
setIsChecked(checked);
|
||||
|
||||
updateAuthDomain(
|
||||
{
|
||||
pathParams: { id: record.id },
|
||||
data: {
|
||||
config: {
|
||||
ssoEnabled: checked,
|
||||
ssoType: record.ssoType,
|
||||
googleAuthConfig: record.googleAuthConfig,
|
||||
oidcConfig: record.oidcConfig,
|
||||
samlConfig: record.samlConfig,
|
||||
roleMapping: record.roleMapping,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
onError: (error) => {
|
||||
setIsChecked(!checked);
|
||||
try {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
} catch (apiError) {
|
||||
showErrorModal(apiError as APIError);
|
||||
}
|
||||
},
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<Switch
|
||||
disabled={isLoading}
|
||||
checked={isChecked}
|
||||
onCheckedChange={onChangeHandler}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export default SSOEnforcementToggle;
|
||||
@@ -0,0 +1,45 @@
|
||||
import { useState } from 'react';
|
||||
import { Switch } from 'antd';
|
||||
import put from 'api/v1/domains/id/put';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import APIError from 'types/api/error';
|
||||
import { GettableAuthDomain } from 'types/api/v1/domains/list';
|
||||
|
||||
function Toggle({ isDefaultChecked, record }: ToggleProps): JSX.Element {
|
||||
const [isChecked, setIsChecked] = useState<boolean>(isDefaultChecked);
|
||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||
const { showErrorModal } = useErrorModal();
|
||||
|
||||
const onChangeHandler = async (checked: boolean): Promise<void> => {
|
||||
setIsLoading(true);
|
||||
|
||||
try {
|
||||
await put({
|
||||
id: record.id,
|
||||
config: {
|
||||
ssoEnabled: checked,
|
||||
ssoType: record.ssoType,
|
||||
googleAuthConfig: record.googleAuthConfig,
|
||||
oidcConfig: record.oidcConfig,
|
||||
samlConfig: record.samlConfig,
|
||||
},
|
||||
});
|
||||
setIsChecked(checked);
|
||||
} catch (error) {
|
||||
showErrorModal(error as APIError);
|
||||
}
|
||||
|
||||
setIsLoading(false);
|
||||
};
|
||||
|
||||
return (
|
||||
<Switch loading={isLoading} checked={isChecked} onChange={onChangeHandler} />
|
||||
);
|
||||
}
|
||||
|
||||
interface ToggleProps {
|
||||
isDefaultChecked: boolean;
|
||||
record: GettableAuthDomain;
|
||||
}
|
||||
|
||||
export default Toggle;
|
||||
@@ -1,138 +0,0 @@
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import AuthDomain from '../index';
|
||||
import {
|
||||
AUTH_DOMAINS_LIST_ENDPOINT,
|
||||
mockDomainsListResponse,
|
||||
mockEmptyDomainsResponse,
|
||||
mockErrorResponse,
|
||||
} from './mocks';
|
||||
|
||||
jest.mock('@signozhq/sonner', () => ({
|
||||
toast: {
|
||||
success: jest.fn(),
|
||||
error: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('AuthDomain', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
});
|
||||
|
||||
describe('List View', () => {
|
||||
it('renders page header and add button', async () => {
|
||||
server.use(
|
||||
rest.get(AUTH_DOMAINS_LIST_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockEmptyDomainsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<AuthDomain />);
|
||||
|
||||
expect(screen.getByText(/authenticated domains/i)).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByRole('button', { name: /add domain/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders list of auth domains successfully', async () => {
|
||||
server.use(
|
||||
rest.get(AUTH_DOMAINS_LIST_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockDomainsListResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<AuthDomain />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('signoz.io')).toBeInTheDocument();
|
||||
expect(screen.getByText('example.com')).toBeInTheDocument();
|
||||
expect(screen.getByText('corp.io')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('renders empty state when no domains exist', async () => {
|
||||
server.use(
|
||||
rest.get(AUTH_DOMAINS_LIST_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockEmptyDomainsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<AuthDomain />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/no data/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('displays error content when API fails', async () => {
|
||||
server.use(
|
||||
rest.get(AUTH_DOMAINS_LIST_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(500), ctx.json(mockErrorResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<AuthDomain />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.getByText(/failed to perform operation/i),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add Domain', () => {
|
||||
it('opens create modal when Add Domain button is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
server.use(
|
||||
rest.get(AUTH_DOMAINS_LIST_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockEmptyDomainsResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<AuthDomain />);
|
||||
|
||||
const addButton = await screen.findByRole('button', { name: /add domain/i });
|
||||
await user.click(addButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.getByText(/configure authentication method/i),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Configure Domain', () => {
|
||||
it('opens edit modal when configure action is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
server.use(
|
||||
rest.get(AUTH_DOMAINS_LIST_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(mockDomainsListResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<AuthDomain />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('signoz.io')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const configureLinks = await screen.findAllByText(/configure google auth/i);
|
||||
await user.click(configureLinks[0]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/edit google authentication/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,354 +0,0 @@
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import CreateEdit from '../CreateEdit/CreateEdit';
|
||||
import {
|
||||
mockDomainWithRoleMapping,
|
||||
mockGoogleAuthDomain,
|
||||
mockGoogleAuthWithWorkspaceGroups,
|
||||
mockOidcAuthDomain,
|
||||
mockOidcWithClaimMapping,
|
||||
mockSamlAuthDomain,
|
||||
mockSamlWithAttributeMapping,
|
||||
} from './mocks';
|
||||
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
describe('CreateEdit Modal', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('Provider Selection (Create Mode)', () => {
|
||||
it('renders provider selection when creating new domain', () => {
|
||||
render(<CreateEdit isCreate onClose={mockOnClose} />);
|
||||
|
||||
expect(
|
||||
screen.getByText(/configure authentication method/i),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText(/google apps authentication/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/saml authentication/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/oidc authentication/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('returns to provider selection when back button is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(<CreateEdit isCreate onClose={mockOnClose} />);
|
||||
|
||||
const configureButtons = await screen.findAllByRole('button', {
|
||||
name: /configure/i,
|
||||
});
|
||||
await user.click(configureButtons[0]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/edit google authentication/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const backButton = screen.getByRole('button', { name: /back/i });
|
||||
await user.click(backButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.getByText(/configure authentication method/i),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edit Mode', () => {
|
||||
it('shows provider form directly when editing existing domain', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockGoogleAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText(/edit google authentication/i)).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText(/configure authentication method/i),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('pre-fills form with existing domain values', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockGoogleAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByDisplayValue('signoz.io')).toBeInTheDocument();
|
||||
expect(screen.getByDisplayValue('test-client-id')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('disables domain field when editing', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockGoogleAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
const domainInput = screen.getByDisplayValue('signoz.io');
|
||||
expect(domainInput).toBeDisabled();
|
||||
});
|
||||
|
||||
it('shows cancel button instead of back when editing', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockGoogleAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByRole('button', { name: /cancel/i })).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /back/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Form Validation', () => {
|
||||
it('shows validation error when submitting without required fields', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(<CreateEdit isCreate onClose={mockOnClose} />);
|
||||
|
||||
const configureButtons = await screen.findAllByRole('button', {
|
||||
name: /configure/i,
|
||||
});
|
||||
await user.click(configureButtons[0]);
|
||||
|
||||
const saveButton = await screen.findByRole('button', {
|
||||
name: /save changes/i,
|
||||
});
|
||||
await user.click(saveButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/domain is required/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Google Auth Provider', () => {
|
||||
it('shows Google Auth form fields', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(<CreateEdit isCreate onClose={mockOnClose} />);
|
||||
|
||||
const configureButtons = await screen.findAllByRole('button', {
|
||||
name: /configure/i,
|
||||
});
|
||||
await user.click(configureButtons[0]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/edit google authentication/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/domain/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/client id/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/client secret/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/skip email verification/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows workspace groups section when expanded', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockGoogleAuthWithWorkspaceGroups}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
const workspaceHeader = screen.getByText(/google workspace groups/i);
|
||||
await user.click(workspaceHeader);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/fetch groups/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/service account json/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('SAML Provider', () => {
|
||||
it('shows SAML-specific fields when editing SAML domain', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockSamlAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText(/edit saml authentication/i)).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByDisplayValue('https://idp.example.com/sso'),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByDisplayValue('urn:example:idp')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows attribute mapping section for SAML', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockSamlWithAttributeMapping}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(
|
||||
screen.getByText(/attribute mapping \(advanced\)/i),
|
||||
).toBeInTheDocument();
|
||||
|
||||
const attributeHeader = screen.getByText(/attribute mapping \(advanced\)/i);
|
||||
await user.click(attributeHeader);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByLabelText(/name attribute/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/groups attribute/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/role attribute/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('OIDC Provider', () => {
|
||||
it('shows OIDC-specific fields when editing OIDC domain', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockOidcAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText(/edit oidc authentication/i)).toBeInTheDocument();
|
||||
expect(screen.getByDisplayValue('https://oidc.corp.io')).toBeInTheDocument();
|
||||
expect(screen.getByDisplayValue('oidc-client-id')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows claim mapping section for OIDC', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockOidcWithClaimMapping}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText(/claim mapping \(advanced\)/i)).toBeInTheDocument();
|
||||
|
||||
const claimHeader = screen.getByText(/claim mapping \(advanced\)/i);
|
||||
await user.click(claimHeader);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByLabelText(/email claim/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/name claim/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/groups claim/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/role claim/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows OIDC options checkboxes', () => {
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockOidcAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText(/skip email verification/i)).toBeInTheDocument();
|
||||
expect(screen.getByText(/get user info/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Role Mapping', () => {
|
||||
it('shows role mapping section in provider forms', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(<CreateEdit isCreate onClose={mockOnClose} />);
|
||||
|
||||
const configureButtons = await screen.findAllByRole('button', {
|
||||
name: /configure/i,
|
||||
});
|
||||
await user.click(configureButtons[0]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/role mapping \(advanced\)/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('expands role mapping section to show default role selector', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockDomainWithRoleMapping}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
const roleMappingHeader = screen.getByText(/role mapping \(advanced\)/i);
|
||||
await user.click(roleMappingHeader);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/default role/i)).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText(/use role attribute directly/i),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows group mappings section when useRoleAttribute is false', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockDomainWithRoleMapping}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
const roleMappingHeader = screen.getByText(/role mapping \(advanced\)/i);
|
||||
await user.click(roleMappingHeader);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/group to role mappings/i)).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByRole('button', { name: /add group mapping/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Modal Actions', () => {
|
||||
it('calls onClose when cancel button is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<CreateEdit
|
||||
isCreate={false}
|
||||
record={mockGoogleAuthDomain}
|
||||
onClose={mockOnClose}
|
||||
/>,
|
||||
);
|
||||
|
||||
const cancelButton = screen.getByRole('button', { name: /cancel/i });
|
||||
await user.click(cancelButton);
|
||||
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,130 +0,0 @@
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
|
||||
import SSOEnforcementToggle from '../SSOEnforcementToggle';
|
||||
import {
|
||||
AUTH_DOMAINS_UPDATE_ENDPOINT,
|
||||
mockErrorResponse,
|
||||
mockGoogleAuthDomain,
|
||||
mockUpdateSuccessResponse,
|
||||
} from './mocks';
|
||||
|
||||
describe('SSOEnforcementToggle', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
});
|
||||
|
||||
it('renders switch with correct initial state', () => {
|
||||
render(
|
||||
<SSOEnforcementToggle
|
||||
isDefaultChecked={true}
|
||||
record={mockGoogleAuthDomain}
|
||||
/>,
|
||||
);
|
||||
|
||||
const switchElement = screen.getByRole('switch');
|
||||
expect(switchElement).toBeChecked();
|
||||
});
|
||||
|
||||
it('renders unchecked switch when SSO is disabled', () => {
|
||||
render(
|
||||
<SSOEnforcementToggle
|
||||
isDefaultChecked={false}
|
||||
record={{ ...mockGoogleAuthDomain, ssoEnabled: false }}
|
||||
/>,
|
||||
);
|
||||
|
||||
const switchElement = screen.getByRole('switch');
|
||||
expect(switchElement).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('calls update API when toggle is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const mockUpdateAPI = jest.fn();
|
||||
|
||||
server.use(
|
||||
rest.put(AUTH_DOMAINS_UPDATE_ENDPOINT, async (req, res, ctx) => {
|
||||
const body = await req.json();
|
||||
mockUpdateAPI(body);
|
||||
return res(ctx.status(200), ctx.json(mockUpdateSuccessResponse));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<SSOEnforcementToggle
|
||||
isDefaultChecked={true}
|
||||
record={mockGoogleAuthDomain}
|
||||
/>,
|
||||
);
|
||||
|
||||
const switchElement = screen.getByRole('switch');
|
||||
await user.click(switchElement);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(switchElement).not.toBeChecked();
|
||||
});
|
||||
|
||||
expect(mockUpdateAPI).toHaveBeenCalledTimes(1);
|
||||
expect(mockUpdateAPI).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
config: expect.objectContaining({
|
||||
ssoEnabled: false,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('shows error modal when update fails', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
server.use(
|
||||
rest.put(AUTH_DOMAINS_UPDATE_ENDPOINT, (_, res, ctx) =>
|
||||
res(ctx.status(500), ctx.json(mockErrorResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(
|
||||
<SSOEnforcementToggle
|
||||
isDefaultChecked={true}
|
||||
record={mockGoogleAuthDomain}
|
||||
/>,
|
||||
);
|
||||
|
||||
const switchElement = screen.getByRole('switch');
|
||||
await user.click(switchElement);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/failed to perform operation/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('does not call API when record has no id', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
let apiCalled = false;
|
||||
|
||||
server.use(
|
||||
rest.put(AUTH_DOMAINS_UPDATE_ENDPOINT, (_, res, ctx) => {
|
||||
apiCalled = true;
|
||||
return res(ctx.status(200), ctx.json(mockUpdateSuccessResponse));
|
||||
}),
|
||||
);
|
||||
|
||||
render(
|
||||
<SSOEnforcementToggle
|
||||
isDefaultChecked={true}
|
||||
record={{ ...mockGoogleAuthDomain, id: undefined }}
|
||||
/>,
|
||||
);
|
||||
|
||||
const switchElement = screen.getByRole('switch');
|
||||
await user.click(switchElement);
|
||||
|
||||
// Wait a bit to ensure no API call was made
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
expect(apiCalled).toBe(false);
|
||||
});
|
||||
});
|
||||
@@ -1,220 +0,0 @@
import { AuthtypesGettableAuthDomainDTO } from 'api/generated/services/sigNoz.schemas';

// API Endpoints
export const AUTH_DOMAINS_LIST_ENDPOINT = '*/api/v1/domains';
export const AUTH_DOMAINS_CREATE_ENDPOINT = '*/api/v1/domains';
export const AUTH_DOMAINS_UPDATE_ENDPOINT = '*/api/v1/domains/:id';
export const AUTH_DOMAINS_DELETE_ENDPOINT = '*/api/v1/domains/:id';

// Mock Auth Domain with Google Auth
export const mockGoogleAuthDomain: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-1',
	name: 'signoz.io',
	ssoEnabled: true,
	ssoType: 'google_auth',
	googleAuthConfig: {
		clientId: 'test-client-id',
		clientSecret: 'test-client-secret',
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-1',
	},
};

// Mock Auth Domain with SAML
export const mockSamlAuthDomain: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-2',
	name: 'example.com',
	ssoEnabled: false,
	ssoType: 'saml',
	samlConfig: {
		samlIdp: 'https://idp.example.com/sso',
		samlEntity: 'urn:example:idp',
		samlCert: 'MOCK_CERTIFICATE',
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-2',
	},
};

// Mock Auth Domain with OIDC
export const mockOidcAuthDomain: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-3',
	name: 'corp.io',
	ssoEnabled: true,
	ssoType: 'oidc',
	oidcConfig: {
		issuer: 'https://oidc.corp.io',
		clientId: 'oidc-client-id',
		clientSecret: 'oidc-client-secret',
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-3',
	},
};

// Mock Auth Domain with Role Mapping
export const mockDomainWithRoleMapping: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-4',
	name: 'enterprise.com',
	ssoEnabled: true,
	ssoType: 'saml',
	samlConfig: {
		samlIdp: 'https://idp.enterprise.com/sso',
		samlEntity: 'urn:enterprise:idp',
		samlCert: 'MOCK_CERTIFICATE',
	},
	roleMapping: {
		defaultRole: 'EDITOR',
		useRoleAttribute: false,
		groupMappings: {
			'admin-group': 'ADMIN',
			'dev-team': 'EDITOR',
			viewers: 'VIEWER',
		},
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-4',
	},
};

// Mock Auth Domain with useRoleAttribute enabled
export const mockDomainWithDirectRoleAttribute: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-5',
	name: 'direct-role.com',
	ssoEnabled: true,
	ssoType: 'oidc',
	oidcConfig: {
		issuer: 'https://oidc.direct-role.com',
		clientId: 'direct-role-client-id',
		clientSecret: 'direct-role-client-secret',
	},
	roleMapping: {
		defaultRole: 'VIEWER',
		useRoleAttribute: true,
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-5',
	},
};

// Mock OIDC domain with claim mapping
export const mockOidcWithClaimMapping: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-6',
	name: 'oidc-claims.com',
	ssoEnabled: true,
	ssoType: 'oidc',
	oidcConfig: {
		issuer: 'https://oidc.claims.com',
		issuerAlias: 'https://alias.claims.com',
		clientId: 'claims-client-id',
		clientSecret: 'claims-client-secret',
		insecureSkipEmailVerified: true,
		getUserInfo: true,
		claimMapping: {
			email: 'user_email',
			name: 'display_name',
			groups: 'user_groups',
			role: 'user_role',
		},
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-6',
	},
};

// Mock SAML domain with attribute mapping
export const mockSamlWithAttributeMapping: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-7',
	name: 'saml-attrs.com',
	ssoEnabled: true,
	ssoType: 'saml',
	samlConfig: {
		samlIdp: 'https://idp.saml-attrs.com/sso',
		samlEntity: 'urn:saml-attrs:idp',
		samlCert: 'MOCK_CERTIFICATE_ATTRS',
		insecureSkipAuthNRequestsSigned: true,
		attributeMapping: {
			name: 'user_display_name',
			groups: 'member_of',
			role: 'signoz_role',
		},
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-7',
	},
};

// Mock Google Auth with workspace groups
export const mockGoogleAuthWithWorkspaceGroups: AuthtypesGettableAuthDomainDTO = {
	id: 'domain-8',
	name: 'google-groups.com',
	ssoEnabled: true,
	ssoType: 'google_auth',
	googleAuthConfig: {
		clientId: 'google-groups-client-id',
		clientSecret: 'google-groups-client-secret',
		insecureSkipEmailVerified: false,
		fetchGroups: true,
		serviceAccountJson: '{"type": "service_account"}',
		domainToAdminEmail: {
			'google-groups.com': 'admin@google-groups.com',
		},
		fetchTransitiveGroupMembership: true,
		allowedGroups: ['allowed-group-1', 'allowed-group-2'],
	},
	authNProviderInfo: {
		relayStatePath: 'api/v1/sso/relay/domain-8',
	},
};

// Mock empty list response
export const mockEmptyDomainsResponse = {
	status: 'success',
	data: [],
};

// Mock list response with domains
export const mockDomainsListResponse = {
	status: 'success',
	data: [mockGoogleAuthDomain, mockSamlAuthDomain, mockOidcAuthDomain],
};

// Mock single domain list response
export const mockSingleDomainResponse = {
	status: 'success',
	data: [mockGoogleAuthDomain],
};

// Mock success responses
export const mockCreateSuccessResponse = {
	status: 'success',
	data: mockGoogleAuthDomain,
};

export const mockUpdateSuccessResponse = {
	status: 'success',
	data: { ...mockGoogleAuthDomain, ssoEnabled: false },
};

export const mockDeleteSuccessResponse = {
	status: 'success',
	data: 'Domain deleted successfully',
};

// Mock error responses
export const mockErrorResponse = {
	error: {
		code: 'internal_error',
		message: 'Failed to perform operation',
		url: '',
	},
};

export const mockValidationErrorResponse = {
	error: {
		code: 'invalid_input',
		message: 'Domain name is required',
		url: '',
	},
};
@@ -1,214 +1,151 @@
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
import { useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { PlusOutlined } from '@ant-design/icons';
|
||||
import { Button } from '@signozhq/button';
|
||||
import { Trash2, X } from '@signozhq/icons';
|
||||
import { toast } from '@signozhq/sonner';
|
||||
import { Modal } from 'antd';
|
||||
import { Table } from 'antd';
|
||||
import { Button, Table, Typography } from 'antd';
|
||||
import { ColumnsType } from 'antd/lib/table';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import {
|
||||
useDeleteAuthDomain,
|
||||
useListAuthDomains,
|
||||
} from 'api/generated/services/authdomains';
|
||||
import {
|
||||
AuthtypesGettableAuthDomainDTO,
|
||||
RenderErrorResponseDTO,
|
||||
} from 'api/generated/services/sigNoz.schemas';
|
||||
import { AxiosError } from 'axios';
|
||||
import deleteDomain from 'api/v1/domains/id/delete';
|
||||
import listAllDomain from 'api/v1/domains/list';
|
||||
import ErrorContent from 'components/ErrorModal/components/ErrorContent';
|
||||
import CopyToClipboard from 'periscope/components/CopyToClipboard';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import { ErrorV2Resp } from 'types/api';
|
||||
import APIError from 'types/api/error';
|
||||
import { GettableAuthDomain, SSOType } from 'types/api/v1/domains/list';
|
||||
|
||||
import CreateEdit from './CreateEdit/CreateEdit';
|
||||
import SSOEnforcementToggle from './SSOEnforcementToggle';
|
||||
import Toggle from './Toggle';
|
||||
|
||||
import './AuthDomain.styles.scss';
|
||||
import '../../IngestionSettings/IngestionSettings.styles.scss';
|
||||
|
||||
export const SSOType = new Map<string, string>([
|
||||
['google_auth', 'Google Auth'],
|
||||
['saml', 'SAML'],
|
||||
['email_password', 'Email Password'],
|
||||
['oidc', 'OIDC'],
|
||||
]);
|
||||
const columns: ColumnsType<GettableAuthDomain> = [
|
||||
{
|
||||
title: 'Domain',
|
||||
dataIndex: 'name',
|
||||
key: 'name',
|
||||
width: 100,
|
||||
render: (val): JSX.Element => <Typography.Text>{val}</Typography.Text>,
|
||||
},
|
||||
{
|
||||
title: 'Enforce SSO',
|
||||
dataIndex: 'ssoEnabled',
|
||||
key: 'ssoEnabled',
|
||||
width: 80,
|
||||
render: (value: boolean, record: GettableAuthDomain): JSX.Element => (
|
||||
<Toggle isDefaultChecked={value} record={record} />
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'IDP Initiated SSO URL',
|
||||
dataIndex: 'relayState',
|
||||
key: 'relayState',
|
||||
width: 80,
|
||||
render: (_, record: GettableAuthDomain): JSX.Element => {
|
||||
const relayPath = record.authNProviderInfo.relayStatePath;
|
||||
if (!relayPath) {
|
||||
return (
|
||||
<Typography.Text style={{ paddingLeft: '6px' }}>N/A</Typography.Text>
|
||||
);
|
||||
}
|
||||
|
||||
const href = `${window.location.origin}/${relayPath}`;
|
||||
return <CopyToClipboard textToCopy={href} />;
|
||||
},
|
||||
},
|
||||
{
|
||||
title: 'Action',
|
||||
dataIndex: 'action',
|
||||
key: 'action',
|
||||
width: 100,
|
||||
render: (_, record: GettableAuthDomain): JSX.Element => (
|
||||
<section className="auth-domain-list-column-action">
|
||||
<Typography.Link data-column-action="configure">
|
||||
Configure {SSOType.get(record.ssoType)}
|
||||
</Typography.Link>
|
||||
<Typography.Link type="danger" data-column-action="delete">
|
||||
Delete
|
||||
</Typography.Link>
|
||||
</section>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
async function deleteDomainById(
|
||||
id: string,
|
||||
showErrorModal: (error: APIError) => void,
|
||||
refetchAuthDomainListResponse: () => void,
|
||||
): Promise<void> {
|
||||
try {
|
||||
await deleteDomain(id);
|
||||
refetchAuthDomainListResponse();
|
||||
} catch (error) {
|
||||
showErrorModal(error as APIError);
|
||||
}
|
||||
}
|
||||
|
||||
function AuthDomain(): JSX.Element {
|
||||
const [record, setRecord] = useState<AuthtypesGettableAuthDomainDTO>();
|
||||
const [record, setRecord] = useState<GettableAuthDomain>();
|
||||
const [addDomain, setAddDomain] = useState<boolean>(false);
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false);
|
||||
const [
|
||||
activeDomain,
|
||||
setActiveDomain,
|
||||
] = useState<AuthtypesGettableAuthDomainDTO | null>(null);
|
||||
|
||||
const { showErrorModal } = useErrorModal();
|
||||
|
||||
const {
|
||||
data: authDomainListResponse,
|
||||
isLoading: isLoadingAuthDomainListResponse,
|
||||
isFetching: isFetchingAuthDomainListResponse,
|
||||
error: errorFetchingAuthDomainListResponse,
|
||||
refetch: refetchAuthDomainListResponse,
|
||||
} = useListAuthDomains();
|
||||
|
||||
const { mutate: deleteAuthDomain, isLoading } = useDeleteAuthDomain<
|
||||
AxiosError<RenderErrorResponseDTO>
|
||||
>();
|
||||
|
||||
const showDeleteModal = useCallback(
|
||||
(domain: AuthtypesGettableAuthDomainDTO): void => {
|
||||
setActiveDomain(domain);
|
||||
setIsDeleteModalOpen(true);
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const hideDeleteModal = useCallback((): void => {
|
||||
setIsDeleteModalOpen(false);
|
||||
setActiveDomain(null);
|
||||
}, []);
|
||||
|
||||
const handleDeleteDomain = useCallback((): void => {
|
||||
if (!activeDomain?.id) {
|
||||
return;
|
||||
}
|
||||
|
||||
deleteAuthDomain(
|
||||
{ pathParams: { id: activeDomain.id } },
|
||||
{
|
||||
onSuccess: () => {
|
||||
toast.success('Domain deleted successfully');
|
||||
refetchAuthDomainListResponse();
|
||||
hideDeleteModal();
|
||||
},
|
||||
onError: (error) => {
|
||||
try {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
} catch (apiError) {
|
||||
showErrorModal(apiError as APIError);
|
||||
}
|
||||
},
|
||||
},
|
||||
);
|
||||
}, [
|
||||
activeDomain,
|
||||
deleteAuthDomain,
|
||||
hideDeleteModal,
|
||||
|
||||
refetchAuthDomainListResponse,
|
||||
showErrorModal,
|
||||
]);
|
||||
|
||||
const formattedError = useMemo(() => {
|
||||
if (!errorFetchingAuthDomainListResponse) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let errorResult: APIError | null = null;
|
||||
try {
|
||||
ErrorResponseHandlerV2(
|
||||
errorFetchingAuthDomainListResponse as AxiosError<ErrorV2Resp>,
|
||||
);
|
||||
} catch (error) {
|
||||
errorResult = error as APIError;
|
||||
}
|
||||
return errorResult;
|
||||
}, [errorFetchingAuthDomainListResponse]);
|
||||
|
||||
const columns: ColumnsType<AuthtypesGettableAuthDomainDTO> = useMemo(
|
||||
() => [
|
||||
{
|
||||
title: 'Domain',
|
||||
dataIndex: 'name',
|
||||
key: 'name',
|
||||
width: 100,
|
||||
render: (val): JSX.Element => <span>{val}</span>,
|
||||
},
|
||||
{
|
||||
title: 'Enforce SSO',
|
||||
dataIndex: 'ssoEnabled',
|
||||
key: 'ssoEnabled',
|
||||
width: 80,
|
||||
render: (
|
||||
value: boolean,
|
||||
record: AuthtypesGettableAuthDomainDTO,
|
||||
): JSX.Element => (
|
||||
<SSOEnforcementToggle isDefaultChecked={value} record={record} />
|
||||
),
|
||||
},
|
||||
{
|
||||
title: 'IDP Initiated SSO URL',
|
||||
dataIndex: 'relayState',
|
||||
key: 'relayState',
|
||||
width: 80,
|
||||
render: (_, record: AuthtypesGettableAuthDomainDTO): JSX.Element => {
|
||||
const relayPath = record.authNProviderInfo?.relayStatePath;
|
||||
if (!relayPath) {
|
||||
return <span className="auth-domain-list-na">N/A</span>;
|
||||
}
|
||||
|
||||
const href = `${window.location.origin}/${relayPath}`;
|
||||
return <CopyToClipboard textToCopy={href} />;
|
||||
},
|
||||
},
|
||||
{
|
||||
title: 'Action',
|
||||
dataIndex: 'action',
|
||||
key: 'action',
|
||||
width: 100,
|
||||
render: (_, record: AuthtypesGettableAuthDomainDTO): JSX.Element => (
|
||||
<section className="auth-domain-list-column-action">
|
||||
<Button
|
||||
className="auth-domain-list-action-link"
|
||||
onClick={(): void => setRecord(record)}
|
||||
variant="link"
|
||||
>
|
||||
Configure {SSOType.get(record.ssoType || '')}
|
||||
</Button>
|
||||
<Button
|
||||
className="auth-domain-list-action-link delete"
|
||||
onClick={(): void => showDeleteModal(record)}
|
||||
variant="link"
|
||||
>
|
||||
Delete
|
||||
</Button>
|
||||
</section>
|
||||
),
|
||||
},
|
||||
],
|
||||
[showDeleteModal],
|
||||
);
|
||||
} = useQuery({
|
||||
queryFn: listAllDomain,
|
||||
queryKey: ['/api/v1/domains', 'list'],
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="auth-domain">
|
||||
<section className="auth-domain-header">
|
||||
<h3 className="auth-domain-title">Authenticated Domains</h3>
|
||||
<Typography.Title level={3}>Authenticated Domains</Typography.Title>
|
||||
<Button
|
||||
prefixIcon={<PlusOutlined />}
|
||||
type="primary"
|
||||
icon={<PlusOutlined />}
|
||||
onClick={(): void => {
|
||||
setAddDomain(true);
|
||||
}}
|
||||
variant="solid"
|
||||
size="sm"
|
||||
color="primary"
|
||||
className="button"
|
||||
>
|
||||
Add Domain
|
||||
</Button>
|
||||
</section>
|
||||
{formattedError && <ErrorContent error={formattedError} />}
|
||||
{!errorFetchingAuthDomainListResponse && (
|
||||
{(errorFetchingAuthDomainListResponse as APIError) && (
|
||||
<ErrorContent error={errorFetchingAuthDomainListResponse as APIError} />
|
||||
)}
|
||||
{!(errorFetchingAuthDomainListResponse as APIError) && (
|
||||
<Table
|
||||
columns={columns}
|
||||
dataSource={authDomainListResponse?.data?.data}
|
||||
onRow={undefined}
|
||||
dataSource={authDomainListResponse?.data}
|
||||
onRow={(record): any => ({
|
||||
onClick: (
|
||||
event: React.SyntheticEvent<HTMLLinkElement, MouseEvent>,
|
||||
): void => {
|
||||
const target = event.target as HTMLLinkElement;
|
||||
const { columnAction } = target.dataset;
|
||||
switch (columnAction) {
|
||||
case 'configure':
|
||||
setRecord(record);
|
||||
|
||||
break;
|
||||
case 'delete':
|
||||
deleteDomainById(
|
||||
record.id,
|
||||
showErrorModal,
|
||||
refetchAuthDomainListResponse,
|
||||
);
|
||||
break;
|
||||
default:
|
||||
console.error('Unknown action:', columnAction);
|
||||
}
|
||||
},
|
||||
})}
|
||||
loading={
|
||||
isLoadingAuthDomainListResponse || isFetchingAuthDomainListResponse
|
||||
}
|
||||
className="auth-domain-list"
|
||||
rowKey="id"
|
||||
/>
|
||||
)}
|
||||
{(addDomain || record) && (
|
||||
@@ -222,39 +159,6 @@ function AuthDomain(): JSX.Element {
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Modal
|
||||
className="delete-ingestion-key-modal"
|
||||
title={<span className="title">Delete Domain</span>}
|
||||
open={isDeleteModalOpen}
|
||||
closable
|
||||
onCancel={hideDeleteModal}
|
||||
destroyOnClose
|
||||
footer={[
|
||||
<Button
|
||||
key="cancel"
|
||||
onClick={hideDeleteModal}
|
||||
className="cancel-btn"
|
||||
prefixIcon={<X size={16} />}
|
||||
>
|
||||
Cancel
|
||||
</Button>,
|
||||
<Button
|
||||
key="submit"
|
||||
prefixIcon={<Trash2 size={16} />}
|
||||
onClick={handleDeleteDomain}
|
||||
className="delete-btn"
|
||||
loading={isLoading}
|
||||
>
|
||||
Delete Domain
|
||||
</Button>,
|
||||
]}
|
||||
>
|
||||
<p className="delete-text">
|
||||
Are you sure you want to delete the domain{' '}
|
||||
<strong>{activeDomain?.name}</strong>? This action cannot be undone.
|
||||
</p>
|
||||
</Modal>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
import { FC } from 'react';
import Spinner from 'components/Spinner';

import { PanelTypeVsPanelWrapper } from './constants';
import { PanelWrapperProps } from './panelWrapper.types';
@@ -33,11 +32,6 @@ function PanelWrapper({
		// eslint-disable-next-line react/jsx-no-useless-fragment
		return <></>;
	}

	if (queryResponse.isFetching || queryResponse.isLoading) {
		return <Spinner height="100%" size="large" tip="Loading..." />;
	}

	return (
		<Component
			panelMode={panelMode}
@@ -1,246 +0,0 @@
|
||||
import { useCallback, useEffect, useMemo } from 'react';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { Pagination, Skeleton } from 'antd';
|
||||
import { useListRoles } from 'api/generated/services/role';
|
||||
import { RoletypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import LineClampedText from 'periscope/components/LineClampedText/LineClampedText';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { toAPIError } from 'utils/errorUtils';
|
||||
|
||||
import '../RolesSettings.styles.scss';
|
||||
|
||||
const PAGE_SIZE = 20;
|
||||
|
||||
type DisplayItem =
|
||||
| { type: 'section'; label: string; count?: number }
|
||||
| { type: 'role'; role: RoletypesRoleDTO };
|
||||
|
||||
interface RolesListingTableProps {
|
||||
searchQuery: string;
|
||||
}
|
||||
|
||||
function RolesListingTable({
|
||||
searchQuery,
|
||||
}: RolesListingTableProps): JSX.Element {
|
||||
const { data, isLoading, isError, error } = useListRoles();
|
||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
||||
const history = useHistory();
|
||||
const urlQuery = useUrlQuery();
|
||||
const pageParam = parseInt(urlQuery.get('page') ?? '1', 10);
|
||||
const currentPage = Number.isNaN(pageParam) || pageParam < 1 ? 1 : pageParam;
|
||||
|
||||
const setCurrentPage = useCallback(
|
||||
(page: number): void => {
|
||||
urlQuery.set('page', String(page));
|
||||
history.replace({ search: urlQuery.toString() });
|
||||
},
|
||||
[history, urlQuery],
|
||||
);
|
||||
|
||||
const roles = useMemo(() => data?.data?.data ?? [], [data]);
|
||||
|
||||
const formatTimestamp = (date?: Date | string): string => {
|
||||
if (!date) {
|
||||
return '—';
|
||||
}
|
||||
const d = new Date(date);
|
||||
|
||||
if (Number.isNaN(d.getTime())) {
|
||||
return '—';
|
||||
}
|
||||
|
||||
return formatTimezoneAdjustedTimestamp(date, DATE_TIME_FORMATS.DASH_DATETIME);
|
||||
};
|
||||
|
||||
const filteredRoles = useMemo(() => {
|
||||
if (!searchQuery.trim()) {
|
||||
return roles;
|
||||
}
|
||||
const query = searchQuery.toLowerCase();
|
||||
return roles.filter(
|
||||
(role) =>
|
||||
role.name?.toLowerCase().includes(query) ||
|
||||
role.description?.toLowerCase().includes(query),
|
||||
);
|
||||
}, [roles, searchQuery]);
|
||||
|
||||
const managedRoles = useMemo(
|
||||
() => filteredRoles.filter((role) => role.type?.toLowerCase() === 'managed'),
|
||||
[filteredRoles],
|
||||
);
|
||||
const customRoles = useMemo(
|
||||
() => filteredRoles.filter((role) => role.type?.toLowerCase() === 'custom'),
|
||||
[filteredRoles],
|
||||
);
|
||||
|
||||
// Combine managed + custom into a flat display list for pagination
|
||||
const displayList = useMemo((): DisplayItem[] => {
|
||||
const result: DisplayItem[] = [];
|
||||
|
||||
if (managedRoles.length > 0) {
|
||||
result.push({ type: 'section', label: 'Managed roles' });
|
||||
managedRoles.forEach((role) => result.push({ type: 'role', role }));
|
||||
}
|
||||
if (customRoles.length > 0) {
|
||||
result.push({
|
||||
type: 'section',
|
||||
label: 'Custom roles',
|
||||
count: customRoles.length,
|
||||
});
|
||||
customRoles.forEach((role) => result.push({ type: 'role', role }));
|
||||
}
|
||||
return result;
|
||||
}, [managedRoles, customRoles]);
|
||||
|
||||
const totalRoleCount = managedRoles.length + customRoles.length;
|
||||
|
||||
// Ensure current page is valid; if out of bounds, redirect to last available page
|
||||
useEffect(() => {
|
||||
if (isLoading || totalRoleCount === 0) {
|
||||
return;
|
||||
}
|
||||
const maxPage = Math.ceil(totalRoleCount / PAGE_SIZE);
|
||||
if (currentPage > maxPage) {
|
||||
setCurrentPage(maxPage);
|
||||
}
|
||||
}, [isLoading, totalRoleCount, currentPage, setCurrentPage]);
|
||||
|
||||
// Paginate: count only role items, but include section headers contextually
|
||||
const paginatedItems = useMemo((): DisplayItem[] => {
|
||||
const startRole = (currentPage - 1) * PAGE_SIZE;
|
||||
const endRole = startRole + PAGE_SIZE;
|
||||
let roleIndex = 0;
|
||||
let lastSection: DisplayItem | null = null;
|
||||
const result: DisplayItem[] = [];
|
||||
|
||||
for (const item of displayList) {
|
||||
if (item.type === 'section') {
|
||||
lastSection = item;
|
||||
} else {
|
||||
if (roleIndex >= startRole && roleIndex < endRole) {
|
||||
// Insert section header before first role in that section on this page
|
||||
if (lastSection) {
|
||||
result.push(lastSection);
|
||||
lastSection = null;
|
||||
}
|
||||
result.push(item);
|
||||
}
|
||||
roleIndex++;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}, [displayList, currentPage]);
|
||||
|
||||
const showPaginationItem = (total: number, range: number[]): JSX.Element => (
|
||||
<>
|
||||
<span className="numbers">
|
||||
{range[0]} — {range[1]}
|
||||
</span>
|
||||
<span className="total"> of {total}</span>
|
||||
</>
|
||||
);
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="roles-listing-table">
|
||||
<Skeleton active paragraph={{ rows: 5 }} />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="roles-listing-table">
|
||||
<ErrorInPlace
|
||||
error={toAPIError(
|
||||
error,
|
||||
'An unexpected error occurred while fetching roles.',
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (filteredRoles.length === 0) {
|
||||
return (
|
||||
<div className="roles-listing-table">
|
||||
<div className="roles-table-empty">
|
||||
{searchQuery ? 'No roles match your search.' : 'No roles found.'}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// todo: use table from periscope when its available for consumption
|
||||
const renderRow = (role: RoletypesRoleDTO): JSX.Element => (
|
||||
<div key={role.id} className="roles-table-row">
|
||||
<div className="roles-table-cell roles-table-cell--name">
|
||||
{role.name ?? '—'}
|
||||
</div>
|
||||
<div className="roles-table-cell roles-table-cell--description">
|
||||
<LineClampedText
|
||||
text={role.description ?? '—'}
|
||||
tooltipProps={{ overlayClassName: 'roles-description-tooltip' }}
|
||||
/>
|
||||
</div>
|
||||
<div className="roles-table-cell roles-table-cell--updated-at">
|
||||
{formatTimestamp(role.updatedAt)}
|
||||
</div>
|
||||
<div className="roles-table-cell roles-table-cell--created-at">
|
||||
{formatTimestamp(role.createdAt)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="roles-listing-table">
|
||||
<div className="roles-table-scroll-container">
|
||||
<div className="roles-table-inner">
|
||||
<div className="roles-table-header">
|
||||
<div className="roles-table-header-cell roles-table-header-cell--name">
|
||||
Name
|
||||
</div>
|
||||
<div className="roles-table-header-cell roles-table-header-cell--description">
|
||||
Description
|
||||
</div>
|
||||
<div className="roles-table-header-cell roles-table-header-cell--updated-at">
|
||||
Updated At
|
||||
</div>
|
||||
<div className="roles-table-header-cell roles-table-header-cell--created-at">
|
||||
Created At
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{paginatedItems.map((item) =>
|
||||
item.type === 'section' ? (
|
||||
<h3 key={`section-${item.label}`} className="roles-table-section-header">
|
||||
{item.label}
|
||||
{item.count !== undefined && (
|
||||
<span className="roles-table-section-header__count">{item.count}</span>
|
||||
)}
|
||||
</h3>
|
||||
) : (
|
||||
renderRow(item.role)
|
||||
),
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Pagination
|
||||
current={currentPage}
|
||||
pageSize={PAGE_SIZE}
|
||||
total={totalRoleCount}
|
||||
showTotal={showPaginationItem}
|
||||
showSizeChanger={false}
|
||||
hideOnSinglePage
|
||||
onChange={(page): void => setCurrentPage(page)}
|
||||
className="roles-table-pagination"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default RolesListingTable;
|
||||
@@ -1,238 +0,0 @@
|
||||
.roles-settings {
|
||||
.roles-settings-header {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
width: 100%;
|
||||
padding: 16px;
|
||||
|
||||
.roles-settings-header-title {
|
||||
color: var(--text-base-white);
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-size: 18px;
|
||||
font-weight: 500;
|
||||
line-height: 28px;
|
||||
letter-spacing: -0.09px;
|
||||
}
|
||||
|
||||
.roles-settings-header-description {
|
||||
color: var(--foreground);
|
||||
font-family: Inter;
|
||||
font-style: normal;
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
}
|
||||
|
||||
.roles-settings-content {
|
||||
padding: 0 16px;
|
||||
}
|
||||
|
||||
// todo: https://github.com/SigNoz/components/issues/116
|
||||
.roles-search-wrapper {
|
||||
input {
|
||||
width: 100%;
|
||||
background: var(--l3-background);
|
||||
border: 1px solid var(--l3-border);
|
||||
border-radius: 2px;
|
||||
padding: 6px 6px 6px 8px;
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
line-height: 18px;
|
||||
letter-spacing: -0.07px;
|
||||
color: var(--l1-foreground);
|
||||
outline: none;
|
||||
height: 32px;
|
||||
|
||||
&::placeholder {
|
||||
color: var(--l3-foreground);
|
||||
}
|
||||
|
||||
&:focus {
|
||||
border-color: var(--input);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.roles-description-tooltip {
|
||||
max-height: none;
|
||||
overflow-y: visible;
|
||||
}
|
||||
|
||||
.roles-listing-table {
|
||||
margin-top: 12px;
|
||||
border-radius: 4px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.roles-table-scroll-container {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.roles-table-inner {
|
||||
min-width: 850px;
|
||||
}
|
||||
|
||||
.roles-table-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 8px 16px;
|
||||
gap: 24px;
|
||||
}
|
||||
|
||||
.roles-table-header-cell {
|
||||
font-family: Inter;
|
||||
font-size: 11px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.44px;
|
||||
color: var(--foreground);
|
||||
line-height: 16px;
|
||||
|
||||
&--name {
|
||||
flex: 0 0 180px;
|
||||
}
|
||||
|
||||
&--description {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
&--created-at {
|
||||
flex: 0 0 180px;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
&--updated-at {
|
||||
flex: 0 0 180px;
|
||||
text-align: right;
|
||||
}
|
||||
}
|
||||
|
||||
.roles-table-section-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
height: 32px;
|
||||
padding: 0 16px;
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
border-top: 1px solid var(--secondary);
|
||||
border-bottom: 1px solid var(--secondary);
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
color: var(--foreground);
|
||||
line-height: 16px;
|
||||
margin: 0;
|
||||
|
||||
&__count {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
min-width: 20px;
|
||||
height: 18px;
|
||||
padding: 0 6px;
|
||||
border-radius: 9px;
|
||||
background: var(--l3-background);
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
color: var(--foreground);
|
||||
line-height: 1;
|
||||
}
|
||||
}
|
||||
|
||||
.roles-table-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 8px 16px;
|
||||
background: rgba(171, 189, 255, 0.02);
|
||||
border-bottom: 1px solid var(--secondary);
|
||||
gap: 24px;
|
||||
}
|
||||
|
||||
.roles-table-cell {
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
color: var(--foreground);
|
||||
line-height: 20px;
|
||||
|
||||
&--name {
|
||||
flex: 0 0 180px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
&--description {
|
||||
flex: 1;
|
||||
min-width: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
&--created-at {
|
||||
flex: 0 0 180px;
|
||||
text-align: right;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
&--updated-at {
|
||||
flex: 0 0 180px;
|
||||
text-align: right;
|
||||
white-space: nowrap;
|
||||
}
|
||||
}
|
||||
|
||||
.roles-table-empty {
|
||||
padding: 24px 16px;
|
||||
text-align: center;
|
||||
color: var(--foreground);
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.roles-table-pagination {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: flex-end;
|
||||
padding: 8px 16px;
|
||||
|
||||
.ant-pagination-total-text {
|
||||
margin-right: auto;
|
||||
|
||||
.numbers {
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
color: var(--foreground);
|
||||
}
|
||||
|
||||
.total {
|
||||
font-family: Inter;
|
||||
font-size: 12px;
|
||||
color: var(--foreground);
|
||||
opacity: 0.5;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.roles-settings {
|
||||
.roles-settings-header {
|
||||
.roles-settings-header-title {
|
||||
color: var(--text-base-black);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.roles-table-section-header {
|
||||
background: rgba(0, 0, 0, 0.03);
|
||||
}
|
||||
|
||||
.roles-table-row {
|
||||
background: rgba(0, 0, 0, 0.01);
|
||||
}
|
||||
}
|
||||
@@ -1,34 +0,0 @@
import { useState } from 'react';
import { Input } from '@signozhq/input';

import RolesListingTable from './RolesComponents/RolesListingTable';

import './RolesSettings.styles.scss';

function RolesSettings(): JSX.Element {
	const [searchQuery, setSearchQuery] = useState('');

	return (
		<div className="roles-settings" data-testid="roles-settings">
			<div className="roles-settings-header">
				<h3 className="roles-settings-header-title">Roles</h3>
				<p className="roles-settings-header-description">
					Create and manage custom roles for your team.
				</p>
			</div>
			<div className="roles-settings-content">
				<div className="roles-search-wrapper">
					<Input
						type="search"
						placeholder="Search for roles..."
						value={searchQuery}
						onChange={(e): void => setSearchQuery(e.target.value)}
					/>
				</div>
				<RolesListingTable searchQuery={searchQuery} />
			</div>
		</div>
	);
}

export default RolesSettings;
@@ -1,232 +0,0 @@
|
||||
import {
|
||||
allRoles,
|
||||
listRolesSuccessResponse,
|
||||
} from 'mocks-server/__mockdata__/roles';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { render, screen, userEvent } from 'tests/test-utils';
|
||||
|
||||
import RolesSettings from '../RolesSettings';
|
||||
|
||||
const rolesApiURL = 'http://localhost/api/v1/roles';
|
||||
|
||||
describe('RolesSettings', () => {
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders the header and search input', () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(screen.getByText('Roles')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText('Create and manage custom roles for your team.'),
|
||||
).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByPlaceholderText('Search for roles...'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('displays roles grouped by managed and custom sections', async () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('signoz-admin')).toBeInTheDocument();
|
||||
|
||||
// Section headers
|
||||
expect(screen.getByText('Managed roles')).toBeInTheDocument();
|
||||
expect(screen.getByText('Custom roles')).toBeInTheDocument();
|
||||
|
||||
// Managed roles
|
||||
expect(screen.getByText('signoz-admin')).toBeInTheDocument();
|
||||
expect(screen.getByText('signoz-editor')).toBeInTheDocument();
|
||||
expect(screen.getByText('signoz-viewer')).toBeInTheDocument();
|
||||
|
||||
// Custom roles
|
||||
expect(screen.getByText('billing-manager')).toBeInTheDocument();
|
||||
expect(screen.getByText('dashboard-creator')).toBeInTheDocument();
|
||||
|
||||
// Custom roles count badge
|
||||
expect(screen.getByText('2')).toBeInTheDocument();
|
||||
|
||||
// Column headers
|
||||
expect(screen.getByText('Name')).toBeInTheDocument();
|
||||
expect(screen.getByText('Description')).toBeInTheDocument();
|
||||
expect(screen.getByText('Updated At')).toBeInTheDocument();
|
||||
expect(screen.getByText('Created At')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('filters roles by search query on name', async () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('signoz-admin')).toBeInTheDocument();
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const searchInput = screen.getByPlaceholderText('Search for roles...');
|
||||
|
||||
await user.type(searchInput, 'billing');
|
||||
|
||||
expect(await screen.findByText('billing-manager')).toBeInTheDocument();
|
||||
expect(screen.queryByText('signoz-admin')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText('signoz-editor')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText('dashboard-creator')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('filters roles by search query on description', async () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('signoz-admin')).toBeInTheDocument();
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const searchInput = screen.getByPlaceholderText('Search for roles...');
|
||||
|
||||
await user.type(searchInput, 'read-only');
|
||||
|
||||
expect(await screen.findByText('signoz-viewer')).toBeInTheDocument();
|
||||
expect(screen.queryByText('signoz-admin')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText('billing-manager')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows empty state when search matches nothing', async () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('signoz-admin')).toBeInTheDocument();
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
const searchInput = screen.getByPlaceholderText('Search for roles...');
|
||||
|
||||
await user.type(searchInput, 'nonexistentrole');
|
||||
|
||||
expect(
|
||||
await screen.findByText('No roles match your search.'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows loading skeleton while fetching', () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.delay(200), ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(document.querySelector('.ant-skeleton')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows error state when API fails', async () => {
|
||||
const errorMessage = 'Failed to fetch roles';
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(
|
||||
ctx.status(500),
|
||||
ctx.json({
|
||||
error: {
|
||||
code: 'INTERNAL_ERROR',
|
||||
message: errorMessage,
|
||||
url: '',
|
||||
errors: [],
|
||||
},
|
||||
}),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText(errorMessage)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows empty state when API returns no roles', async () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json({ status: 'success', data: [] })),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('No roles found.')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders descriptions for all roles', async () => {
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('signoz-admin')).toBeInTheDocument();
|
||||
|
||||
for (const role of allRoles) {
|
||||
if (role.description) {
|
||||
expect(screen.getByText(role.description)).toBeInTheDocument();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('handles invalid dates gracefully by showing fallback', async () => {
|
||||
const invalidRole = {
|
||||
id: 'edge-0009',
|
||||
createdAt: ('invalid-date' as unknown) as Date,
|
||||
updatedAt: ('not-a-date' as unknown) as Date,
|
||||
name: 'invalid-date-role',
|
||||
description: 'Tests date parsing fallback.',
|
||||
type: 'custom',
|
||||
orgId: 'org-001',
|
||||
};
|
||||
|
||||
server.use(
|
||||
rest.get(rolesApiURL, (_req, res, ctx) =>
|
||||
res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
status: 'success',
|
||||
data: [invalidRole],
|
||||
}),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
render(<RolesSettings />);
|
||||
|
||||
expect(await screen.findByText('invalid-date-role')).toBeInTheDocument();
|
||||
|
||||
// Verify the "—" (em-dash) fallback is shown for both cells
|
||||
const dashFallback = screen.getAllByText('—');
|
||||
// In renderRow: name, description, updatedAt, createdAt.
|
||||
// Total dashes expected: 2 (for both dates)
|
||||
expect(dashFallback.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
@@ -1 +0,0 @@
export { default } from './RolesSettings';
@@ -28,7 +28,6 @@ import {
	ScrollText,
	Search,
	Settings,
	Shield,
	Slack,
	Unplug,
	User,
@@ -313,13 +312,6 @@ export const settingsMenuItems: SidebarItem[] = [
		isEnabled: false,
		itemKey: 'billing',
	},
	{
		key: ROUTES.ROLES_SETTINGS,
		label: 'Roles',
		icon: <Shield size={16} />,
		isEnabled: false,
		itemKey: 'roles',
	},
	{
		key: ROUTES.ORG_SETTINGS,
		label: 'Members & SSO',

@@ -159,7 +159,6 @@ export const routesToSkip = [
	ROUTES.ERROR_DETAIL,
	ROUTES.LOGS_PIPELINES,
	ROUTES.BILLING,
	ROUTES.ROLES_SETTINGS,
	ROUTES.SUPPORT,
	ROUTES.WORKSPACE_LOCKED,
	ROUTES.WORKSPACE_SUSPENDED,
@@ -1,61 +0,0 @@
import { useMemo } from 'react';
import { Form } from 'antd';

export interface CollapseSectionErrors {
	hasErrors: boolean;
	errorMessages: string[];
}

/**
 * Detects validation errors in a form section
 * @param fieldNamePrefix - Field path prefix for the section
 * @param specificFields - Optional specific field prefixes to check (uses prefix matching)
 */
export function useCollapseSectionErrors(
	fieldNamePrefix: string[],
	specificFields?: string[][],
): CollapseSectionErrors {
	const form = Form.useFormInstance();
	// Refer: https://github.com/SigNoz/signoz/pull/10276#discussion_r2819372174
	Form.useWatch([], form);

	// eslint-disable-next-line sonarjs/cognitive-complexity
	return useMemo(() => {
		const fieldErrors = form.getFieldsError();
		const messages: string[] = [];

		if (specificFields?.length) {
			fieldErrors.forEach((field) => {
				const fieldPath = Array.isArray(field.name) ? field.name : [field.name];

				const isMatch = specificFields.some((specificField) => {
					if (fieldPath.length < specificField.length) {
						return false;
					}
					return specificField.every((part, idx) => fieldPath[idx] === part);
				});

				if (isMatch && field.errors.length > 0) {
					field.errors.forEach((error) => messages.push(error));
				}
			});
		} else {
			const prefixPath = fieldNamePrefix.join('.');

			fieldErrors.forEach((field) => {
				const fieldPath = Array.isArray(field.name)
					? field.name.join('.')
					: String(field.name);

				if (fieldPath.startsWith(prefixPath) && field.errors.length > 0) {
					field.errors.forEach((error) => messages.push(error));
				}
			});
		}

		return {
			hasErrors: messages.length > 0,
			errorMessages: messages,
		};
	}, [form, fieldNamePrefix, specificFields]);
}
@@ -1,64 +0,0 @@
import { RoletypesRoleDTO } from 'api/generated/services/sigNoz.schemas';

const orgId = '019ba2bb-2fa1-7b24-8159-cfca08617ef9';

export const managedRoles: RoletypesRoleDTO[] = [
	{
		id: '019c24aa-2248-756f-9833-984f1ab63819',
		createdAt: new Date('2026-02-03T18:00:55.624356Z'),
		updatedAt: new Date('2026-02-03T18:00:55.624356Z'),
		name: 'signoz-admin',
		description:
			'Role assigned to users who have full administrative access to SigNoz resources.',
		type: 'managed',
		orgId,
	},
	{
		id: '019c24aa-2248-757c-9faf-7b1e899751e0',
		createdAt: new Date('2026-02-03T18:00:55.624359Z'),
		updatedAt: new Date('2026-02-03T18:00:55.624359Z'),
		name: 'signoz-editor',
		description:
			'Role assigned to users who can create, edit, and manage SigNoz resources but do not have full administrative privileges.',
		type: 'managed',
		orgId,
	},
	{
		id: '019c24aa-2248-7585-a129-4188b3473c27',
		createdAt: new Date('2026-02-03T18:00:55.624362Z'),
		updatedAt: new Date('2026-02-03T18:00:55.624362Z'),
		name: 'signoz-viewer',
		description:
			'Role assigned to users who have read-only access to SigNoz resources.',
		type: 'managed',
		orgId,
	},
];

export const customRoles: RoletypesRoleDTO[] = [
	{
		id: '019c24aa-3333-0001-aaaa-111111111111',
		createdAt: new Date('2026-02-10T10:30:00.000Z'),
		updatedAt: new Date('2026-02-12T14:20:00.000Z'),
		name: 'billing-manager',
		description: 'Custom role for managing billing and invoices.',
		type: 'custom',
		orgId,
	},
	{
		id: '019c24aa-3333-0002-bbbb-222222222222',
		createdAt: new Date('2026-02-11T09:00:00.000Z'),
		updatedAt: new Date('2026-02-13T11:45:00.000Z'),
		name: 'dashboard-creator',
		description: 'Custom role allowing users to create and manage dashboards.',
		type: 'custom',
		orgId,
	},
];

export const allRoles: RoletypesRoleDTO[] = [...managedRoles, ...customRoles];

export const listRolesSuccessResponse = {
	status: 'success',
	data: allRoles,
};
@@ -77,7 +77,6 @@ function SettingsPage(): JSX.Element {
				...item,
				isEnabled:
					item.key === ROUTES.BILLING ||
					item.key === ROUTES.ROLES_SETTINGS ||
					item.key === ROUTES.INTEGRATIONS ||
					item.key === ROUTES.CUSTOM_DOMAIN_SETTINGS ||
					item.key === ROUTES.API_KEYS ||
@@ -108,7 +107,6 @@ function SettingsPage(): JSX.Element {
				...item,
				isEnabled:
					item.key === ROUTES.BILLING ||
					item.key === ROUTES.ROLES_SETTINGS ||
					item.key === ROUTES.INTEGRATIONS ||
					item.key === ROUTES.API_KEYS ||
					item.key === ROUTES.ORG_SETTINGS ||
@@ -136,9 +134,7 @@ function SettingsPage(): JSX.Element {
		updatedItems = updatedItems.map((item) => ({
			...item,
			isEnabled:
				item.key === ROUTES.API_KEYS ||
				item.key === ROUTES.ORG_SETTINGS ||
				item.key === ROUTES.ROLES_SETTINGS
				item.key === ROUTES.API_KEYS || item.key === ROUTES.ORG_SETTINGS
					? true
					: item.isEnabled,
		}));

@@ -12,7 +12,6 @@ import IngestionSettings from 'container/IngestionSettings/IngestionSettings';
import MultiIngestionSettings from 'container/IngestionSettings/MultiIngestionSettings';
import MySettings from 'container/MySettings';
import OrganizationSettings from 'container/OrganizationSettings';
import RolesSettings from 'container/RolesSettings';
import { TFunction } from 'i18next';
import {
	Backpack,
@@ -25,7 +24,6 @@ import {
	KeySquare,
	Pencil,
	Plus,
	Shield,
	User,
} from 'lucide-react';
import ChannelsEdit from 'pages/ChannelsEdit';
@@ -150,19 +148,6 @@ export const billingSettings = (t: TFunction): RouteTabProps['routes'] => [
	},
];

export const rolesSettings = (t: TFunction): RouteTabProps['routes'] => [
	{
		Component: RolesSettings,
		name: (
			<div className="periscope-tab">
				<Shield size={16} /> {t('routes:roles').toString()}
			</div>
		),
		route: ROUTES.ROLES_SETTINGS,
		key: ROUTES.ROLES_SETTINGS,
	},
];

export const keyboardShortcuts = (t: TFunction): RouteTabProps['routes'] => [
	{
		Component: Shortcuts,

@@ -15,7 +15,6 @@ import {
	multiIngestionSettings,
	mySettings,
	organizationSettings,
	rolesSettings,
} from './config';

export const getRoutes = (
@@ -67,10 +66,6 @@ export const getRoutes = (
		settings.push(...customDomainSettings(t), ...billingSettings(t));
	}

	if (isAdmin) {
		settings.push(...rolesSettings(t));
	}

	settings.push(
		...mySettings(t),
		...createAlertChannels(t),
@@ -1,8 +1,3 @@
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp } from 'types/api';
import APIError from 'types/api/error';

/**
 * Extracts HTTP status code from various error types
 * @param error - The error object (could be APIError, AxiosError, or other error types)
@@ -33,25 +28,3 @@ export const isRetryableError = (error: any): boolean => {
	// If no status code is available, default to retryable
	return !statusCode || statusCode >= 500;
};

export function toAPIError(
	error: unknown,
	defaultMessage = 'An unexpected error occurred.',
): APIError {
	try {
		ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
	} catch (apiError) {
		if (apiError instanceof APIError) {
			return apiError;
		}
	}
	return new APIError({
		httpStatusCode: 500,
		error: {
			code: 'UNKNOWN_ERROR',
			message: defaultMessage,
			url: '',
			errors: [],
		},
	});
}
@@ -97,7 +97,6 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
	GET_STARTED_AZURE_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
	WORKSPACE_LOCKED: ['ADMIN', 'EDITOR', 'VIEWER'],
	WORKSPACE_SUSPENDED: ['ADMIN', 'EDITOR', 'VIEWER'],
	ROLES_SETTINGS: ['ADMIN'],
	BILLING: ['ADMIN'],
	SUPPORT: ['ADMIN', 'EDITOR', 'VIEWER'],
	SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'],

@@ -4479,19 +4479,6 @@
  dependencies:
    "@radix-ui/react-compose-refs" "1.1.2"

"@radix-ui/react-switch@^1.1.4":
  version "1.2.6"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-switch/-/react-switch-1.2.6.tgz#ff79acb831f0d5ea9216cfcc5b939912571358e3"
  integrity sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==
  dependencies:
    "@radix-ui/primitive" "1.1.3"
    "@radix-ui/react-compose-refs" "1.1.2"
    "@radix-ui/react-context" "1.1.2"
    "@radix-ui/react-primitive" "2.1.3"
    "@radix-ui/react-use-controllable-state" "1.2.2"
    "@radix-ui/react-use-previous" "1.1.1"
    "@radix-ui/react-use-size" "1.1.1"

"@radix-ui/react-tabs@1.0.4":
  version "1.0.4"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-tabs/-/react-tabs-1.0.4.tgz#993608eec55a5d1deddd446fa9978d2bc1053da2"
@@ -5177,20 +5164,6 @@
    tailwind-merge "^2.5.2"
    tailwindcss-animate "^1.0.7"

"@signozhq/switch@0.0.2":
  version "0.0.2"
  resolved "https://registry.yarnpkg.com/@signozhq/switch/-/switch-0.0.2.tgz#58003ce9c0cd1f2ad8266a7045182607ce51df76"
  integrity sha512-3B3Y5dzIyepO6EQJ7agx97bPmwg1dcOY46q2lqviHnMxNk3Sv079nSNCaztjQlo0VR0qu2JgVXhWi5Lw9WBN8A==
  dependencies:
    "@radix-ui/react-icons" "^1.3.0"
    "@radix-ui/react-slot" "^1.1.0"
    "@radix-ui/react-switch" "^1.1.4"
    class-variance-authority "^0.7.0"
    clsx "^2.1.1"
    lucide-react "^0.445.0"
    tailwind-merge "^2.5.2"
    tailwindcss-animate "^1.0.7"

"@signozhq/table@0.3.7":
  version "0.3.7"
  resolved "https://registry.yarnpkg.com/@signozhq/table/-/table-0.3.7.tgz#895b710c02af124dfb5117e02bbc6d80ce062063"
287 go.mod
@@ -3,22 +3,23 @@ module github.com/SigNoz/signoz
go 1.24.0

require (
	dario.cat/mergo v1.0.2
	dario.cat/mergo v1.0.1
	github.com/AfterShip/clickhouse-sql-parser v0.4.16
	github.com/ClickHouse/clickhouse-go/v2 v2.40.1
	github.com/DATA-DOG/go-sqlmock v1.5.2
	github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
	github.com/SigNoz/signoz-otel-collector v0.142.1-rc.1
	github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
	github.com/antlr4-go/antlr/v4 v4.13.1
	github.com/antonmedv/expr v1.15.3
	github.com/bytedance/sonic v1.14.1
	github.com/cespare/xxhash/v2 v2.3.0
	github.com/coreos/go-oidc/v3 v3.16.0
	github.com/coreos/go-oidc/v3 v3.14.1
	github.com/dgraph-io/ristretto/v2 v2.3.0
	github.com/dustin/go-humanize v1.0.1
	github.com/gin-gonic/gin v1.11.0
	github.com/go-co-op/gocron v1.30.1
	github.com/go-openapi/runtime v0.28.0
	github.com/go-openapi/strfmt v0.24.0
	github.com/go-openapi/strfmt v0.23.0
	github.com/go-redis/redismock/v9 v9.2.0
	github.com/go-viper/mapstructure/v2 v2.4.0
	github.com/gojek/heimdall/v7 v7.0.3
@@ -29,22 +30,22 @@ require (
	github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
	github.com/huandu/go-sqlbuilder v1.35.0
	github.com/jackc/pgx/v5 v5.7.6
	github.com/json-iterator/go v1.1.13-0.20220915233716-71ac16282d12
	github.com/json-iterator/go v1.1.12
	github.com/knadh/koanf v1.5.0
	github.com/knadh/koanf/v2 v2.3.0
	github.com/mailru/easyjson v0.9.0
	github.com/open-telemetry/opamp-go v0.22.0
	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.142.0
	github.com/knadh/koanf/v2 v2.2.0
	github.com/mailru/easyjson v0.7.7
	github.com/open-telemetry/opamp-go v0.19.0
	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
	github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
	github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
	github.com/opentracing/opentracing-go v1.2.0
	github.com/pkg/errors v0.9.1
	github.com/prometheus/alertmanager v0.28.1
	github.com/prometheus/client_golang v1.23.2
	github.com/prometheus/common v0.67.4
	github.com/prometheus/prometheus v0.308.0
	github.com/prometheus/common v0.66.1
	github.com/prometheus/prometheus v0.304.1
	github.com/redis/go-redis/extra/redisotel/v9 v9.15.1
	github.com/redis/go-redis/v9 v9.17.2
	github.com/redis/go-redis/v9 v9.15.1
	github.com/rs/cors v1.11.1
	github.com/russellhaering/gosaml2 v0.9.0
	github.com/russellhaering/goxmldsig v1.2.0
@@ -53,7 +54,7 @@ require (
	github.com/sethvargo/go-password v0.2.0
	github.com/smartystreets/goconvey v1.8.1
	github.com/soheilhy/cmux v0.1.5
	github.com/spf13/cobra v1.10.2
	github.com/spf13/cobra v1.10.1
	github.com/srikanthccv/ClickHouse-go-mock v0.13.0
	github.com/stretchr/testify v1.11.1
	github.com/swaggest/jsonschema-go v0.3.78
@@ -63,59 +64,43 @@ require (
	github.com/uptrace/bun/dialect/pgdialect v1.2.9
	github.com/uptrace/bun/dialect/sqlitedialect v1.2.9
	github.com/uptrace/bun/extra/bunotel v1.2.9
	go.opentelemetry.io/collector/confmap v1.48.0
	go.opentelemetry.io/collector/otelcol v0.142.0
	go.opentelemetry.io/collector/pdata v1.48.0
	go.opentelemetry.io/collector/confmap v1.34.0
	go.opentelemetry.io/collector/otelcol v0.128.0
	go.opentelemetry.io/collector/pdata v1.34.0
	go.opentelemetry.io/contrib/config v0.10.0
	go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0
	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0
	go.opentelemetry.io/otel v1.39.0
	go.opentelemetry.io/otel/metric v1.39.0
	go.opentelemetry.io/otel/sdk v1.39.0
	go.opentelemetry.io/otel/trace v1.39.0
	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
	go.opentelemetry.io/otel v1.38.0
	go.opentelemetry.io/otel/metric v1.38.0
	go.opentelemetry.io/otel/sdk v1.38.0
	go.opentelemetry.io/otel/trace v1.38.0
	go.uber.org/multierr v1.11.0
	go.uber.org/zap v1.27.1
	go.uber.org/zap v1.27.0
	golang.org/x/crypto v0.46.0
	golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
	golang.org/x/net v0.48.0
	golang.org/x/oauth2 v0.33.0
	golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
	golang.org/x/net v0.47.0
	golang.org/x/oauth2 v0.30.0
	golang.org/x/sync v0.19.0
	golang.org/x/text v0.32.0
	google.golang.org/protobuf v1.36.10
	google.golang.org/protobuf v1.36.9
	gopkg.in/yaml.v2 v2.4.0
	gopkg.in/yaml.v3 v3.0.1
	k8s.io/apimachinery v0.35.0-alpha.0
	k8s.io/apimachinery v0.34.0
	modernc.org/sqlite v1.39.1
)

require (
	github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
	github.com/aws/aws-sdk-go-v2/config v1.32.1 // indirect
	github.com/aws/aws-sdk-go-v2/credentials v1.19.1 // indirect
	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
	github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
	github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
	github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 // indirect
	github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 // indirect
	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 // indirect
	github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 // indirect
	github.com/aws/smithy-go v1.23.2 // indirect
	github.com/bytedance/gopkg v0.1.3 // indirect
	github.com/bytedance/sonic v1.14.1 // indirect
	github.com/bytedance/sonic/loader v0.3.0 // indirect
	github.com/cloudwego/base64x v0.1.6 // indirect
	github.com/gabriel-vasile/mimetype v1.4.8 // indirect
	github.com/go-playground/locales v0.14.1 // indirect
	github.com/go-playground/universal-translator v0.18.1 // indirect
	github.com/go-playground/validator/v10 v10.27.0 // indirect
	github.com/goccy/go-yaml v1.19.0 // indirect
	github.com/goccy/go-yaml v1.18.0 // indirect
	github.com/leodido/go-urn v1.4.0 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/ncruces/go-strftime v0.1.9 // indirect
	github.com/prometheus/client_golang/exp v0.0.0-20250914183048-a974e0d45e0a // indirect
	github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
	github.com/swaggest/refl v1.4.0 // indirect
@@ -123,27 +108,24 @@ require (
	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
	github.com/ugorji/go/codec v1.3.0 // indirect
	github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
	go.opentelemetry.io/collector/client v1.48.0 // indirect
	go.opentelemetry.io/collector/config/configoptional v1.48.0 // indirect
	go.opentelemetry.io/collector/config/configretry v1.48.0 // indirect
	go.opentelemetry.io/collector/exporter/exporterhelper v0.142.0 // indirect
	go.opentelemetry.io/collector/pdata/xpdata v0.142.0 // indirect
	go.yaml.in/yaml/v2 v2.4.3 // indirect
	go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
	go.yaml.in/yaml/v2 v2.4.2 // indirect
	golang.org/x/arch v0.20.0 // indirect
	golang.org/x/tools/godoc v0.1.0-deprecated // indirect
	modernc.org/libc v1.66.10 // indirect
	modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
cel.dev/expr v0.25.1 // indirect
|
||||
cloud.google.com/go/auth v0.17.0 // indirect
|
||||
cel.dev/expr v0.24.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.1 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.9.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.8.2 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
|
||||
github.com/ClickHouse/ch-go v0.67.0 // indirect
|
||||
github.com/Masterminds/squirrel v1.5.4 // indirect
|
||||
github.com/Yiling-J/theine-go v0.6.2 // indirect
|
||||
@@ -151,35 +133,35 @@ require (
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/armon/go-metrics v0.4.1 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/aws/aws-sdk-go v1.55.8 // indirect
|
||||
github.com/aws/aws-sdk-go v1.55.7 // indirect
|
||||
github.com/beevik/etree v1.1.0 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
|
||||
github.com/coder/quartz v0.1.2 // indirect
|
||||
github.com/coreos/go-systemd/v22 v22.6.0 // indirect
|
||||
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dennwc/varint v1.0.0 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/ebitengine/purego v0.9.1 // indirect
|
||||
github.com/ebitengine/purego v0.8.4 // indirect
|
||||
github.com/edsrzf/mmap-go v1.2.0 // indirect
|
||||
github.com/elastic/lunes v0.2.0 // indirect
|
||||
github.com/elastic/lunes v0.1.0 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
||||
github.com/expr-lang/expr v1.17.7
|
||||
github.com/expr-lang/expr v1.17.5
|
||||
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/fsnotify/fsnotify v1.9.0 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.1.1 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-openapi/analysis v0.23.0 // indirect
|
||||
github.com/go-openapi/errors v0.22.3 // indirect
|
||||
github.com/go-openapi/errors v0.22.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.21.0 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.0 // indirect
|
||||
github.com/go-openapi/loads v0.22.0 // indirect
|
||||
@@ -196,19 +178,19 @@ require (
|
||||
github.com/google/btree v1.1.3 // indirect
|
||||
github.com/google/cel-go v0.26.1 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gopherjs/gopherjs v1.17.2 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20250905093917-f7b3be9d1853 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
|
||||
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-sockaddr v1.0.7 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||
github.com/hashicorp/golang-lru v1.0.2 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/memberlist v0.5.1 // indirect
|
||||
@@ -224,21 +206,21 @@ require (
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/jpillora/backoff v1.0.0 // indirect
|
||||
github.com/jtolds/gls v4.20.0+incompatible // indirect
|
||||
github.com/klauspost/compress v1.18.2 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
|
||||
github.com/leodido/go-syslog/v4 v4.3.0 // indirect
|
||||
github.com/leodido/go-syslog/v4 v4.2.0 // indirect
|
||||
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
|
||||
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
|
||||
github.com/magefile/mage v1.15.0 // indirect
|
||||
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
|
||||
github.com/mdlayher/socket v0.5.1 // indirect
|
||||
github.com/mdlayher/socket v0.4.1 // indirect
|
||||
github.com/mdlayher/vsock v1.2.1 // indirect
|
||||
github.com/mfridman/interpolate v0.0.2 // indirect
|
||||
github.com/miekg/dns v1.1.68 // indirect
|
||||
github.com/miekg/dns v1.1.65 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
@@ -247,14 +229,14 @@ require (
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
|
||||
github.com/natefinch/wrap v0.2.0 // indirect
|
||||
github.com/oklog/run v1.2.0 // indirect
|
||||
github.com/oklog/run v1.1.0 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/oklog/ulid/v2 v2.1.1
|
||||
github.com/open-feature/go-sdk v1.17.0
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
|
||||
github.com/openfga/openfga v1.10.1
|
||||
github.com/paulmach/orb v0.11.1 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
@@ -264,10 +246,10 @@ require (
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
|
||||
github.com/pressly/goose/v3 v3.25.0 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/exporter-toolkit v0.15.0 // indirect
|
||||
github.com/prometheus/otlptranslator v1.0.0 // indirect
|
||||
github.com/prometheus/procfs v0.19.2 // indirect
|
||||
github.com/prometheus/sigv4 v0.3.0 // indirect
|
||||
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
|
||||
github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
|
||||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/prometheus/sigv4 v0.1.2 // indirect
|
||||
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
|
||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
||||
github.com/sagikazarmark/locafero v0.9.0 // indirect
|
||||
@@ -275,7 +257,7 @@ require (
|
||||
github.com/segmentio/asm v1.2.0 // indirect
|
||||
github.com/segmentio/backo-go v1.0.1 // indirect
|
||||
github.com/sethvargo/go-retry v0.3.0 // indirect
|
||||
github.com/shirou/gopsutil/v4 v4.25.11 // indirect
|
||||
github.com/shirou/gopsutil/v4 v4.25.5 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
|
||||
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
|
||||
@@ -290,9 +272,9 @@ require (
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/swaggest/openapi-go v0.2.60
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.16 // indirect
|
||||
github.com/tklauser/numcpus v0.11.0 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.15 // indirect
|
||||
github.com/tklauser/numcpus v0.10.0 // indirect
|
||||
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
|
||||
github.com/trivago/tgo v1.0.7 // indirect
|
||||
github.com/valyala/fastjson v1.6.4 // indirect
|
||||
@@ -301,82 +283,83 @@ require (
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||
github.com/zeebo/xxh3 v1.0.2 // indirect
|
||||
go.mongodb.org/mongo-driver v1.17.4 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/collector/component v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componentstatus v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componenttest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configtelemetry v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/envprovider v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/xconfmap v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/connector v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/connectortest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/xconnector v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumererror v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumertest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/xconsumer v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/exportertest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/xexporter v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/extension v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensioncapabilities v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensiontest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/xextension v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/featuregate v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/fanoutconsumer v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/telemetry v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/pprofile v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/testdata v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline/xpipeline v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/processor v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processorhelper v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processortest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/xprocessor v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receiverhelper v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receivertest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/xreceiver v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/semconv v0.128.1-0.20250610090210-188191247685
|
||||
go.opentelemetry.io/collector/service v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/service/hostcapabilities v0.142.0 // indirect
|
||||
go.opentelemetry.io/contrib/bridges/otelzap v0.13.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect
|
||||
go.opentelemetry.io/contrib/otelconf v0.18.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/prometheus v0.60.0
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/log v0.15.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/log v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/metric v1.39.0
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.17.1 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/collector/component v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componentstatus v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componenttest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configtelemetry v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/envprovider v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/connector v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/connectortest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/xconnector v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumererror v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumertest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/exportertest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/xexporter v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/extension v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensioncapabilities v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensiontest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/xextension v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/featuregate v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/fanoutconsumer v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/pprofile v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/testdata v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline/xpipeline v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/processor v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processorhelper v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processortest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/xprocessor v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/semconv v0.128.0
|
||||
go.opentelemetry.io/collector/service v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect
|
||||
go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 // indirect
|
||||
go.opentelemetry.io/contrib/otelconf v0.16.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/prometheus v0.58.0
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/log v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0
|
||||
go.opentelemetry.io/proto/otlp v1.8.0 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/mock v0.6.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/mod v0.30.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/time v0.11.0 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
gonum.org/v1/gonum v0.16.0 // indirect
|
||||
google.golang.org/api v0.257.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect
|
||||
google.golang.org/grpc v1.77.0 // indirect
|
||||
google.golang.org/api v0.236.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/grpc v1.75.1 // indirect
|
||||
gopkg.in/telebot.v3 v3.3.8 // indirect
|
||||
k8s.io/client-go v0.34.2 // indirect
|
||||
k8s.io/client-go v0.34.0 // indirect
|
||||
k8s.io/klog/v2 v2.130.1 // indirect
|
||||
k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect
|
||||
sigs.k8s.io/yaml v1.6.0 // indirect
|
||||
)

replace github.com/expr-lang/expr => github.com/SigNoz/expr v1.17.7-beta

@@ -22,7 +22,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
@@ -45,7 +44,6 @@ type provider struct {
|
||||
gatewayHandler gateway.Handler
|
||||
fieldsHandler fields.Handler
|
||||
authzHandler authz.Handler
|
||||
zeusHandler zeus.Handler
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -65,7 +63,6 @@ func NewFactory(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(
|
||||
@@ -88,7 +85,6 @@ func NewFactory(
|
||||
gatewayHandler,
|
||||
fieldsHandler,
|
||||
authzHandler,
|
||||
zeusHandler,
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -113,7 +109,6 @@ func newProvider(
|
||||
gatewayHandler gateway.Handler,
|
||||
fieldsHandler fields.Handler,
|
||||
authzHandler authz.Handler,
|
||||
zeusHandler zeus.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
@@ -136,7 +131,6 @@ func newProvider(
|
||||
gatewayHandler: gatewayHandler,
|
||||
fieldsHandler: fieldsHandler,
|
||||
authzHandler: authzHandler,
|
||||
zeusHandler: zeusHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
@@ -205,10 +199,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addZeusRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -1,65 +0,0 @@
|
||||
package signozapiserver
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func (provider *provider) addZeusRoutes(router *mux.Router) error {
|
||||
if err := router.Handle("/api/v2/zeus/profiles", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.PutProfile), handler.OpenAPIDef{
|
||||
ID: "PutProfile",
|
||||
Tags: []string{"zeus"},
|
||||
Summary: "Put profile in Zeus for a deployment.",
|
||||
Description: "This endpoint saves the profile of a deployment to zeus.",
|
||||
Request: new(zeustypes.PostableProfile),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/zeus/hosts", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.GetHosts), handler.OpenAPIDef{
|
||||
ID: "GetHosts",
|
||||
Tags: []string{"zeus"},
|
||||
Summary: "Get host info from Zeus.",
|
||||
Description: "This endpoint gets the host info from zeus.",
|
||||
Request: nil,
|
||||
RequestContentType: "",
|
||||
Response: new(zeustypes.GettableHost),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusOK,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodGet).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := router.Handle("/api/v2/zeus/hosts", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.PutHost), handler.OpenAPIDef{
|
||||
ID: "PutHost",
|
||||
Tags: []string{"zeus"},
|
||||
Summary: "Put host in Zeus for a deployment.",
|
||||
Description: "This endpoint saves the host of a deployment to zeus.",
|
||||
Request: new(zeustypes.PostableHost),
|
||||
RequestContentType: "application/json",
|
||||
Response: nil,
|
||||
ResponseContentType: "",
|
||||
SuccessStatusCode: http.StatusNoContent,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
})).Methods(http.MethodPut).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
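The route registrations above all follow the same pattern: wrap the handler with admin-only authorization, attach an OpenAPIDef, restrict the HTTP method, and check the route's recorded error. Below is a minimal, self-contained sketch of that gorilla/mux idiom; the path and handler are hypothetical stand-ins, and the authz and OpenAPI wrapping from the real code is omitted.

package main

import (
	"log"
	"net/http"

	"github.com/gorilla/mux"
)

func registerExampleRoute(router *mux.Router) error {
	// hypothetical stand-in for handler.New(provider.authZ.AdminAccess(...), handler.OpenAPIDef{...})
	h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusNoContent)
	})

	// mux records configuration errors (for example a malformed path template)
	// on the route; GetError surfaces them, mirroring the checks above.
	if err := router.Handle("/api/v2/example", h).Methods(http.MethodPut).GetError(); err != nil {
		return err
	}
	return nil
}

func main() {
	router := mux.NewRouter().UseEncodedPath()
	if err := registerExampleRoute(router); err != nil {
		log.Fatal(err)
	}
	log.Println("route registered")
}
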
@@ -692,18 +692,18 @@ func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter,
	}

	opts := querybuilder.FilterExprVisitorOpts{
		Context: ctx,
		Logger: m.logger,
		FieldMapper: m.fieldMapper,
		ConditionBuilder: m.condBuilder,
		FullTextColumn: &telemetrytypes.TelemetryFieldKey{
			Name: "labels"},
		FieldKeys: keys,
		StartNs: querybuilder.ToNanoSecs(uint64(startMillis)),
		EndNs: querybuilder.ToNanoSecs(uint64(endMillis)),
	}

	startNs := querybuilder.ToNanoSecs(uint64(startMillis))
	endNs := querybuilder.ToNanoSecs(uint64(endMillis))

	whereClause, err := querybuilder.PrepareWhereClause(expression, opts, startNs, endNs)
	whereClause, err := querybuilder.PrepareWhereClause(expression, opts)
	if err != nil {
		return nil, err
	}

@@ -76,7 +76,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype

	// add the paths that are not promoted but have indexes
	for path, indexes := range aggr {
		path := strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
		path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
		path = telemetrytypes.BodyJSONStringSearchPrefix + path
		response = append(response, promotetypes.PromotePath{
			Path: path,
@@ -156,7 +156,7 @@ func (m *module) PromoteAndIndexPaths(
			}
		}
		if len(it.Indexes) > 0 {
			parentColumn := telemetrylogs.LogsV2BodyV2Column
			parentColumn := telemetrylogs.LogsV2BodyJSONColumn
			// if the path is already promoted or is being promoted, add it to the promoted column
			if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
				parentColumn = telemetrylogs.LogsV2BodyPromotedColumn

@@ -87,24 +87,6 @@ func (client *client) Read(ctx context.Context, query *prompb.Query, sortSeries
|
||||
return remote.FromQueryResult(sortSeries, res), nil
|
||||
}
|
||||
|
||||
func (c *client) ReadMultiple(ctx context.Context, queries []*prompb.Query, sortSeries bool) (storage.SeriesSet, error) {
|
||||
if len(queries) == 0 {
|
||||
return storage.EmptySeriesSet(), nil
|
||||
}
|
||||
if len(queries) == 1 {
|
||||
return c.Read(ctx, queries[0], sortSeries)
|
||||
}
|
||||
sets := make([]storage.SeriesSet, 0, len(queries))
|
||||
for _, q := range queries {
|
||||
ss, err := c.Read(ctx, q, sortSeries)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sets = append(sets, ss)
|
||||
}
|
||||
return storage.NewMergeSeriesSet(sets, 0, storage.ChainedSeriesMerge), nil
|
||||
}
|
||||
|
||||
func (client *client) queryToClickhouseQuery(_ context.Context, query *prompb.Query, metricName string, subQuery bool) (string, []any, error) {
|
||||
var clickHouseQuery string
|
||||
var conditions []string
|
||||
|
||||
@@ -2,7 +2,6 @@ package prometheus

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

@@ -16,24 +15,30 @@ func RemoveExtraLabels(res *promql.Result, labelsToRemove ...string) error {
		toRemove[l] = struct{}{}
	}

	dropLabels := func(metric labels.Labels) labels.Labels {
		b := labels.NewBuilder(metric)
		for name := range toRemove {
			b.Del(name)
		}
		return b.Labels()
	}

	switch res.Value.(type) {
	case promql.Vector:
		value := res.Value.(promql.Vector)
		for i := range value {
			(value)[i].Metric = dropLabels((value)[i].Metric)
			series := &(value)[i]
			dst := series.Metric[:0]
			for _, lbl := range series.Metric {
				if _, drop := toRemove[lbl.Name]; !drop {
					dst = append(dst, lbl)
				}
			}
			series.Metric = dst
		}
	case promql.Matrix:
		value := res.Value.(promql.Matrix)
		for i := range value {
			(value)[i].Metric = dropLabels((value)[i].Metric)
			series := &(value)[i]
			dst := series.Metric[:0]
			for _, lbl := range series.Metric {
				if _, drop := toRemove[lbl.Name]; !drop {
					dst = append(dst, lbl)
				}
			}
			series.Metric = dst
		}
	case promql.Scalar:
		return nil

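One side of the RemoveExtraLabels hunk above filters each series' label set in place instead of going through labels.NewBuilder. The snippet below is a small, self-contained illustration of that slice-compaction idiom using a local Label stand-in type (not the Prometheus one): the kept elements are written back into the same backing array, so no new slice is allocated per series.

package main

import "fmt"

type Label struct {
	Name, Value string
}

// dropLabels keeps only the labels whose names are not in toRemove,
// reusing the backing array of the input slice.
func dropLabels(metric []Label, toRemove map[string]struct{}) []Label {
	dst := metric[:0]
	for _, lbl := range metric {
		if _, drop := toRemove[lbl.Name]; !drop {
			dst = append(dst, lbl)
		}
	}
	return dst
}

func main() {
	metric := []Label{{"__name__", "up"}, {"le", "0.5"}, {"job", "api"}}
	filtered := dropLabels(metric, map[string]struct{}{"le": {}})
	fmt.Println(filtered) // [{__name__ up} {job api}]
}
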
@@ -10,9 +10,11 @@ import (

	"github.com/ClickHouse/clickhouse-go/v2"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/bytedance/sonic"
)

type builderQuery[T any] struct {
@@ -248,6 +250,40 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
		return nil, err
	}

	// merge body_json and promoted into body
	if q.spec.Signal == telemetrytypes.SignalLogs {
		switch typedPayload := payload.(type) {
		case *qbtypes.RawData:
			for _, rr := range typedPayload.Rows {
				seeder := func() error {
					body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
					if !ok {
						return nil
					}
					promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
					if !ok {
						return nil
					}
					seed(promoted, body)
					str, err := sonic.MarshalString(body)
					if err != nil {
						return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
					}
					rr.Data["body"] = str
					return nil
				}
				err := seeder()
				if err != nil {
					return nil, err
				}

				delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
				delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
			}
			payload = typedPayload
		}
	}

	return &qbtypes.Result{
		Type: q.kind,
		Value: payload,
@@ -375,3 +411,18 @@ func decodeCursor(cur string) (int64, error) {
	}
	return strconv.ParseInt(string(b), 10, 64)
}

func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

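The seed helper introduced in the hunk above merges the promoted body column back into the body JSON map before the row is re-serialized: keys missing from body are copied over, nested maps are merged recursively, and scalar keys already present in body are left untouched. Below is a standalone sketch of that behaviour, reusing the seed implementation from the diff; the sample data is illustrative, and encoding/json is used here instead of sonic to keep the sketch dependency-free.

package main

import (
	"encoding/json"
	"fmt"
)

func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

func main() {
	body := map[string]any{"level": "info", "http": map[string]any{"status": 200}}
	promoted := map[string]any{"user": "alice", "http": map[string]any{"route": "/login"}}

	// promoted keys are folded into body; nested maps are merged
	seed(promoted, body)

	out, _ := json.Marshal(body)
	fmt.Println(string(out))
	// {"http":{"route":"/login","status":200},"level":"info","user":"alice"}
}
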
@@ -14,6 +14,7 @@ import (
	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/bytedance/sonic"
)

var (
@@ -393,11 +394,17 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {

		// de-reference the typed pointer to any
		val := reflect.ValueOf(cellPtr).Elem().Interface()
		// Post-process JSON columns: normalize into String value

		// Post-process JSON columns: normalize into structured values
		if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
			switch x := val.(type) {
			case []byte:
				val = string(x)
				if len(x) > 0 {
					var v any
					if err := sonic.Unmarshal(x, &v); err == nil {
						val = v
					}
				}
			default:
				// already a structured type (map[string]any, []any, etc.)
			}

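In the readAsRaw hunk above, the two sides differ in how ClickHouse JSON columns are post-processed: one side stringifies the raw bytes, the other decodes them into structured Go values (map[string]any, []any, ...) and falls back to the raw value if decoding fails. A minimal sketch of that normalization step, using encoding/json instead of sonic and a hypothetical column-type string:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// normalizeJSONCell mirrors the normalization shown above: only columns whose
// database type name starts with "JSON" are touched, and only []byte payloads
// are decoded; anything else is returned as-is.
func normalizeJSONCell(databaseTypeName string, val any) any {
	if !strings.HasPrefix(strings.ToUpper(databaseTypeName), "JSON") {
		return val
	}
	if raw, ok := val.([]byte); ok && len(raw) > 0 {
		var v any
		if err := json.Unmarshal(raw, &v); err == nil {
			return v
		}
	}
	return val
}

func main() {
	cell := normalizeJSONCell("JSON", []byte(`{"severity":"warn","attempt":2}`))
	m := cell.(map[string]any)
	fmt.Println(m["severity"], m["attempt"]) // warn 2
}
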
@@ -16,7 +16,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
@@ -241,13 +240,13 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
|
||||
var series []*qbv5.TimeSeries
|
||||
for _, v := range matrix {
|
||||
var s qbv5.TimeSeries
|
||||
lbls := make([]*qbv5.Label, 0, v.Metric.Len())
|
||||
v.Metric.Range(func(l labels.Label) {
|
||||
lbls := make([]*qbv5.Label, 0, len(v.Metric))
|
||||
for name, value := range v.Metric.Copy().Map() {
|
||||
lbls = append(lbls, &qbv5.Label{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: l.Name},
|
||||
Value: l.Value,
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: name},
|
||||
Value: value,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
s.Labels = lbls
|
||||
|
||||
|
||||
@@ -66,6 +66,7 @@ func newProvider(
|
||||
telemetrylogs.LogResourceKeysTblName,
|
||||
telemetrymetadata.DBName,
|
||||
telemetrymetadata.AttributesMetadataLocalTableName,
|
||||
telemetrymetadata.ColumnEvolutionMetadataTableName,
|
||||
)
|
||||
|
||||
// Create trace statement builder
|
||||
|
||||
@@ -19,7 +19,6 @@ import (
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
)
|
||||
|
||||
@@ -462,12 +461,12 @@ func toCommonSeries(series promql.Series) v3.Series {
|
||||
Points: make([]v3.Point, 0),
|
||||
}
|
||||
|
||||
series.Metric.Range(func(l labels.Label) {
|
||||
commonSeries.Labels[l.Name] = l.Value
|
||||
for _, lbl := range series.Metric {
|
||||
commonSeries.Labels[lbl.Name] = lbl.Value
|
||||
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
|
||||
l.Name: l.Value,
|
||||
lbl.Name: lbl.Value,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
for _, f := range series.Floats {
|
||||
commonSeries.Points = append(commonSeries.Points, v3.Point{
|
||||
|
||||
@@ -48,6 +48,8 @@ func NewAggExprRewriter(
|
||||
// and the args if the parametric aggregation function is used.
|
||||
func (r *aggExprRewriter) Rewrite(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
expr string,
|
||||
rateInterval uint64,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
@@ -74,7 +76,12 @@ func (r *aggExprRewriter) Rewrite(
|
||||
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
|
||||
}
|
||||
|
||||
visitor := newExprVisitor(r.logger, keys,
|
||||
visitor := newExprVisitor(
|
||||
ctx,
|
||||
startNs,
|
||||
endNs,
|
||||
r.logger,
|
||||
keys,
|
||||
r.fullTextColumn,
|
||||
r.fieldMapper,
|
||||
r.conditionBuilder,
|
||||
@@ -94,6 +101,8 @@ func (r *aggExprRewriter) Rewrite(
|
||||
// RewriteMulti rewrites a slice of expressions.
|
||||
func (r *aggExprRewriter) RewriteMulti(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
exprs []string,
|
||||
rateInterval uint64,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
@@ -102,7 +111,7 @@ func (r *aggExprRewriter) RewriteMulti(
|
||||
var errs []error
|
||||
var chArgsList [][]any
|
||||
for i, e := range exprs {
|
||||
w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys)
|
||||
w, chArgs, err := r.Rewrite(ctx, startNs, endNs, e, rateInterval, keys)
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
out[i] = e
|
||||
@@ -119,6 +128,9 @@ func (r *aggExprRewriter) RewriteMulti(
|
||||
|
||||
// exprVisitor walks FunctionExpr nodes and applies the mappers.
|
||||
type exprVisitor struct {
|
||||
ctx context.Context
|
||||
startNs uint64
|
||||
endNs uint64
|
||||
chparser.DefaultASTVisitor
|
||||
logger *slog.Logger
|
||||
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
|
||||
@@ -132,6 +144,9 @@ type exprVisitor struct {
|
||||
}
|
||||
|
||||
func newExprVisitor(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
logger *slog.Logger,
|
||||
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
@@ -140,6 +155,9 @@ func newExprVisitor(
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *exprVisitor {
|
||||
return &exprVisitor{
|
||||
ctx: ctx,
|
||||
startNs: startNs,
|
||||
endNs: endNs,
|
||||
logger: logger,
|
||||
fieldKeys: fieldKeys,
|
||||
fullTextColumn: fullTextColumn,
|
||||
@@ -186,13 +204,16 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
whereClause, err := PrepareWhereClause(
|
||||
origPred,
|
||||
FilterExprVisitorOpts{
|
||||
Context: v.ctx,
|
||||
Logger: v.logger,
|
||||
FieldKeys: v.fieldKeys,
|
||||
FieldMapper: v.fieldMapper,
|
||||
ConditionBuilder: v.conditionBuilder,
|
||||
FullTextColumn: v.fullTextColumn,
|
||||
JsonKeyToKey: v.jsonKeyToKey,
|
||||
}, 0, 0,
|
||||
StartNs: v.startNs,
|
||||
EndNs: v.endNs,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -212,7 +233,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
for i := 0; i < len(args)-1; i++ {
|
||||
origVal := args[i].String()
|
||||
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
|
||||
}
|
||||
@@ -230,7 +251,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
|
||||
for i, arg := range args {
|
||||
orig := arg.String()
|
||||
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
|
||||
expr, exprArgs, err := CollisionHandledFinalExpr(v.ctx, v.startNs, v.endNs, &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -153,6 +153,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
|
||||
key.Indexes = intrinsicOrCalculatedField.Indexes
|
||||
key.Materialized = intrinsicOrCalculatedField.Materialized
|
||||
key.JSONPlan = intrinsicOrCalculatedField.JSONPlan
|
||||
key.Evolutions = intrinsicOrCalculatedField.Evolutions
|
||||
return actions
|
||||
|
||||
}
|
||||
@@ -205,6 +206,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
|
||||
key.Indexes = matchingKey.Indexes
|
||||
key.Materialized = matchingKey.Materialized
|
||||
key.JSONPlan = matchingKey.JSONPlan
|
||||
key.Evolutions = matchingKey.Evolutions
|
||||
|
||||
return actions
|
||||
} else {
|
||||
|
||||
@@ -19,6 +19,8 @@ import (
|
||||
|
||||
func CollisionHandledFinalExpr(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
fm qbtypes.FieldMapper,
|
||||
cb qbtypes.ConditionBuilder,
|
||||
@@ -44,7 +46,7 @@ func CollisionHandledFinalExpr(
|
||||
|
||||
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
|
||||
condition, err := cb.ConditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -57,7 +59,7 @@ func CollisionHandledFinalExpr(
|
||||
return nil
|
||||
}
|
||||
|
||||
colName, fieldForErr := fm.FieldFor(ctx, field)
|
||||
colName, fieldForErr := fm.FieldFor(ctx, startNs, endNs, field)
|
||||
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
@@ -92,7 +94,7 @@ func CollisionHandledFinalExpr(
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
colName, _ = fm.FieldFor(ctx, key)
|
||||
colName, _ = fm.FieldFor(ctx, startNs, endNs, key)
|
||||
colName, _ = DataTypeCollisionHandledFieldName(key, dummyValue, colName, qbtypes.FilterOperatorUnknown)
|
||||
stmts = append(stmts, colName)
|
||||
}
|
||||
@@ -204,10 +206,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
|
||||
// While we expect users not to send mixed data types, it inevitably happens
// So we handle the data type collisions here
|
||||
switch key.FieldDataType {
|
||||
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString, telemetrytypes.FieldDataTypeJSON:
|
||||
if key.FieldDataType == telemetrytypes.FieldDataTypeJSON {
|
||||
tblFieldName = fmt.Sprintf("toString(%s)", tblFieldName)
|
||||
}
|
||||
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
|
||||
switch v := value.(type) {
|
||||
case float64:
|
||||
// try to convert the string value to a number
|
||||
@@ -222,6 +221,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
|
||||
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
|
||||
value = fmt.Sprintf("%t", v)
|
||||
}
|
||||
|
||||
case telemetrytypes.FieldDataTypeInt64,
|
||||
telemetrytypes.FieldDataTypeArrayInt64,
|
||||
telemetrytypes.FieldDataTypeNumber,
|
||||
|
||||
@@ -44,12 +44,12 @@ func keyIndexFilter(key *telemetrytypes.TelemetryFieldKey) any {
|
||||
|
||||
func (b *defaultConditionBuilder) ConditionFor(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
op qbtypes.FilterOperator,
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
_ uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
|
||||
if key.FieldContext != telemetrytypes.FieldContextResource {
|
||||
@@ -60,15 +60,17 @@ func (b *defaultConditionBuilder) ConditionFor(
|
||||
// as we store resource values as string
|
||||
formattedValue := querybuilder.FormatValueForContains(value)
|
||||
|
||||
column, err := b.fm.ColumnFor(ctx, key)
|
||||
columns, err := b.fm.ColumnFor(ctx, startNs, endNs, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
// resource evolution on the main table doesn't affect this as we are not changing the resource column in the resource fingerprint table.
|
||||
column := columns[0]
|
||||
|
||||
keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key))
|
||||
valueForIndexFilter := valueForIndexFilter(op, key, value)
|
||||
|
||||
fieldName, err := b.fm.FieldFor(ctx, key)
|
||||
fieldName, err := b.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -206,7 +206,7 @@ func TestConditionBuilder(t *testing.T) {
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb, 0, 0)
|
||||
cond, err := conditionBuilder.ConditionFor(context.Background(), 0, 0, tc.key, tc.op, tc.value, sb)
|
||||
sb.Where(cond)
|
||||
|
||||
if tc.expectedErr != nil {
|
||||
|
||||
@@ -27,44 +27,48 @@ func NewFieldMapper() *defaultFieldMapper {
|
||||
|
||||
func (m *defaultFieldMapper) getColumn(
|
||||
_ context.Context,
|
||||
_, _ uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
) (*schema.Column, error) {
|
||||
) ([]*schema.Column, error) {
|
||||
if key.FieldContext == telemetrytypes.FieldContextResource {
|
||||
return resourceColumns["labels"], nil
|
||||
return []*schema.Column{resourceColumns["labels"]}, nil
|
||||
}
|
||||
if col, ok := resourceColumns[key.Name]; ok {
|
||||
return col, nil
|
||||
return []*schema.Column{col}, nil
|
||||
}
|
||||
return nil, qbtypes.ErrColumnNotFound
|
||||
}
|
||||
|
||||
func (m *defaultFieldMapper) ColumnFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
) (*schema.Column, error) {
|
||||
return m.getColumn(ctx, key)
|
||||
) ([]*schema.Column, error) {
|
||||
return m.getColumn(ctx, tsStart, tsEnd, key)
|
||||
}
|
||||
|
||||
func (m *defaultFieldMapper) FieldFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
column, err := m.getColumn(ctx, key)
|
||||
columns, err := m.getColumn(ctx, tsStart, tsEnd, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if key.FieldContext == telemetrytypes.FieldContextResource {
|
||||
return fmt.Sprintf("simpleJSONExtractString(%s, '%s')", column.Name, key.Name), nil
|
||||
return fmt.Sprintf("simpleJSONExtractString(%s, '%s')", columns[0].Name, key.Name), nil
|
||||
}
|
||||
return column.Name, nil
|
||||
return columns[0].Name, nil
|
||||
}
|
||||
|
||||
func (m *defaultFieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
_ map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
colName, err := m.FieldFor(ctx, key)
|
||||
colName, err := m.FieldFor(ctx, tsStart, tsEnd, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
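The defaultFieldMapper hunks above differ in whether ColumnFor returns a single column or a slice of candidate columns and whether the query time range is threaded through, but the core mapping is the same on both sides: resource-context keys are read out of the JSON labels column via simpleJSONExtractString, and known columns are referenced directly. Below is a self-contained sketch of that mapping with local stand-in types (the real code works on schema.Column and telemetrytypes.TelemetryFieldKey).

package main

import (
	"errors"
	"fmt"
)

type FieldContext int

const (
	FieldContextResource FieldContext = iota
	FieldContextAttribute
)

type TelemetryFieldKey struct {
	Name         string
	FieldContext FieldContext
}

var errColumnNotFound = errors.New("column not found")

// stand-in column map; the real mapper resolves against the schema package
var resourceColumns = map[string]string{
	"labels":      "labels",
	"fingerprint": "fingerprint",
}

func fieldFor(key TelemetryFieldKey) (string, error) {
	if key.FieldContext == FieldContextResource {
		// resource attributes live inside the JSON labels blob
		return fmt.Sprintf("simpleJSONExtractString(%s, '%s')", resourceColumns["labels"], key.Name), nil
	}
	if col, ok := resourceColumns[key.Name]; ok {
		return col, nil
	}
	return "", errColumnNotFound
}

func main() {
	expr, _ := fieldFor(TelemetryFieldKey{Name: "service.name", FieldContext: FieldContextResource})
	fmt.Println(expr) // simpleJSONExtractString(labels, 'service.name')
}
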
@@ -148,7 +148,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
|
||||
|
||||
// addConditions adds both filter and time conditions to the query
|
||||
func (b *resourceFilterStatementBuilder[T]) addConditions(
|
||||
_ context.Context,
|
||||
ctx context.Context,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
start, end uint64,
|
||||
query qbtypes.QueryBuilderQuery[T],
|
||||
@@ -160,6 +160,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
|
||||
|
||||
// warnings would be encountered as part of the main condition already
|
||||
filterWhereClause, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
|
||||
Context: ctx,
|
||||
Logger: b.logger,
|
||||
FieldMapper: b.fieldMapper,
|
||||
ConditionBuilder: b.conditionBuilder,
|
||||
@@ -171,7 +172,9 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
|
||||
// there is no need for a "key not found" error for resource filtering
|
||||
IgnoreNotFoundKeys: true,
|
||||
Variables: variables,
|
||||
}, start, end)
|
||||
StartNs: start,
|
||||
EndNs: end,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
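Across the hunks above, the two sides of the compare differ in how the query time range reaches the filter compiler: on one side PrepareWhereClause takes startNs/endNs as extra arguments, on the other they travel on FilterExprVisitorOpts together with the request context. The sketch below contrasts the two call shapes with stand-in types; the generated WHERE fragment is purely illustrative and not what the real compiler emits.

package main

import (
	"context"
	"fmt"
)

// Stand-in for querybuilder.FilterExprVisitorOpts; only the fields relevant
// to the signature change are shown.
type FilterExprVisitorOpts struct {
	Context context.Context
	StartNs uint64
	EndNs   uint64
}

// Variant A: time range passed as extra positional arguments.
func prepareWhereClauseArgs(expr string, _ FilterExprVisitorOpts, startNs, endNs uint64) string {
	return fmt.Sprintf("/* %s */ timestamp BETWEEN %d AND %d", expr, startNs, endNs)
}

// Variant B: context and time range carried on the opts struct.
func prepareWhereClauseOpts(expr string, opts FilterExprVisitorOpts) string {
	return fmt.Sprintf("/* %s */ timestamp BETWEEN %d AND %d", expr, opts.StartNs, opts.EndNs)
}

func main() {
	opts := FilterExprVisitorOpts{
		Context: context.Background(),
		StartNs: 1_700_000_000_000_000_000,
		EndNs:   1_700_000_060_000_000_000,
	}
	fmt.Println(prepareWhereClauseArgs("service.name = 'api'", opts, opts.StartNs, opts.EndNs))
	fmt.Println(prepareWhereClauseOpts("service.name = 'api'", opts))
}
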
@@ -23,6 +23,7 @@ const stringMatchingOperatorDocURL = "https://signoz.io/docs/userguide/operators
|
||||
// filterExpressionVisitor implements the FilterQueryVisitor interface
|
||||
// to convert the parsed filter expressions into ClickHouse WHERE clause
|
||||
type filterExpressionVisitor struct {
|
||||
context context.Context
|
||||
logger *slog.Logger
|
||||
fieldMapper qbtypes.FieldMapper
|
||||
conditionBuilder qbtypes.ConditionBuilder
|
||||
@@ -46,6 +47,7 @@ type filterExpressionVisitor struct {
|
||||
}
|
||||
|
||||
type FilterExprVisitorOpts struct {
|
||||
Context context.Context
|
||||
Logger *slog.Logger
|
||||
FieldMapper qbtypes.FieldMapper
|
||||
ConditionBuilder qbtypes.ConditionBuilder
|
||||
@@ -65,6 +67,7 @@ type FilterExprVisitorOpts struct {
|
||||
// newFilterExpressionVisitor creates a new filterExpressionVisitor
|
||||
func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
|
||||
return &filterExpressionVisitor{
|
||||
context: opts.Context,
|
||||
logger: opts.Logger,
|
||||
fieldMapper: opts.FieldMapper,
|
||||
conditionBuilder: opts.ConditionBuilder,
|
||||
@@ -90,7 +93,7 @@ type PreparedWhereClause struct {
|
||||
}
|
||||
|
||||
// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
|
||||
func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64, endNs uint64) (*PreparedWhereClause, error) {
|
||||
func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWhereClause, error) {
|
||||
|
||||
// Setup the ANTLR parsing pipeline
|
||||
input := antlr.NewInputStream(query)
|
||||
@@ -124,8 +127,6 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64
|
||||
}
|
||||
tokens.Reset()
|
||||
|
||||
opts.StartNs = startNs
|
||||
opts.EndNs = endNs
|
||||
visitor := newFilterExpressionVisitor(opts)
|
||||
|
||||
// Handle syntax errors
|
||||
@@ -313,31 +314,37 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
|
||||
return ""
|
||||
}
|
||||
child := ctx.GetChild(0)
|
||||
var searchText string
|
||||
if keyCtx, ok := child.(*grammar.KeyContext); ok {
|
||||
// create a full text search condition on the body field
|
||||
searchText = keyCtx.GetText()
|
||||
|
||||
keyText := keyCtx.GetText()
|
||||
cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
|
||||
return ""
|
||||
}
|
||||
return cond
|
||||
} else if valCtx, ok := child.(*grammar.ValueContext); ok {
|
||||
var text string
|
||||
if valCtx.QUOTED_TEXT() != nil {
|
||||
searchText = trimQuotes(valCtx.QUOTED_TEXT().GetText())
|
||||
text = trimQuotes(valCtx.QUOTED_TEXT().GetText())
|
||||
} else if valCtx.NUMBER() != nil {
|
||||
searchText = valCtx.NUMBER().GetText()
|
||||
text = valCtx.NUMBER().GetText()
|
||||
} else if valCtx.BOOL() != nil {
|
||||
searchText = valCtx.BOOL().GetText()
|
||||
text = valCtx.BOOL().GetText()
|
||||
} else if valCtx.KEY() != nil {
|
||||
searchText = valCtx.KEY().GetText()
|
||||
text = valCtx.KEY().GetText()
|
||||
} else {
|
||||
v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
|
||||
return ""
|
||||
}
|
||||
cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
|
||||
return ""
|
||||
}
|
||||
return cond
|
||||
}
|
||||
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(searchText), v.builder, v.startNs, v.endNs)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
|
||||
return ""
|
||||
}
|
||||
v.warnings = append(v.warnings, v.fullTextColumn.Warnings...)
|
||||
return cond
|
||||
}
|
||||
|
||||
return "" // Should not happen with valid input
|
||||
@@ -375,13 +382,11 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
	}
	var conds []string
	for _, key := range keys {
		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
		condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, nil, v.builder)
		if err != nil {
			v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
			return ""
		}
		conds = append(conds, condition)
		v.warnings = append(v.warnings, key.Warnings...)
	}
	// if there is only one condition, return it directly, one less `()` wrapper
	if len(conds) == 1 {
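The same reordering is applied at every ConditionFor call site in this file: the query time range moves to the front, right after the context. The interface declaration itself is not part of this diff, but from the telemetrylogs implementation further down its updated shape is presumably:

	// Presumed shape only, inferred from the conditionBuilder implementation later in
	// this diff; the real declaration lives in querybuildertypesv5 and may differ.
	type ConditionBuilder interface {
		ConditionFor(
			ctx context.Context,
			startNs, endNs uint64,
			key *telemetrytypes.TelemetryFieldKey,
			operator FilterOperator,
			value any,
			sb *sqlbuilder.SelectBuilder,
		) (string, error)
	}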
@@ -449,12 +454,11 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
|
||||
}
|
||||
var conds []string
|
||||
for _, key := range keys {
|
||||
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, values, v.builder, v.startNs, v.endNs)
|
||||
condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, values, v.builder)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
conds = append(conds, condition)
|
||||
v.warnings = append(v.warnings, key.Warnings...)
|
||||
}
|
||||
if len(conds) == 1 {
|
||||
return conds[0]
|
||||
@@ -498,12 +502,11 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
|
||||
|
||||
var conds []string
|
||||
for _, key := range keys {
|
||||
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
|
||||
condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, []any{value1, value2}, v.builder)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
conds = append(conds, condition)
|
||||
v.warnings = append(v.warnings, key.Warnings...)
|
||||
}
|
||||
if len(conds) == 1 {
|
||||
return conds[0]
|
||||
@@ -584,13 +587,12 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
|
||||
|
||||
var conds []string
|
||||
for _, key := range keys {
|
||||
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder, v.startNs, v.endNs)
|
||||
condition, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, op, value, v.builder)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
|
||||
return ""
|
||||
}
|
||||
conds = append(conds, condition)
|
||||
v.warnings = append(v.warnings, key.Warnings...)
|
||||
}
|
||||
if len(conds) == 1 {
|
||||
return conds[0]
|
||||
@@ -647,6 +649,7 @@ func (v *filterExpressionVisitor) VisitValueList(ctx *grammar.ValueListContext)
|
||||
|
||||
// VisitFullText handles standalone quoted strings for full-text search
|
||||
func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) any {
|
||||
|
||||
if v.skipFullTextFilter {
|
||||
return ""
|
||||
}
|
||||
@@ -663,13 +666,11 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
|
||||
v.errors = append(v.errors, "full text search is not supported")
|
||||
return ""
|
||||
}
|
||||
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
|
||||
cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
|
||||
return ""
|
||||
}
|
||||
v.warnings = append(v.warnings, v.fullTextColumn.Warnings...)
|
||||
|
||||
return cond
|
||||
}
|
||||
|
||||
@@ -750,13 +751,13 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
|
||||
if key.FieldContext == telemetrytypes.FieldContextBody {
|
||||
var err error
|
||||
if BodyJSONQueryEnabled {
|
||||
fieldName, err = v.fieldMapper.FieldFor(context.Background(), key)
|
||||
fieldName, err = v.fieldMapper.FieldFor(v.context, v.startNs, v.endNs, key)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to get field name for key %s: %s", key.Name, err.Error()))
|
||||
return ""
|
||||
}
|
||||
} else {
|
||||
fieldName, _ = v.jsonKeyToKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
|
||||
fieldName, _ = v.jsonKeyToKey(v.context, key, qbtypes.FilterOperatorUnknown, value)
|
||||
}
|
||||
} else {
|
||||
// TODO(add docs for json body search)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package querybuilder
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -54,11 +55,12 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
opts := FilterExprVisitorOpts{
|
||||
Context: context.Background(),
|
||||
FieldKeys: keys,
|
||||
Variables: tt.variables,
|
||||
}
|
||||
|
||||
_, err := PrepareWhereClause(tt.expr, opts, 0, 0)
|
||||
_, err := PrepareWhereClause(tt.expr, opts)
|
||||
|
||||
if tt.expectError {
|
||||
if err == nil {
|
||||
@@ -467,7 +469,7 @@ func TestVisitKey(t *testing.T) {
|
||||
expectedWarnings: nil,
|
||||
expectedMainWrnURL: "",
|
||||
},
|
||||
{
|
||||
{
|
||||
name: "only attribute.custom_field is selected",
|
||||
keyText: "attribute.attribute.custom_field",
|
||||
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
|
||||
|
||||
@@ -31,7 +31,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
)
|
||||
|
||||
type Handlers struct {
|
||||
@@ -49,7 +48,6 @@ type Handlers struct {
|
||||
GatewayHandler gateway.Handler
|
||||
Fields fields.Handler
|
||||
AuthzHandler authz.Handler
|
||||
ZeusHandler zeus.Handler
|
||||
}
|
||||
|
||||
func NewHandlers(
|
||||
@@ -62,7 +60,6 @@ func NewHandlers(
|
||||
gatewayService gateway.Gateway,
|
||||
telemetryMetadataStore telemetrytypes.MetadataStore,
|
||||
authz authz.AuthZ,
|
||||
zeusService zeus.Zeus,
|
||||
) Handlers {
|
||||
return Handlers{
|
||||
SavedView: implsavedview.NewHandler(modules.SavedView),
|
||||
@@ -79,6 +76,5 @@ func NewHandlers(
|
||||
GatewayHandler: gateway.NewHandler(gatewayService),
|
||||
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
|
||||
AuthzHandler: signozauthzapi.NewHandler(authz),
|
||||
ZeusHandler: zeus.NewHandler(zeusService, licensing),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ func TestNewHandlers(t *testing.T) {
|
||||
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
|
||||
|
||||
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil, nil)
|
||||
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil)
|
||||
reflectVal := reflect.ValueOf(handlers)
|
||||
for i := 0; i < reflectVal.NumField(); i++ {
|
||||
f := reflectVal.Field(i)
|
||||
|
||||
@@ -28,7 +28,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"github.com/swaggest/jsonschema-go"
|
||||
"github.com/swaggest/openapi-go"
|
||||
"github.com/swaggest/openapi-go/openapi3"
|
||||
@@ -59,7 +58,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
|
||||
struct{ gateway.Handler }{},
|
||||
struct{ fields.Handler }{},
|
||||
struct{ authz.Handler }{},
|
||||
struct{ zeus.Handler }{},
|
||||
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -252,7 +252,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
|
||||
handlers.GatewayHandler,
|
||||
handlers.Fields,
|
||||
handlers.AuthzHandler,
|
||||
handlers.ZeusHandler,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -373,6 +373,7 @@ func New(
|
||||
telemetrylogs.LogResourceKeysTblName,
|
||||
telemetrymetadata.DBName,
|
||||
telemetrymetadata.AttributesMetadataLocalTableName,
|
||||
telemetrymetadata.ColumnEvolutionMetadataTableName,
|
||||
)
|
||||
|
||||
global, err := factory.NewProviderFromNamedMap(
|
||||
@@ -392,7 +393,7 @@ func New(
|
||||
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
|
||||
|
||||
// Initialize all handlers for the modules
|
||||
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)
|
||||
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
|
||||
|
||||
// Initialize the API server
|
||||
apiserver, err := factory.NewProviderFromNamedMap(
|
||||
|
||||
@@ -25,28 +25,26 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {

func (c *conditionBuilder) conditionFor(
	ctx context.Context,
	startNs, endNs uint64,
	key *telemetrytypes.TelemetryFieldKey,
	operator qbtypes.FilterOperator,
	value any,
	sb *sqlbuilder.SelectBuilder,
) (string, error) {
	column, err := c.fm.ColumnFor(ctx, key)
	columns, err := c.fm.ColumnFor(ctx, startNs, endNs, key)
	if err != nil {
		return "", err
	}

	if column.Type.GetType() == schema.ColumnTypeEnumJSON {
	// If field data is Not JSON Column Itself, then we need to build a JSON condition
	if querybuilder.BodyJSONQueryEnabled && key.MustBuildJSONCondition() {
		// TODO(Piyush): Update this to support multiple JSON columns based on evolutions
		for _, column := range columns {
			if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
				valueType, value := InferDataType(value, operator, key)
				cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
				if err != nil {
					return "", err
				}
				return cond, nil
			} else if key.FieldDataType == telemetrytypes.FieldDataTypeJSON && !operator.IsOpValidForJSON() {
				// throw error for invalid operators on JSON columns
				return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid operator")
			}
		}
	}

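For body fields the JSON branch above is taken instead of the map-backed one. A rough within-package sketch of that path, reusing only the helpers named in this hunk (the key, value and operator are purely illustrative):

	// Illustrative fragment: key is a FieldContextBody key such as body.message and sb is
	// the surrounding *sqlbuilder.SelectBuilder; InferDataType and NewJSONConditionBuilder
	// are the package helpers referenced above.
	valueType, val := InferDataType("timeout", qbtypes.FilterOperatorContains, key)
	cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(qbtypes.FilterOperatorContains, val, sb)
	if err != nil {
		return "", err
	}
	return cond, nil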
@@ -54,7 +52,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
value = querybuilder.FormatValueForContains(value)
|
||||
}
|
||||
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, key)
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -114,6 +112,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
return sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
|
||||
case qbtypes.FilterOperatorNotContains:
|
||||
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
|
||||
|
||||
case qbtypes.FilterOperatorRegexp:
|
||||
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
|
||||
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
|
||||
@@ -179,18 +178,36 @@ func (c *conditionBuilder) conditionFor(
|
||||
}
|
||||
|
||||
var value any
|
||||
switch column.Type.GetType() {
|
||||
case schema.ColumnTypeEnumJSON:
|
||||
switch key.FieldDataType {
|
||||
case telemetrytypes.FieldDataTypeJSON:
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), false), nil
|
||||
}
|
||||
return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), true), nil
|
||||
default:
|
||||
column := columns[0]
|
||||
if len(key.Evolutions) > 0 {
|
||||
// we will use the corresponding column and its evolution entry for the query
|
||||
newColumns, _, err := selectEvolutionsForColumns(columns, key.Evolutions, startNs, endNs)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if len(newColumns) == 0 {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "no valid evolution found for field %s in the given time range", key.Name)
|
||||
}
|
||||
|
||||
// This mean tblFieldName is with multiIf, we just need to do a null check.
|
||||
if len(newColumns) > 1 {
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return sb.IsNotNull(tblFieldName), nil
|
||||
} else {
|
||||
return sb.IsNull(tblFieldName), nil
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise we have to find the correct exist operator based on the column type
|
||||
column = newColumns[0]
|
||||
}
|
||||
|
||||
switch column.Type.GetType() {
|
||||
case schema.ColumnTypeEnumJSON:
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return sb.IsNotNull(tblFieldName), nil
|
||||
} else {
|
||||
return sb.IsNull(tblFieldName), nil
|
||||
}
|
||||
case schema.ColumnTypeEnumLowCardinality:
|
||||
@@ -240,6 +257,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
}
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)
|
||||
|
||||
}
|
||||
}
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported operator: %v", operator)
|
||||
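To make the exists/not-exists mapping above concrete, here is a small standalone sketch using the sqlbuilder package referenced throughout this diff (assumed to be github.com/huandu/go-sqlbuilder, which matches the NewSelectBuilder/BuildWithFlavor/ClickHouse identifiers used here; the table name is illustrative). For a map-backed column the check compiles to mapContains(...) = ?, while for a JSON subcolumn it becomes an IS NOT NULL check, matching the test expectations further down.

	package main

	import (
		"fmt"

		"github.com/huandu/go-sqlbuilder"
	)

	func main() {
		// Map-backed resource attribute (older schema): EXISTS -> mapContains(...) = true.
		sb := sqlbuilder.NewSelectBuilder()
		sb.Select("count()").From("logs") // illustrative table name
		sb.Where(sb.EQ("mapContains(resources_string, 'service.name')", true))
		fmt.Println(sb.BuildWithFlavor(sqlbuilder.ClickHouse))

		// JSON resource column (newer schema): EXISTS -> IS NOT NULL on the subcolumn.
		sb2 := sqlbuilder.NewSelectBuilder()
		sb2.Select("count()").From("logs")
		sb2.Where(sb2.IsNotNull("resource.`service.name`"))
		fmt.Println(sb2.BuildWithFlavor(sqlbuilder.ClickHouse))
	}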
@@ -247,14 +265,15 @@ func (c *conditionBuilder) conditionFor(
|
||||
|
||||
func (c *conditionBuilder) ConditionFor(
|
||||
ctx context.Context,
|
||||
startNs uint64,
|
||||
endNs uint64,
|
||||
key *telemetrytypes.TelemetryFieldKey,
|
||||
operator qbtypes.FilterOperator,
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
_ uint64,
|
||||
_ uint64,
|
||||
) (string, error) {
|
||||
condition, err := c.conditionFor(ctx, key, operator, value, sb)
|
||||
|
||||
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -262,12 +281,12 @@ func (c *conditionBuilder) ConditionFor(
|
||||
if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
|
||||
// skip adding exists filter for intrinsic fields
|
||||
// with an exception for body json search
|
||||
field, _ := c.fm.FieldFor(ctx, key)
|
||||
field, _ := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||
if slices.Contains(maps.Keys(IntrinsicFields), field) && key.FieldContext != telemetrytypes.FieldContextBody {
|
||||
return condition, nil
|
||||
}
|
||||
|
||||
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ package telemetrylogs
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
@@ -11,14 +12,148 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestExistsConditionForWithEvolutions(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
startTs uint64
|
||||
endTs uint64
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
operator qbtypes.FilterOperator
|
||||
value any
|
||||
expectedSQL string
|
||||
expectedArgs []any
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
name: "New column",
|
||||
startTs: uint64(time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
|
||||
endTs: uint64(time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Evolutions: []*telemetrytypes.EvolutionEntry{
|
||||
{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
ColumnName: "resources_string",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
ColumnType: "Map(LowCardinality(String), String)",
|
||||
FieldName: "__all__",
|
||||
ReleaseTime: time.Unix(0, 0),
|
||||
},
|
||||
{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
ColumnName: "resource",
|
||||
ColumnType: "JSON()",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldName: "__all__",
|
||||
ReleaseTime: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
|
||||
},
|
||||
},
|
||||
},
|
||||
operator: qbtypes.FilterOperatorExists,
|
||||
value: nil,
|
||||
expectedSQL: "WHERE resource.`service.name`::String IS NOT NULL",
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Old column",
|
||||
startTs: uint64(time.Date(2023, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
|
||||
endTs: uint64(time.Date(2023, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Evolutions: []*telemetrytypes.EvolutionEntry{
|
||||
{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
ColumnName: "resources_string",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
ColumnType: "Map(LowCardinality(String), String)",
|
||||
FieldName: "__all__",
|
||||
ReleaseTime: time.Unix(0, 0),
|
||||
},
|
||||
{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
ColumnName: "resource",
|
||||
ColumnType: "JSON()",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldName: "__all__",
|
||||
ReleaseTime: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
|
||||
},
|
||||
},
|
||||
},
|
||||
operator: qbtypes.FilterOperatorExists,
|
||||
value: nil,
|
||||
expectedSQL: "WHERE mapContains(resources_string, 'service.name') = ?",
|
||||
expectedArgs: []any{true},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
name: "Both Old column and new - empty filter",
|
||||
startTs: uint64(time.Date(2023, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
|
||||
endTs: uint64(time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC).UnixNano()),
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Evolutions: []*telemetrytypes.EvolutionEntry{
|
||||
{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
ColumnName: "resources_string",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
ColumnType: "Map(LowCardinality(String), String)",
|
||||
FieldName: "__all__",
|
||||
ReleaseTime: time.Unix(0, 0),
|
||||
},
|
||||
{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
ColumnName: "resource",
|
||||
ColumnType: "JSON()",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldName: "__all__",
|
||||
ReleaseTime: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC),
|
||||
},
|
||||
},
|
||||
},
|
||||
operator: qbtypes.FilterOperatorExists,
|
||||
value: nil,
|
||||
expectedSQL: "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL",
|
||||
expectedError: nil,
|
||||
},
|
||||
}
|
||||
fm := NewFieldMapper()
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
ctx := context.Background()
|
||||
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, tc.startTs, tc.endTs, &tc.key, tc.operator, tc.value, sb)
|
||||
sb.Where(cond)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
assert.Contains(t, sql, tc.expectedSQL)
|
||||
assert.Equal(t, tc.expectedArgs, args)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConditionFor(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
mockEvolution := mockEvolutionData(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
testCases := []struct {
|
||||
name string
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
operator qbtypes.FilterOperator
|
||||
value any
|
||||
evolutions []*telemetrytypes.EvolutionEntry
|
||||
expectedSQL string
|
||||
expectedArgs []any
|
||||
expectedError error
|
||||
@@ -240,9 +375,11 @@ func TestConditionFor(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
evolutions: mockEvolution,
|
||||
operator: qbtypes.FilterOperatorExists,
|
||||
value: nil,
|
||||
expectedSQL: "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL",
|
||||
expectedSQL: "mapContains(resources_string, 'service.name') = ?",
|
||||
expectedArgs: []any{true},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -252,9 +389,11 @@ func TestConditionFor(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
evolutions: mockEvolution,
|
||||
operator: qbtypes.FilterOperatorNotExists,
|
||||
value: nil,
|
||||
expectedSQL: "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NULL",
|
||||
expectedSQL: "mapContains(resources_string, 'service.name') <> ?",
|
||||
expectedArgs: []any{true},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -315,10 +454,11 @@ func TestConditionFor(t *testing.T) {
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Materialized: true,
|
||||
},
|
||||
evolutions: mockEvolution,
|
||||
operator: qbtypes.FilterOperatorRegexp,
|
||||
value: "frontend-.*",
|
||||
expectedSQL: "(match(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL), ?) AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL) IS NOT NULL)",
|
||||
expectedArgs: []any{"frontend-.*"},
|
||||
expectedSQL: "WHERE (match(`resource_string_service$$name`, ?) AND `resource_string_service$$name_exists` = ?)",
|
||||
expectedArgs: []any{"frontend-.*", true},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -329,9 +469,10 @@ func TestConditionFor(t *testing.T) {
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Materialized: true,
|
||||
},
|
||||
evolutions: mockEvolution,
|
||||
operator: qbtypes.FilterOperatorNotRegexp,
|
||||
value: "test-.*",
|
||||
expectedSQL: "WHERE NOT match(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL), ?)",
|
||||
expectedSQL: "WHERE NOT match(`resource_string_service$$name`, ?)",
|
||||
expectedArgs: []any{"test-.*"},
|
||||
expectedError: nil,
|
||||
},
|
||||
@@ -371,14 +512,13 @@ func TestConditionFor(t *testing.T) {
|
||||
expectedError: qbtypes.ErrColumnNotFound,
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
|
||||
tc.key.Evolutions = tc.evolutions
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, 0, 0, &tc.key, tc.operator, tc.value, sb)
|
||||
sb.Where(cond)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
@@ -433,7 +573,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
var err error
|
||||
for _, key := range tc.keys {
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
|
||||
cond, err := conditionBuilder.conditionFor(ctx, 0, 0, &key, tc.operator, tc.value, sb)
|
||||
sb.Where(cond)
|
||||
if err != nil {
|
||||
t.Fatalf("Error getting condition for key %s: %v", key.Name, err)
|
||||
@@ -690,7 +830,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
|
||||
cond, err := conditionBuilder.conditionFor(ctx, 0, 0, &tc.key, tc.operator, tc.value, sb)
|
||||
sb.Where(cond)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/constants"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -20,7 +17,7 @@ const (
|
||||
LogsV2TimestampColumn = "timestamp"
|
||||
LogsV2ObservedTimestampColumn = "observed_timestamp"
|
||||
LogsV2BodyColumn = "body"
|
||||
LogsV2BodyV2Column = constants.BodyV2Column
|
||||
LogsV2BodyJSONColumn = constants.BodyJSONColumn
|
||||
LogsV2BodyPromotedColumn = constants.BodyPromotedColumn
|
||||
LogsV2TraceIDColumn = "trace_id"
|
||||
LogsV2SpanIDColumn = "span_id"
|
||||
@@ -37,25 +34,11 @@ const (
|
||||
LogsV2ResourcesStringColumn = "resources_string"
|
||||
LogsV2ScopeStringColumn = "scope_string"
|
||||
|
||||
BodyV2ColumnPrefix = constants.BodyV2ColumnPrefix
|
||||
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
|
||||
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
|
||||
MessageSubColumn = "message"
|
||||
ftsBodyFieldKey = "object(body)"
|
||||
bodySearchDefaultWarning = "When you search on `body` (full text or by field), Query Builder uses body.message:string by default. Check that this matches what you want to search."
|
||||
ftsBodyFieldWarning = "Avoid using `object(body)` in queries. It performs a full scan and can severely slow queries. Use explicit fields instead (for example, `body.response.status_code` or `response.status_code`). Only use `object(body)` for temporary schema exploration. We do not recommend its usage elsewhere."
|
||||
)
|
||||
|
||||
var (
|
||||
// mapping for body logical field to message sub column
|
||||
// TODO(Piyush): Add description for detailed explanation of remapping of body to message
|
||||
BodyLogicalFieldJSONMapping = &telemetrytypes.TelemetryFieldKey{
|
||||
Name: MessageSubColumn,
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
FieldContext: telemetrytypes.FieldContextBody,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
JSONDataType: &telemetrytypes.String,
|
||||
Warnings: []string{bodySearchDefaultWarning},
|
||||
}
|
||||
DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
|
||||
Name: "body",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
@@ -135,44 +118,3 @@ var (
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func bodyAliasExpression() string {
|
||||
if !querybuilder.BodyJSONQueryEnabled {
|
||||
return LogsV2BodyColumn
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s as body", LogsV2BodyV2Column)
|
||||
}
|
||||
|
||||
func enrichIntrinsicFields() error {
|
||||
// body logical field is mapped to message field in the body context that too only with String data type
|
||||
err := BodyLogicalFieldJSONMapping.SetJSONAccessPlan(telemetrytypes.JSONColumnMetadata{
|
||||
BaseColumn: LogsV2BodyV2Column,
|
||||
PromotedColumn: LogsV2BodyPromotedColumn,
|
||||
}, map[string][]telemetrytypes.JSONDataType{
|
||||
MessageSubColumn: {telemetrytypes.String},
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
DefaultFullTextColumn = BodyLogicalFieldJSONMapping
|
||||
IntrinsicFields["body"] = *BodyLogicalFieldJSONMapping
|
||||
IntrinsicFields[ftsBodyFieldKey] = telemetrytypes.TelemetryFieldKey{
|
||||
Name: "body_v2",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeJSON,
|
||||
Warnings: []string{ftsBodyFieldWarning},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
err := enrichIntrinsicFields()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,10 @@ package telemetrylogs
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz-otel-collector/utils"
|
||||
@@ -30,7 +33,7 @@ var (
|
||||
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
|
||||
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
|
||||
"body": {Name: "body", Type: schema.ColumnTypeString},
|
||||
LogsV2BodyV2Column: {Name: LogsV2BodyV2Column, Type: schema.JSONColumnType{
|
||||
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
|
||||
MaxDynamicTypes: utils.ToPointer(uint(32)),
|
||||
MaxDynamicPaths: utils.ToPointer(uint(0)),
|
||||
}},
|
||||
@@ -61,140 +64,280 @@ var (
|
||||
}
|
||||
)
|
||||
|
||||
type fieldMapper struct {}
type fieldMapper struct{}

func NewFieldMapper() qbtypes.FieldMapper {
	return &fieldMapper{}
}
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {

func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
	switch key.FieldContext {
	case telemetrytypes.FieldContextResource:
		return logsV2Columns["resource"], nil
		columns := []*schema.Column{logsV2Columns["resources_string"], logsV2Columns["resource"]}
		return columns, nil
case telemetrytypes.FieldContextScope:
|
||||
switch key.Name {
|
||||
case "name", "scope.name", "scope_name":
|
||||
return logsV2Columns["scope_name"], nil
|
||||
return []*schema.Column{logsV2Columns["scope_name"]}, nil
|
||||
case "version", "scope.version", "scope_version":
|
||||
return logsV2Columns["scope_version"], nil
|
||||
return []*schema.Column{logsV2Columns["scope_version"]}, nil
|
||||
}
|
||||
return logsV2Columns["scope_string"], nil
|
||||
return []*schema.Column{logsV2Columns["scope_string"]}, nil
|
||||
case telemetrytypes.FieldContextAttribute:
|
||||
switch key.FieldDataType {
|
||||
case telemetrytypes.FieldDataTypeString:
|
||||
return logsV2Columns["attributes_string"], nil
|
||||
return []*schema.Column{logsV2Columns["attributes_string"]}, nil
|
||||
case telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeNumber:
|
||||
return logsV2Columns["attributes_number"], nil
|
||||
return []*schema.Column{logsV2Columns["attributes_number"]}, nil
|
||||
case telemetrytypes.FieldDataTypeBool:
|
||||
return logsV2Columns["attributes_bool"], nil
|
||||
return []*schema.Column{logsV2Columns["attributes_bool"]}, nil
|
||||
}
|
||||
case telemetrytypes.FieldContextBody:
|
||||
// Body context is for JSON body fields
|
||||
// Use body_v2 if feature flag is enabled
|
||||
// Use body_json if feature flag is enabled
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
return logsV2Columns[LogsV2BodyV2Column], nil
|
||||
return []*schema.Column{logsV2Columns[LogsV2BodyJSONColumn]}, nil
|
||||
}
|
||||
// Fall back to legacy body column
|
||||
return logsV2Columns["body"], nil
|
||||
return []*schema.Column{logsV2Columns["body"]}, nil
|
||||
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
|
||||
col, ok := logsV2Columns[key.Name]
|
||||
if !ok {
|
||||
// check if the key has body JSON search
|
||||
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
|
||||
// Use body_v2 if feature flag is enabled and we have a body condition builder
|
||||
// Use body_json if feature flag is enabled and we have a body condition builder
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
return logsV2Columns[LogsV2BodyV2Column], nil
|
||||
// TODO(Piyush): Update this to support multiple JSON columns based on evolutions
|
||||
// i.e return both the body json and body json promoted and let the evolutions decide which one to use
|
||||
// based on the query range time.
|
||||
return []*schema.Column{logsV2Columns[LogsV2BodyJSONColumn]}, nil
|
||||
}
|
||||
// Fall back to legacy body column
|
||||
return logsV2Columns["body"], nil
|
||||
return []*schema.Column{logsV2Columns["body"]}, nil
|
||||
}
|
||||
return nil, qbtypes.ErrColumnNotFound
|
||||
}
|
||||
return col, nil
|
||||
return []*schema.Column{col}, nil
|
||||
}
|
||||
|
||||
return nil, qbtypes.ErrColumnNotFound
|
||||
}
|
||||
|
||||
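With getColumn now returning a slice, a resource-context key resolves to both the legacy map column and the JSON column; which of them is actually queried downstream depends on the key's evolutions and the query time range. A within-package sketch of the exported entry point, following the call shape used in the tests below (the two uint64 arguments are the query start/end in nanoseconds; the tests pass 0, 0 when evolutions are not exercised):

	// Sketch only.
	fm := NewFieldMapper()
	cols, err := fm.ColumnFor(ctx, 0, 0, &telemetrytypes.TelemetryFieldKey{
		Name:         "service.name",
		FieldContext: telemetrytypes.FieldContextResource,
	})
	if err != nil {
		return err
	}
	for _, col := range cols {
		fmt.Println(col.Name) // resources_string, resource
	}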
func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
column, err := m.getColumn(ctx, key)
|
||||
// selectEvolutionsForColumns selects the appropriate evolution entries for each column based on the time range.
// Logic:
// - Finds the latest base evolution (<= tsStartTime) across ALL columns
// - Rejects all evolutions before this latest base evolution
// - For duplicate evolutions it keeps the oldest one (first by ReleaseTime)
// - For each column, includes its evolution if it is >= the latest base evolution and released before tsEndTime
// - Results are sorted by ReleaseTime descending (newest first)
// (A simplified standalone sketch of this selection rule follows the function.)
func selectEvolutionsForColumns(columns []*schema.Column, evolutions []*telemetrytypes.EvolutionEntry, tsStart, tsEnd uint64) ([]*schema.Column, []*telemetrytypes.EvolutionEntry, error) {
|
||||
sortedEvolutions := make([]*telemetrytypes.EvolutionEntry, len(evolutions))
|
||||
copy(sortedEvolutions, evolutions)
|
||||
|
||||
// sort the evolutions by ReleaseTime ascending
|
||||
sort.Slice(sortedEvolutions, func(i, j int) bool {
|
||||
return sortedEvolutions[i].ReleaseTime.Before(sortedEvolutions[j].ReleaseTime)
|
||||
})
|
||||
|
||||
tsStartTime := time.Unix(0, int64(tsStart))
|
||||
tsEndTime := time.Unix(0, int64(tsEnd))
|
||||
|
||||
// Build evolution map: column name -> evolution
|
||||
evolutionMap := make(map[string]*telemetrytypes.EvolutionEntry)
|
||||
for _, evolution := range sortedEvolutions {
|
||||
if _, exists := evolutionMap[evolution.ColumnName+":"+evolution.FieldName+":"+strconv.Itoa(int(evolution.Version))]; exists {
|
||||
// if there is a duplicate, we just keep the oldest one.
continue
|
||||
}
|
||||
evolutionMap[evolution.ColumnName+":"+evolution.FieldName+":"+strconv.Itoa(int(evolution.Version))] = evolution
|
||||
}
|
||||
|
||||
// Find the latest base evolution (<= tsStartTime) across ALL columns
|
||||
// Evolutions are sorted, so we can break early
|
||||
var latestBaseEvolutionAcrossAll *telemetrytypes.EvolutionEntry
|
||||
for _, evolution := range sortedEvolutions {
|
||||
if evolution.ReleaseTime.After(tsStartTime) {
|
||||
break
|
||||
}
|
||||
latestBaseEvolutionAcrossAll = evolution
|
||||
}
|
||||
|
||||
// We shouldn't reach this; it means the evolutions data itself is inconsistent.
if latestBaseEvolutionAcrossAll == nil {
|
||||
return nil, nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "no base evolution found for columns %v", columns)
|
||||
}
|
||||
|
||||
columnLookUpMap := make(map[string]*schema.Column)
|
||||
for _, column := range columns {
|
||||
columnLookUpMap[column.Name] = column
|
||||
}
|
||||
|
||||
// Collect column-evolution pairs
|
||||
type colEvoPair struct {
|
||||
column *schema.Column
|
||||
evolution *telemetrytypes.EvolutionEntry
|
||||
}
|
||||
pairs := []colEvoPair{}
|
||||
|
||||
for _, evolution := range evolutionMap {
|
||||
// Reject evolutions before the latest base evolution
|
||||
if evolution.ReleaseTime.Before(latestBaseEvolutionAcrossAll.ReleaseTime) {
|
||||
continue
|
||||
}
|
||||
// skip evolutions after tsEndTime
|
||||
if evolution.ReleaseTime.After(tsEndTime) || evolution.ReleaseTime.Equal(tsEndTime) {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, exists := columnLookUpMap[evolution.ColumnName]; !exists {
|
||||
return nil, nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "evolution column %s not found in columns %v", evolution.ColumnName, columns)
|
||||
}
|
||||
|
||||
pairs = append(pairs, colEvoPair{columnLookUpMap[evolution.ColumnName], evolution})
|
||||
}
|
||||
|
||||
// If no pairs found, fall back to latestBaseEvolutionAcrossAll for matching columns
|
||||
if len(pairs) == 0 {
|
||||
for _, column := range columns {
|
||||
// Use latestBaseEvolutionAcrossAll if this column name matches its column name
|
||||
if column.Name == latestBaseEvolutionAcrossAll.ColumnName {
|
||||
pairs = append(pairs, colEvoPair{column, latestBaseEvolutionAcrossAll})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by ReleaseTime descending (newest first)
|
||||
for i := 0; i < len(pairs)-1; i++ {
|
||||
for j := i + 1; j < len(pairs); j++ {
|
||||
if pairs[i].evolution.ReleaseTime.Before(pairs[j].evolution.ReleaseTime) {
|
||||
pairs[i], pairs[j] = pairs[j], pairs[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract results
|
||||
newColumns := make([]*schema.Column, len(pairs))
|
||||
evolutionsEntries := make([]*telemetrytypes.EvolutionEntry, len(pairs))
|
||||
for i, pair := range pairs {
|
||||
newColumns[i] = pair.column
|
||||
evolutionsEntries[i] = pair.evolution
|
||||
}
|
||||
|
||||
return newColumns, evolutionsEntries, nil
|
||||
}
|
||||
|
||||
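As noted above, here is a self-contained sketch of the selection rule on plain structs. It is deliberately simplified (single column kind, no duplicate/version handling) and only illustrates the "latest entry released at or before the query start, plus anything released inside the range, newest first" idea; the real function above additionally maps the selected entries back to their columns.

	package main

	import (
		"fmt"
		"sort"
		"time"
	)

	// evo is a stripped-down stand-in for telemetrytypes.EvolutionEntry.
	type evo struct {
		ColumnName  string
		ReleaseTime time.Time
	}

	// selectEvos keeps the latest entry released at or before start, plus every entry
	// released strictly inside (start, end), ordered newest first.
	func selectEvos(evos []evo, start, end time.Time) []evo {
		sort.Slice(evos, func(i, j int) bool { return evos[i].ReleaseTime.Before(evos[j].ReleaseTime) })
		var base *evo
		for i := range evos {
			if evos[i].ReleaseTime.After(start) {
				break
			}
			base = &evos[i]
		}
		var out []evo
		if base != nil {
			out = append(out, *base)
		}
		for _, e := range evos {
			if e.ReleaseTime.After(start) && e.ReleaseTime.Before(end) {
				out = append(out, e)
			}
		}
		sort.Slice(out, func(i, j int) bool { return out[i].ReleaseTime.After(out[j].ReleaseTime) })
		return out
	}

	func main() {
		evos := []evo{
			{"resources_string", time.Unix(0, 0)},
			{"resource", time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC)},
		}
		// A range entirely after the JSON column rollout selects only "resource",
		// mirroring the "New column" test case earlier in this diff.
		fmt.Println(selectEvos(evos,
			time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
			time.Date(2024, 1, 16, 0, 0, 0, 0, time.UTC)))
	}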
func (m *fieldMapper) FieldFor(ctx context.Context, tsStart, tsEnd uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
|
||||
columns, err := m.getColumn(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
switch column.Type.GetType() {
|
||||
case schema.ColumnTypeEnumJSON:
|
||||
// json is only supported for resource context as of now
|
||||
switch key.FieldContext {
|
||||
case telemetrytypes.FieldContextResource:
|
||||
oldColumn := logsV2Columns["resources_string"]
|
||||
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
|
||||
var newColumns []*schema.Column
|
||||
var evolutionsEntries []*telemetrytypes.EvolutionEntry
|
||||
if len(key.Evolutions) > 0 {
|
||||
// we will use the corresponding column and its evolution entry for the query
|
||||
newColumns, evolutionsEntries, err = selectEvolutionsForColumns(columns, key.Evolutions, tsStart, tsEnd)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
} else {
|
||||
newColumns = columns
|
||||
}
|
||||
|
||||
// We have to add ::String because ClickHouse rejects Variant/Dynamic types in GROUP BY
// ("data types Variant/Dynamic are not allowed in GROUP BY"); once the ClickHouse dependency
// is updated, check whether this cast can be dropped.
if key.Materialized {
|
||||
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
|
||||
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
|
||||
}
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
|
||||
case telemetrytypes.FieldContextBody:
|
||||
if key.JSONDataType == nil {
|
||||
return "", qbtypes.ErrColumnNotFound
|
||||
exprs := []string{}
|
||||
existExpr := []string{}
|
||||
for i, column := range newColumns {
|
||||
// Use evolution column name if available, otherwise use the column name
|
||||
columnName := column.Name
|
||||
if evolutionsEntries != nil && evolutionsEntries[i] != nil {
|
||||
columnName = evolutionsEntries[i].ColumnName
|
||||
}
|
||||
|
||||
switch column.Type.GetType() {
|
||||
case schema.ColumnTypeEnumJSON:
|
||||
switch key.FieldContext {
|
||||
case telemetrytypes.FieldContextResource:
|
||||
exprs = append(exprs, fmt.Sprintf("%s.`%s`::String", columnName, key.Name))
|
||||
existExpr = append(existExpr, fmt.Sprintf("%s.`%s` IS NOT NULL", columnName, key.Name))
|
||||
case telemetrytypes.FieldContextBody:
|
||||
if key.JSONDataType == nil {
|
||||
return "", qbtypes.ErrColumnNotFound
|
||||
}
|
||||
|
||||
if key.KeyNameContainsArray() && !key.JSONDataType.IsArray {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "FieldFor not supported for nested fields; only supported for flat paths (e.g. body.status.detail) and paths of Array type: %s(%s)", key.Name, key.FieldDataType)
|
||||
}
|
||||
expr, err := m.buildFieldForJSON(key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
exprs = append(exprs, expr)
|
||||
default:
|
||||
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
|
||||
}
|
||||
|
||||
if key.KeyNameContainsArray() && !key.JSONDataType.IsArray {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "FieldFor not supported for nested fields; only supported for flat paths (e.g. body.status.detail) and paths of Array type: %s(%s)", key.Name, key.FieldDataType)
|
||||
case schema.ColumnTypeEnumLowCardinality:
|
||||
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
|
||||
case schema.ColumnTypeEnumString:
|
||||
exprs = append(exprs, column.Name)
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
|
||||
}
|
||||
|
||||
return m.buildFieldForJSON(key)
|
||||
case telemetrytypes.FieldContextLog:
|
||||
// return the column name as is for log context fields
|
||||
case schema.ColumnTypeEnumString,
|
||||
schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
|
||||
return column.Name, nil
|
||||
default:
|
||||
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
|
||||
}
|
||||
case schema.ColumnTypeEnumLowCardinality:
|
||||
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
|
||||
case schema.ColumnTypeEnumString:
|
||||
return column.Name, nil
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
|
||||
}
|
||||
case schema.ColumnTypeEnumString,
|
||||
schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
|
||||
return column.Name, nil
|
||||
case schema.ColumnTypeEnumMap:
|
||||
keyType := column.Type.(schema.MapColumnType).KeyType
|
||||
if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
|
||||
}
|
||||
|
||||
switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
|
||||
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
|
||||
// a key could have been materialized, if so return the materialized column name
|
||||
if key.Materialized {
|
||||
return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
|
||||
case schema.ColumnTypeEnumMap:
|
||||
keyType := column.Type.(schema.MapColumnType).KeyType
|
||||
if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
|
||||
}
|
||||
|
||||
switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
|
||||
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
|
||||
// a key could have been materialized, if so return the materialized column name
|
||||
if key.Materialized {
|
||||
return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
|
||||
}
|
||||
exprs = append(exprs, fmt.Sprintf("%s['%s']", columnName, key.Name))
|
||||
existExpr = append(existExpr, fmt.Sprintf("mapContains(%s, '%s')", columnName, key.Name))
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
|
||||
}
|
||||
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
|
||||
default:
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
|
||||
}
|
||||
}
|
||||
|
||||
	if len(exprs) == 1 {
		return exprs[0], nil
	} else if len(exprs) > 1 {
		// Ensure existExpr has the same length as exprs
		if len(existExpr) != len(exprs) {
			return "", errors.New(errors.TypeInternal, errors.CodeInternal, "length of exist exprs doesn't match to that of exprs")
		}
		finalExprs := []string{}
		for i, expr := range exprs {
			finalExprs = append(finalExprs, fmt.Sprintf("%s, %s", existExpr[i], expr))
		}
		return "multiIf(" + strings.Join(finalExprs, ", ") + ", NULL)", nil
	}

	// should not reach here
	return column.Name, nil
	return columns[0].Name, nil
}
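The multiIf fallback chain this produces is easiest to read from the test expectations later in this diff. As a rough standalone illustration of how the (exists-check, value-expression) pairs combine for a resource attribute that exists in both schemas:

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		// Pairs of (exists-check, value-expression), newest schema first, mirroring the
		// existExpr/exprs slices built above for the key "service.name".
		pairs := [][2]string{
			{"resource.`service.name` IS NOT NULL", "resource.`service.name`::String"},
			{"mapContains(resources_string, 'service.name')", "resources_string['service.name']"},
		}
		parts := []string{}
		for _, p := range pairs {
			parts = append(parts, p[0]+", "+p[1])
		}
		fmt.Println("multiIf(" + strings.Join(parts, ", ") + ", NULL)")
		// Prints:
		// multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)
	}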
|
||||
func (m *fieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
|
||||
func (m *fieldMapper) ColumnFor(ctx context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
|
||||
return m.getColumn(ctx, key)
|
||||
}
|
||||
|
||||
func (m *fieldMapper) ColumnExpressionFor(
|
||||
ctx context.Context,
|
||||
tsStart, tsEnd uint64,
|
||||
field *telemetrytypes.TelemetryFieldKey,
|
||||
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||
) (string, error) {
|
||||
|
||||
colName, err := m.FieldFor(ctx, field)
|
||||
colName, err := m.FieldFor(ctx, tsStart, tsEnd, field)
|
||||
if errors.Is(err, qbtypes.ErrColumnNotFound) {
|
||||
// the key didn't have the right context to be added to the query
|
||||
// we try to use the context we know of
|
||||
@@ -204,7 +347,7 @@ func (m *fieldMapper) ColumnExpressionFor(
|
||||
if _, ok := logsV2Columns[field.Name]; ok {
|
||||
// if it is, attach the column name directly
|
||||
field.FieldContext = telemetrytypes.FieldContextLog
|
||||
colName, _ = m.FieldFor(ctx, field)
|
||||
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
|
||||
} else {
|
||||
// - the context is not provided
|
||||
// - there are no keys for the field
@@ -222,12 +365,12 @@ func (m *fieldMapper) ColumnExpressionFor(
|
||||
}
|
||||
} else if len(keysForField) == 1 {
|
||||
// we have a single key for the field, use it
|
||||
colName, _ = m.FieldFor(ctx, keysForField[0])
|
||||
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
|
||||
} else {
|
||||
// select any non-empty value from the keys
|
||||
args := []string{}
|
||||
for _, key := range keysForField {
|
||||
colName, _ = m.FieldFor(ctx, key)
|
||||
colName, _ = m.FieldFor(ctx, tsStart, tsEnd, key)
|
||||
args = append(args, fmt.Sprintf("toString(%s) != '', toString(%s)", colName, colName))
|
||||
}
|
||||
colName = fmt.Sprintf("multiIf(%s, NULL)", strings.Join(args, ", "))
|
||||
|
||||
@@ -3,6 +3,7 @@ package telemetrylogs
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
@@ -17,7 +18,7 @@ func TestGetColumn(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
expectedCol *schema.Column
|
||||
expectedCol []*schema.Column
|
||||
expectedError error
|
||||
}{
|
||||
{
|
||||
@@ -26,7 +27,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
expectedCol: logsV2Columns["resource"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["resources_string"], logsV2Columns["resource"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -35,7 +36,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "name",
|
||||
FieldContext: telemetrytypes.FieldContextScope,
|
||||
},
|
||||
expectedCol: logsV2Columns["scope_name"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["scope_name"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -44,7 +45,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "scope.name",
|
||||
FieldContext: telemetrytypes.FieldContextScope,
|
||||
},
|
||||
expectedCol: logsV2Columns["scope_name"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["scope_name"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -53,7 +54,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "scope_name",
|
||||
FieldContext: telemetrytypes.FieldContextScope,
|
||||
},
|
||||
expectedCol: logsV2Columns["scope_name"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["scope_name"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -62,7 +63,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "version",
|
||||
FieldContext: telemetrytypes.FieldContextScope,
|
||||
},
|
||||
expectedCol: logsV2Columns["scope_version"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["scope_version"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -71,7 +72,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "custom.scope.field",
|
||||
FieldContext: telemetrytypes.FieldContextScope,
|
||||
},
|
||||
expectedCol: logsV2Columns["scope_string"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["scope_string"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -81,7 +82,7 @@ func TestGetColumn(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
expectedCol: logsV2Columns["attributes_string"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["attributes_string"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -91,7 +92,7 @@ func TestGetColumn(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
expectedCol: logsV2Columns["attributes_number"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["attributes_number"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -101,7 +102,7 @@ func TestGetColumn(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeInt64,
|
||||
},
|
||||
expectedCol: logsV2Columns["attributes_number"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["attributes_number"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -111,7 +112,7 @@ func TestGetColumn(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
|
||||
},
|
||||
expectedCol: logsV2Columns["attributes_number"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["attributes_number"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -121,7 +122,7 @@ func TestGetColumn(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||
},
|
||||
expectedCol: logsV2Columns["attributes_bool"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["attributes_bool"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -130,7 +131,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "timestamp",
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
},
|
||||
expectedCol: logsV2Columns["timestamp"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["timestamp"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -139,7 +140,7 @@ func TestGetColumn(t *testing.T) {
|
||||
Name: "body",
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
},
|
||||
expectedCol: logsV2Columns["body"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["body"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
{
|
||||
@@ -159,7 +160,7 @@ func TestGetColumn(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||
},
|
||||
expectedCol: logsV2Columns["attributes_bool"],
|
||||
expectedCol: []*schema.Column{logsV2Columns["attributes_bool"]},
|
||||
expectedError: nil,
|
||||
},
|
||||
}
|
||||
@@ -168,7 +169,7 @@ func TestGetColumn(t *testing.T) {
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
col, err := fm.ColumnFor(ctx, &tc.key)
|
||||
col, err := fm.ColumnFor(ctx, 0, 0, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
@@ -183,11 +184,14 @@ func TestGetColumn(t *testing.T) {
|
||||
func TestGetFieldKeyName(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
resourceEvolution := mockEvolutionData(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
expectedResult string
|
||||
expectedError error
|
||||
name string
|
||||
key telemetrytypes.TelemetryFieldKey
|
||||
expectedResult string
|
||||
expectedError error
|
||||
addExistsFilter bool
|
||||
}{
|
||||
{
|
||||
name: "Simple column type - timestamp",
|
||||
@@ -195,8 +199,9 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
Name: "timestamp",
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
},
|
||||
expectedResult: "timestamp",
|
||||
expectedError: nil,
|
||||
expectedResult: "timestamp",
|
||||
expectedError: nil,
|
||||
addExistsFilter: false,
|
||||
},
|
||||
{
|
||||
name: "Map column type - string attribute",
|
||||
@@ -205,8 +210,9 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
expectedResult: "attributes_string['user.id']",
|
||||
expectedError: nil,
|
||||
expectedResult: "attributes_string['user.id']",
|
||||
expectedError: nil,
|
||||
addExistsFilter: false,
|
||||
},
|
||||
{
|
||||
name: "Map column type - number attribute",
|
||||
@@ -215,8 +221,9 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
expectedResult: "attributes_number['request.size']",
|
||||
expectedError: nil,
|
||||
expectedResult: "attributes_number['request.size']",
|
||||
expectedError: nil,
|
||||
addExistsFilter: false,
|
||||
},
|
||||
{
|
||||
name: "Map column type - bool attribute",
|
||||
@@ -225,28 +232,33 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||
},
|
||||
expectedResult: "attributes_bool['request.success']",
|
||||
expectedError: nil,
|
||||
expectedResult: "attributes_bool['request.success']",
|
||||
expectedError: nil,
|
||||
addExistsFilter: false,
|
||||
},
|
||||
{
|
||||
name: "Map column type - resource attribute",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
Evolutions: resourceEvolution,
|
||||
},
|
||||
expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)",
|
||||
expectedError: nil,
|
||||
expectedResult: "resources_string['service.name']",
|
||||
expectedError: nil,
|
||||
addExistsFilter: false,
|
||||
},
|
||||
{
|
||||
name: "Map column type - resource attribute - Materialized",
|
||||
name: "Map column type - resource attribute - Materialized - json",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Materialized: true,
|
||||
Evolutions: resourceEvolution,
|
||||
},
|
||||
expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL)",
|
||||
expectedError: nil,
|
||||
expectedResult: "`resource_string_service$$name`",
|
||||
expectedError: nil,
|
||||
addExistsFilter: false,
|
||||
},
|
||||
{
|
||||
name: "Non-existent column",
|
||||
@@ -262,7 +274,7 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
result, err := fm.FieldFor(ctx, &tc.key)
|
||||
result, err := fm.FieldFor(ctx, 0, 0, &tc.key)
|
||||
|
||||
if tc.expectedError != nil {
|
||||
assert.Equal(t, tc.expectedError, err)
|
||||
@@ -273,3 +285,571 @@ func TestGetFieldKeyName(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFieldForWithEvolutions(t *testing.T) {
ctx := context.Background()

key := &telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
}

testCases := []struct {
name string
evolutions []*telemetrytypes.EvolutionEntry
key *telemetrytypes.TelemetryFieldKey
tsStartTime time.Time
tsEndTime time.Time
expectedResult string
expectedError error
}{
{
name: "Single evolution before tsStartTime",
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "resources_string['service.name']",
expectedError: nil,
},
{
name: "Single evolution exactly at tsStartTime",
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "resources_string['service.name']",
expectedError: nil,
},
{
name: "Single evolution after tsStartTime",
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)",
expectedError: nil,
},
// TODO(piyush): to be added once integration with JSON is done.
// {
// name: "Single evolution after tsStartTime - JSON body",
// evolutions: []*telemetrytypes.EvolutionEntry{
// {
// Signal: telemetrytypes.SignalLogs,
// ColumnName: LogsV2BodyJSONColumn,
// ColumnType: "JSON(max_dynamic_paths=0)",
// FieldContext: telemetrytypes.FieldContextBody,
// FieldName: "__all__",
// ReleaseTime: time.Unix(0, 0),
// },
// {
// Signal: telemetrytypes.SignalLogs,
// ColumnName: LogsV2BodyPromotedColumn,
// ColumnType: "JSON()",
// FieldContext: telemetrytypes.FieldContextBody,
// FieldName: "user.name",
// ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
// },
// },
// key: &telemetrytypes.TelemetryFieldKey{
// Name: "user.name",
// FieldContext: telemetrytypes.FieldContextBody,
// JSONDataType: &telemetrytypes.String,
// Materialized: true,
// },
// tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
// tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
// expectedResult: "coalesce(dynamicElement(body_json.`user.name`, 'String'), dynamicElement(body_promoted.`user.name`, 'String'))",
// expectedError: nil,
// },
{
name: "Multiple evolutions before tsStartTime - only latest should be included",
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 1, 2, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "resource.`service.name`::String",
expectedError: nil,
},
{
name: "Multiple evolutions after tsStartTime - all should be included",
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 1, 2, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Unix(0, 0),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)",
expectedError: nil,
},
{
name: "Duplicate evolutions after tsStartTime - all should be included",
// Note: on production when this happens, we should go ahead and clean it up if required
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "resource.`service.name`::String",
expectedError: nil,
},
{
name: "Evolution exactly at tsEndTime - should not be included",
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
},
},
key: key,
tsStartTime: time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC),
tsEndTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC),
expectedResult: "resources_string['service.name']",
expectedError: nil,
},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
fm := NewFieldMapper()

tsStart := uint64(tc.tsStartTime.UnixNano())
tsEnd := uint64(tc.tsEndTime.UnixNano())
tc.key.Evolutions = tc.evolutions

result, err := fm.FieldFor(ctx, tsStart, tsEnd, tc.key)

if tc.expectedError != nil {
assert.Equal(t, tc.expectedError, err)
} else {
require.NoError(t, err)
assert.Equal(t, tc.expectedResult, result)
}
})
}
}

func TestSelectEvolutionsForColumns(t *testing.T) {
testCases := []struct {
name string
columns []*schema.Column
evolutions []*telemetrytypes.EvolutionEntry
tsStart uint64
tsEnd uint64
expectedColumns []string // column names
expectedEvols []string // evolution column names
expectedError bool
errorStr string
}{
{
name: "New evolutions after tsStartTime - should include all",
columns: []*schema.Column{
logsV2Columns["resources_string"],
logsV2Columns["resource"],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 0,
ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 1,
ReleaseTime: time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{"resource", "resources_string"}, // sorted by ReleaseTime desc
expectedEvols: []string{"resource", "resources_string"},
},
{
name: "Columns without matching evolutions - should exclude them",
columns: []*schema.Column{
logsV2Columns["resources_string"],
logsV2Columns["resource"], // no evolution for this
logsV2Columns["attributes_string"], // no evolution for this
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 0,
ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{"resources_string"},
expectedEvols: []string{"resources_string"},
},
{
name: "New evolutions after tsEndTime - should exclude all",
columns: []*schema.Column{
logsV2Columns["resources_string"],
logsV2Columns["resource"],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 0,
ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 1,
ReleaseTime: time.Date(2024, 2, 25, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{"resources_string"},
expectedEvols: []string{"resources_string"},
},
{
name: "Empty columns array",
columns: []*schema.Column{},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 0,
ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{},
expectedEvols: []string{},
expectedError: true,
errorStr: "column resources_string not found",
},
{
name: "Duplicate evolutions - should use first encountered (oldest if sorted)",
columns: []*schema.Column{
logsV2Columns["resource"],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 0,
ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 1,
ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 1,
ReleaseTime: time.Date(2024, 1, 20, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{"resource"},
expectedEvols: []string{"resource"}, // should use first one (older)
},
{
name: "Genuine Duplicate evolutions with new version- should consider both",
columns: []*schema.Column{
logsV2Columns["resources_string"],
logsV2Columns["resource"],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 0,
ReleaseTime: time.Date(0, 0, 0, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 1,
ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
Version: 2,
ReleaseTime: time.Date(2024, 1, 20, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 1, 16, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{"resources_string", "resource"},
expectedEvols: []string{"resources_string", "resource"}, // should use first one (older)
},
{
name: "Evolution exactly at tsEndTime",
columns: []*schema.Column{
logsV2Columns["resources_string"],
logsV2Columns["resource"],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resources_string",
ColumnType: "Map(LowCardinality(String), String)",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: "resource",
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextResource,
FieldName: "__all__",
ReleaseTime: time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC), // exactly at tsEnd
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{"resources_string"}, // resource excluded because After(tsEnd) is true
expectedEvols: []string{"resources_string"},
},
{
name: "Single evolution after tsStartTime - JSON body",
columns: []*schema.Column{
logsV2Columns[LogsV2BodyJSONColumn],
logsV2Columns[LogsV2BodyPromotedColumn],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: LogsV2BodyJSONColumn,
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: LogsV2BodyPromotedColumn,
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "user.name",
ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 1, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{LogsV2BodyPromotedColumn, LogsV2BodyJSONColumn}, // sorted by ReleaseTime desc (newest first)
expectedEvols: []string{LogsV2BodyPromotedColumn, LogsV2BodyJSONColumn},
},
{
name: "No evolution after tsStartTime - JSON body",
columns: []*schema.Column{
logsV2Columns[LogsV2BodyJSONColumn],
logsV2Columns[LogsV2BodyPromotedColumn],
},
evolutions: []*telemetrytypes.EvolutionEntry{
{
Signal: telemetrytypes.SignalLogs,
ColumnName: LogsV2BodyJSONColumn,
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "__all__",
ReleaseTime: time.Unix(0, 0),
},
{
Signal: telemetrytypes.SignalLogs,
ColumnName: LogsV2BodyPromotedColumn,
ColumnType: "JSON()",
FieldContext: telemetrytypes.FieldContextBody,
FieldName: "user.name",
ReleaseTime: time.Date(2024, 2, 2, 0, 0, 0, 0, time.UTC),
},
},
tsStart: uint64(time.Date(2024, 2, 3, 0, 0, 0, 0, time.UTC).UnixNano()),
tsEnd: uint64(time.Date(2024, 2, 15, 0, 0, 0, 0, time.UTC).UnixNano()),
expectedColumns: []string{LogsV2BodyPromotedColumn},
expectedEvols: []string{LogsV2BodyPromotedColumn},
},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
resultColumns, resultEvols, err := selectEvolutionsForColumns(tc.columns, tc.evolutions, tc.tsStart, tc.tsEnd)

if tc.expectedError {
assert.Contains(t, err.Error(), tc.errorStr)
} else {
require.NoError(t, err)
assert.Equal(t, len(tc.expectedColumns), len(resultColumns), "column count mismatch")
assert.Equal(t, len(tc.expectedEvols), len(resultEvols), "evolution count mismatch")

resultColumnNames := make([]string, len(resultColumns))
for i, col := range resultColumns {
resultColumnNames[i] = col.Name
}
resultEvolNames := make([]string, len(resultEvols))
for i, evol := range resultEvols {
resultEvolNames[i] = evol.ColumnName
}

for i := range tc.expectedColumns {
assert.Equal(t, resultColumnNames[i], tc.expectedColumns[i], "expected column missing: "+tc.expectedColumns[i])
}
for i := range tc.expectedEvols {
assert.Equal(t, resultEvolNames[i], tc.expectedEvols[i], "expected evolution missing: "+tc.expectedEvols[i])
}
// Verify sorting: should be descending by ReleaseTime
for i := 0; i < len(resultEvols)-1; i++ {
assert.True(t, !resultEvols[i].ReleaseTime.Before(resultEvols[i+1].ReleaseTime),
"evolutions should be sorted descending by ReleaseTime")
}
}
})
}
}

@@ -1,6 +1,7 @@
package telemetrylogs

import (
"context"
"testing"

"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -10,12 +11,14 @@ import (

// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
ctx := context.Background()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

keys := buildCompleteFieldKeyMap()

opts := querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
@@ -33,7 +36,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {

for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
clause, err := querybuilder.PrepareWhereClause(expr, opts)
require.NoError(t, err)
require.NotNil(t, clause)

@@ -52,6 +55,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
keys := buildCompleteFieldKeyMap()

opts := querybuilder.FilterExprVisitorOpts{
Context: context.Background(),
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
@@ -69,7 +73,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {

for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
clause, err := querybuilder.PrepareWhereClause(expr, opts)
require.NoError(t, err)
require.NotNil(t, clause)

@@ -1,6 +1,7 @@
package telemetrylogs

import (
"context"
"fmt"
"testing"

@@ -19,6 +20,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
keys := buildCompleteFieldKeyMap()

opts := querybuilder.FilterExprVisitorOpts{
Context: context.Background(),
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
@@ -161,7 +163,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
clause, err := querybuilder.PrepareWhereClause(tc.query, opts)

if tc.shouldPass {
if err != nil {

@@ -1,9 +1,11 @@
package telemetrylogs

import (
"context"
"fmt"
"strings"
"testing"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
@@ -15,19 +17,33 @@ import (

// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
func TestFilterExprLogs(t *testing.T) {
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
ctx := context.Background()
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)

// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()

// for each key of resource attribute add evolution metadata
for i, telemetryKeys := range keys {
for j, telemetryKey := range telemetryKeys {
if telemetryKey.FieldContext == telemetrytypes.FieldContextResource {
keys[i][j].Evolutions = mockEvolutionData(releaseTime)
}
}
}

opts := querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: DefaultFullTextColumn,
JsonKeyToKey: GetBodyJSONKey,
StartNs: uint64(releaseTime.Add(-5 * time.Minute).UnixNano()),
EndNs: uint64(releaseTime.Add(5 * time.Minute).UnixNano()),
}

testCases := []struct {
@@ -466,7 +482,7 @@ func TestFilterExprLogs(t *testing.T) {
expectedErrorContains: "",
},

// fulltext with parenthesized expression
//fulltext with parenthesized expression
{
category: "FREETEXT with parentheses",
query: "error (status.code=500 OR status.code=503)",
@@ -2386,7 +2402,7 @@ func TestFilterExprLogs(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
clause, err := querybuilder.PrepareWhereClause(tc.query, opts)

if tc.shouldPass {
if err != nil {
@@ -2442,6 +2458,7 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
}

opts := querybuilder.FilterExprVisitorOpts{
Context: context.Background(),
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
@@ -2504,7 +2521,7 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {

clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
clause, err := querybuilder.PrepareWhereClause(tc.query, opts)

if tc.shouldPass {
if err != nil {

@@ -30,7 +30,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
return &jsonConditionBuilder{key: key, valueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType]}
}

// BuildCondition builds the full WHERE condition for body_v2 JSON paths
// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
conditions := []string{}
for _, node := range c.key.JSONPlan {
@@ -40,7 +40,6 @@ func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperato
}
conditions = append(conditions, condition)
}

return sb.Or(conditions...), nil
}

@@ -289,9 +288,9 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
}
return sb.NotIn(fieldExpr, values...), nil
case qbtypes.FilterOperatorExists:
return sb.IsNotNull(fieldExpr), nil
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return sb.IsNull(fieldExpr), nil
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
// between and not between
case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
values, ok := value.([]any)

File diff suppressed because one or more lines are too long
@@ -203,6 +203,7 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
}

func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {

// First check if it matches with any intrinsic fields
var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
if _, ok := IntrinsicFields[key.Name]; ok {
@@ -211,6 +212,7 @@ func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldK
}

return querybuilder.AdjustKey(key, keys, nil)

}

// buildListQuery builds a query for list panel type
@@ -247,7 +249,11 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.SelectMore(LogsV2SeverityNumberColumn)
sb.SelectMore(LogsV2ScopeNameColumn)
sb.SelectMore(LogsV2ScopeVersionColumn)
sb.SelectMore(bodyAliasExpression())
sb.SelectMore(LogsV2BodyColumn)
if querybuilder.BodyJSONQueryEnabled {
sb.SelectMore(LogsV2BodyJSONColumn)
sb.SelectMore(LogsV2BodyPromotedColumn)
}
sb.SelectMore(LogsV2AttributesStringColumn)
sb.SelectMore(LogsV2AttributesNumberColumn)
sb.SelectMore(LogsV2AttributesBoolColumn)
@@ -262,7 +268,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
}

// get column expression for the field - use array index directly to avoid pointer to loop variable
colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
if err != nil {
return nil, err
}
@@ -271,7 +277,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
}

sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

// Add filter conditions
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)

@@ -281,7 +286,8 @@ func (b *logQueryStatementBuilder) buildListQuery(

// Add order by
for _, orderBy := range query.Order {
colExpr, err := b.fm.ColumnExpressionFor(ctx, &orderBy.Key.TelemetryFieldKey, keys)

colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
@@ -347,7 +353,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
if err != nil {
return nil, err
}
@@ -362,7 +368,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
allAggChArgs := make([]any, 0)
for i, agg := range query.Aggregations {
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, agg.Expression,
ctx, start, end, agg.Expression,
uint64(query.StepInterval.Seconds()),
keys,
)
@@ -494,7 +500,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any

for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
if err != nil {
return nil, err
}
@@ -512,7 +518,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
for idx := range query.Aggregations {
aggExpr := query.Aggregations[idx]
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(
ctx, aggExpr.Expression,
ctx, start, end, aggExpr.Expression,
rateInterval,
keys,
)
@@ -584,7 +590,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(

// buildFilterCondition builds SQL condition from filter expression
func (b *logQueryStatementBuilder) addFilterCondition(
_ context.Context,
ctx context.Context,
sb *sqlbuilder.SelectBuilder,
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
@@ -598,6 +604,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
if query.Filter != nil && query.Filter.Expression != "" {
// add filter expression
preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Context: ctx,
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
@@ -606,7 +613,9 @@ func (b *logQueryStatementBuilder) addFilterCondition(
FullTextColumn: b.fullTextColumn,
JsonKeyToKey: b.jsonKeyToKey,
Variables: variables,
}, start, end)
StartNs: start,
EndNs: end,
})

if err != nil {
return nil, err

Some files were not shown because too many files have changed in this diff.