Compare commits


2 Commits

Author       SHA1        Message                                                     Date
Nikhil Soni  0160a526fa  fix: switch to new derived attributes for ip filtering     2026-02-19 20:11:52 +05:30
Nikhil Soni  deeb690621  fix: add missing filtering for ip address for scalar data  2026-02-19 20:11:52 +05:30

    In the domain listing API for external API monitoring, we have an option to
    filter out the IP address, but it only handles timeseries and raw type data,
    while the domain list handler returns scalar data.
117 changed files with 2170 additions and 4566 deletions

View File

@@ -53,9 +53,9 @@ jobs:
- sqlite
clickhouse-version:
- 25.5.6
- 25.10.5
- 25.10.1
schema-migrator-version:
- v0.142.0
- v0.129.7
postgres-version:
- 15
if: |

View File

@@ -85,9 +85,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
return querier.NewHandler(ps, q, a)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
eequerier "github.com/SigNoz/signoz/ee/querier"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
@@ -125,10 +124,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
communityHandler := querier.NewHandler(ps, q, a)
return eequerier.NewHandler(ps, q, communityHandler)
},
)
if err != nil {

View File

@@ -92,15 +92,6 @@ components:
tokenType:
type: string
type: object
AuthtypesGettableTransaction:
properties:
authorized:
type: boolean
object:
$ref: '#/components/schemas/AuthtypesObject'
relation:
type: string
type: object
AuthtypesGoogleConfig:
properties:
allowedGroups:
@@ -126,8 +117,6 @@ components:
serviceAccountJson:
type: string
type: object
AuthtypesName:
type: object
AuthtypesOIDCConfig:
properties:
claimMapping:
@@ -145,16 +134,6 @@ components:
issuerAlias:
type: string
type: object
AuthtypesObject:
properties:
resource:
$ref: '#/components/schemas/AuthtypesResource'
selector:
$ref: '#/components/schemas/AuthtypesSelector'
required:
- resource
- selector
type: object
AuthtypesOrgSessionContext:
properties:
authNSupport:
@@ -192,16 +171,6 @@ components:
refreshToken:
type: string
type: object
AuthtypesResource:
properties:
name:
$ref: '#/components/schemas/AuthtypesName'
type:
type: string
required:
- name
- type
type: object
AuthtypesRoleMapping:
properties:
defaultRole:
@@ -227,8 +196,6 @@ components:
samlIdp:
type: string
type: object
AuthtypesSelector:
type: object
AuthtypesSessionContext:
properties:
exists:
@@ -239,18 +206,6 @@ components:
nullable: true
type: array
type: object
AuthtypesTransaction:
properties:
id:
type: string
object:
$ref: '#/components/schemas/AuthtypesObject'
relation:
type: string
required:
- relation
- object
type: object
AuthtypesUpdateableAuthDomain:
properties:
config:
@@ -352,16 +307,11 @@ components:
type: string
value:
type: string
required:
- id
- value
type: object
GatewaytypesGettableCreatedIngestionKeyLimit:
properties:
id:
type: string
required:
- id
type: object
GatewaytypesGettableIngestionKeys:
properties:
@@ -482,8 +432,6 @@ components:
type: string
nullable: true
type: array
required:
- name
type: object
GatewaytypesPostableIngestionKeyLimit:
properties:
@@ -506,8 +454,6 @@ components:
type: string
nullable: true
type: array
required:
- config
type: object
MetricsexplorertypesListMetric:
properties:
@@ -1658,52 +1604,6 @@ components:
status:
type: string
type: object
RoletypesGettableResources:
properties:
relations:
additionalProperties:
items:
type: string
type: array
nullable: true
type: object
resources:
items:
$ref: '#/components/schemas/AuthtypesResource'
nullable: true
type: array
type: object
RoletypesPatchableObjects:
properties:
additions:
items:
$ref: '#/components/schemas/AuthtypesObject'
nullable: true
type: array
deletions:
items:
$ref: '#/components/schemas/AuthtypesObject'
nullable: true
type: array
required:
- additions
- deletions
type: object
RoletypesPatchableRole:
properties:
description:
nullable: true
type: string
type: object
RoletypesPostableRole:
properties:
description:
type: string
name:
type: string
required:
- name
type: object
RoletypesRole:
properties:
createdAt:
@@ -2020,82 +1920,6 @@ components:
format: date-time
type: string
type: object
ZeustypesGettableHost:
properties:
hosts:
items:
$ref: '#/components/schemas/ZeustypesHost'
nullable: true
type: array
name:
type: string
state:
type: string
tier:
type: string
required:
- name
- state
- tier
- hosts
type: object
ZeustypesHost:
properties:
is_default:
type: boolean
name:
type: string
url:
type: string
required:
- name
- is_default
- url
type: object
ZeustypesPostableHost:
properties:
name:
type: string
required:
- name
type: object
ZeustypesPostableProfile:
properties:
existing_observability_tool:
type: string
has_existing_observability_tool:
type: boolean
logs_scale_per_day_in_gb:
format: int64
type: integer
number_of_hosts:
format: int64
type: integer
number_of_services:
format: int64
type: integer
reasons_for_interest_in_signoz:
items:
type: string
nullable: true
type: array
timeline_for_migrating_to_signoz:
type: string
uses_otel:
type: boolean
where_did_you_discover_signoz:
type: string
required:
- uses_otel
- has_existing_observability_tool
- existing_observability_tool
- reasons_for_interest_in_signoz
- logs_scale_per_day_in_gb
- number_of_services
- number_of_hosts
- where_did_you_discover_signoz
- timeline_for_migrating_to_signoz
type: object
securitySchemes:
api_key:
description: API Keys
@@ -2113,41 +1937,6 @@ info:
version: ""
openapi: 3.0.3
paths:
/api/v1/authz/check:
post:
deprecated: false
description: Checks if the authenticated user has permissions for given transactions
operationId: AuthzCheck
requestBody:
content:
application/json:
schema:
items:
$ref: '#/components/schemas/AuthtypesTransaction'
type: array
responses:
"200":
content:
application/json:
schema:
properties:
data:
items:
$ref: '#/components/schemas/AuthtypesGettableTransaction'
type: array
status:
type: string
type: object
description: OK
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Check permissions
tags:
- authz
/api/v1/changePassword/{id}:
post:
deprecated: false
@@ -3349,29 +3138,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Promote and index paths
tags:
- logs
@@ -3396,29 +3168,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- EDITOR
- tokenizer:
- EDITOR
summary: Promote and index paths
tags:
- logs
@@ -3977,11 +3732,6 @@ paths:
deprecated: false
description: This endpoint creates a role
operationId: CreateRole
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/RoletypesPostableRole'
responses:
"201":
content:
@@ -3994,12 +3744,6 @@ paths:
type: string
type: object
description: Created
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
@@ -4012,30 +3756,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"409":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Conflict
"451":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unavailable For Legal Reasons
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
"501":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Implemented
security:
- api_key:
- ADMIN
@@ -4074,30 +3800,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"451":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unavailable For Legal Reasons
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
"501":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Implemented
security:
- api_key:
- ADMIN
@@ -4164,11 +3872,6 @@ paths:
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/RoletypesPatchableRole'
responses:
"204":
content:
@@ -4188,30 +3891,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"451":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unavailable For Legal Reasons
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
"501":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Implemented
security:
- api_key:
- ADMIN
@@ -4220,202 +3905,6 @@ paths:
summary: Patch role
tags:
- role
/api/v1/roles/{id}/relation/{relation}/objects:
get:
deprecated: false
description: Gets all objects connected to the specified role via a given relation
type
operationId: GetObjects
parameters:
- in: path
name: id
required: true
schema:
type: string
- in: path
name: relation
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
items:
$ref: '#/components/schemas/AuthtypesObject'
type: array
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"451":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unavailable For Legal Reasons
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
"501":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Implemented
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Get objects for a role by relation
tags:
- role
patch:
deprecated: false
description: Patches the objects connected to the specified role via a given
relation type
operationId: PatchObjects
parameters:
- in: path
name: id
required: true
schema:
type: string
- in: path
name: relation
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/RoletypesPatchableObjects'
responses:
"204":
content:
application/json:
schema:
type: string
description: No Content
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"451":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unavailable For Legal Reasons
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
"501":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Implemented
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Patch objects for a role by relation
tags:
- role
/api/v1/roles/resources:
get:
deprecated: false
description: Gets all the available resources for role assignment
operationId: GetResources
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/RoletypesGettableResources'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Get resources
tags:
- role
/api/v1/user:
get:
deprecated: false
@@ -5231,7 +4720,6 @@ paths:
parameters:
- in: query
name: name
required: true
schema:
type: string
- in: query
@@ -6050,174 +5538,6 @@ paths:
summary: Rotate session
tags:
- sessions
/api/v2/zeus/hosts:
get:
deprecated: false
description: This endpoint gets the host info from zeus.
operationId: GetHosts
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/ZeustypesGettableHost'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Get host info from Zeus.
tags:
- zeus
put:
deprecated: false
description: This endpoint saves the host of a deployment to zeus.
operationId: PutHost
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/ZeustypesPostableHost'
responses:
"204":
description: No Content
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"409":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Conflict
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Put host in Zeus for a deployment.
tags:
- zeus
/api/v2/zeus/profiles:
put:
deprecated: false
description: This endpoint saves the profile of a deployment to zeus.
operationId: PutProfile
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/ZeustypesPostableProfile'
responses:
"204":
description: No Content
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"409":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Conflict
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Put profile in Zeus for a deployment.
tags:
- zeus
/api/v5/query_range:
post:
deprecated: false
@@ -6612,58 +5932,4 @@ paths:
- VIEWER
summary: Query range
tags:
- querier
/api/v5/substitute_vars:
post:
deprecated: false
description: Replace variables in a query
operationId: ReplaceVariables
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Replace variables
tags:
- querier
- query

View File

@@ -155,7 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
- `Request` is a Go type that describes the request body or form.
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
- `Response` is the Go type for the successful response payload.
- `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
@@ -173,170 +172,8 @@ See existing examples in:
- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)
## OpenAPI schema details for request/response types
The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.
### Adding request examples
Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:
```go
type OpenAPIExample struct {
Name string // unique key for the example (e.g. "traces_time_series")
Summary string // short description shown in docs (e.g. "Time series: count spans grouped by service")
Description string // optional longer description
Value any // the example payload, typically map[string]any
}
```
For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:
```go
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"querier"},
Summary: "Query range",
Description: "Execute a composite query over a time range.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
// ...
},
},
},
},
},
},
// ... more examples
},
// ...
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
```
### `required` tag
Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value: a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.
```go
type ListItem struct {
...
}
type ListResponse struct {
List []ListItem `json:"list" required:"true" nullable:"true"`
Total uint64 `json:"total" required:"true"`
}
```
In this example, a response like `{"list": null, "total": 0}` is valid. Both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.
### `nullable` tag
Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go, the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).
Be explicit about the distinction:
- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice.
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.
```go
// Non-nullable: Go code must ensure this is always an initialized slice.
type NonNullableExample struct {
Items []Item `json:"items" required:"true"`
}
```
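For contrast, a minimal sketch of the nullable case (the type and field names here are illustrative, not taken from the codebase):
```go
// Nullable: the Go code may return a nil slice, which serializes to null.
// The key must still be present in the JSON (required), but its value may be null.
type NullableExample struct {
	Items []Item `json:"items" required:"true" nullable:"true"`
}
```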
When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.
### `Enum()` method
For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.
```go
type Signal struct {
valuer.String
}
var (
SignalTraces = Signal{valuer.NewString("traces")}
SignalLogs = Signal{valuer.NewString("logs")}
SignalMetrics = Signal{valuer.NewString("metrics")}
)
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}
```
This produces the following in the generated OpenAPI spec:
```yaml
Signal:
enum:
- traces
- logs
- metrics
type: string
```
Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.
### `JSONSchema()` method (custom schema)
For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:
```go
var _ jsonschema.Exposer = Step{}
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a duration string or seconds.")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
```
## What should I remember?
- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil`, which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.

View File

@@ -62,6 +62,10 @@ func (provider *provider) Stop(ctx context.Context) error {
return provider.openfgaServer.Stop(ctx)
}
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return provider.openfgaServer.Check(ctx, tuple)
}
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
}
@@ -70,8 +74,8 @@ func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Contex
return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
return provider.openfgaServer.BatchCheck(ctx, tupleReq)
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return provider.openfgaServer.BatchCheck(ctx, tuples)
}
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
@@ -183,11 +187,6 @@ func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource
}
func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
storableRole, err := provider.store.Get(ctx, orgID, id)
if err != nil {
return nil, err
@@ -199,7 +198,7 @@ func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id
resourceObjects, err := provider.
ListObjects(
ctx,
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.Name, orgID, &authtypes.RelationAssignee),
authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
relation,
authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
)

View File

@@ -2,10 +2,8 @@ package openfgaserver
import (
"context"
"strconv"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
@@ -30,34 +28,27 @@ func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
return server.pkgAuthzService.Check(ctx, tuple)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tupleSlice, err := typeable.Tuples(subject, relation, selectors, orgID)
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
for idx, tuple := range tupleSlice {
tuples[strconv.Itoa(idx)] = tuple
}
response, err := server.BatchCheck(ctx, tuples)
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
for _, resp := range response {
if resp.Authorized {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
@@ -66,32 +57,21 @@ func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, o
return err
}
tupleSlice, err := typeable.Tuples(subject, relation, selectors, orgID)
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
if err != nil {
return err
}
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
for idx, tuple := range tupleSlice {
tuples[strconv.Itoa(idx)] = tuple
}
response, err := server.BatchCheck(ctx, tuples)
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
for _, resp := range response {
if resp.Authorized {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
return server.pkgAuthzService.BatchCheck(ctx, tupleReq)
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
return server.pkgAuthzService.BatchCheck(ctx, tuples)
}
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {

View File

@@ -220,7 +220,6 @@ func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.T
return map[string][]*authtypes.Transaction{
roletypes.SigNozAnonymousRoleName: {
{
ID: valuer.GenerateUUID(),
Relation: authtypes.RelationRead,
Object: *authtypes.MustNewObject(
authtypes.Resource{

View File

@@ -1,178 +0,0 @@
package querier
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
type handler struct {
set factory.ProviderSettings
querier querier.Querier
community querier.Handler
}
func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
return &handler{
set: set,
querier: querier,
community: community,
}
}
func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
bodyBytes, err := io.ReadAll(req.Body)
if err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
return
}
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
if err != nil {
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
return
}
results := []any{}
for _, item := range anomalies.Results {
results = append(results, item)
}
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{
Type: queryRangeRequest.RequestType,
Data: qbtypes.QueryData{
Results: results,
},
Meta: qbtypes.ExecStats{
StepIntervals: stepIntervals,
},
}
render.Success(rw, http.StatusOK, finalResp)
return
}
// regular query range request, delegate to community handler
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
h.community.QueryRange(rw, req)
}
func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
h.community.QueryRawStream(rw, req)
}
func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
h.community.ReplaceVariables(rw, req)
}
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
for _, fn := range anomalyQuery.Functions {
if fn.Name == qbtypes.FunctionNameAnomaly {
for _, arg := range fn.Args {
if arg.Name == "seasonality" {
if seasonalityStr, ok := arg.Value.(string); ok {
switch seasonalityStr {
case "weekly":
return anomalyV2.SeasonalityWeekly
case "hourly":
return anomalyV2.SeasonalityHourly
}
}
}
}
}
}
return anomalyV2.SeasonalityDaily // default
}
func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
switch seasonality {
case anomalyV2.SeasonalityWeekly:
return anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
)
case anomalyV2.SeasonalityHourly:
return anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
)
default:
return anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
)
}
}
func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
seasonality := extractSeasonality(anomalyQuery)
provider := h.createAnomalyProvider(seasonality)
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}

View File

@@ -2,13 +2,17 @@ package api
import (
"net/http"
"net/http/httputil"
"time"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -29,6 +33,7 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
CloudIntegrationsController *cloudintegrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Gateway *httputil.ReverseProxy
GatewayUrl string
// Querier Influx Interval
FluxInterval time.Duration
@@ -52,6 +57,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}, config)
@@ -74,6 +80,10 @@ func (ah *APIHandler) UM() *usage.Manager {
return ah.opts.UsageManager
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
// RegisterRoutes registers routes for this handler on the given router
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// note: add ee override methods first
@@ -96,6 +106,17 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.Handle("/api/v5/query_range", handler.New(
am.ViewAccess(ah.queryRangeV5),
querierAPI.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
// Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
ah.APIHandler.RegisterRoutes(router, am)
}

View File

@@ -0,0 +1,58 @@
package api
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
return
}
validPath := false
for _, allowedPrefix := range gateway.AllowedPrefix {
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
validPath = true
break
}
}
if !validPath {
rw.WriteHeader(http.StatusNotFound)
return
}
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
render.Error(rw, err)
return
}
//Create headers
var licenseKey string
if license != nil {
licenseKey = license.Key
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
req.Header.Set("X-Consumer-Groups", "ns:default")
ah.Gateway().ServeHTTP(rw, req)
}

View File

@@ -2,11 +2,16 @@ package api
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -15,6 +20,8 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -137,3 +144,140 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
}
}
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
for _, fn := range anomalyQuery.Functions {
if fn.Name == qbtypes.FunctionNameAnomaly {
for _, arg := range fn.Args {
if arg.Name == "seasonality" {
if seasonalityStr, ok := arg.Value.(string); ok {
switch seasonalityStr {
case "weekly":
return anomalyV2.SeasonalityWeekly
case "hourly":
return anomalyV2.SeasonalityHourly
}
}
}
}
}
}
return anomalyV2.SeasonalityDaily // default
}
func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
switch seasonality {
case anomalyV2.SeasonalityWeekly:
return anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
)
case anomalyV2.SeasonalityHourly:
return anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
)
default:
return anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
)
}
}
func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
seasonality := extractSeasonality(anomalyQuery)
provider := createAnomalyProvider(aH, seasonality)
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}
func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
bodyBytes, err := io.ReadAll(req.Body)
if err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
return
}
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
if err != nil {
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
return
}
results := []any{}
for _, item := range anomalies.Results {
results = append(results, item)
}
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{
Type: queryRangeRequest.RequestType,
Data: qbtypes.QueryData{
Results: results,
},
Meta: qbtypes.ExecStats{
StepIntervals: stepIntervals,
},
}
render.Success(rw, http.StatusOK, finalResp)
return
} else {
// regular query range request, let the querier handle it
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
aH.QuerierAPI.QueryRange(rw, req)
}
}

View File

@@ -19,6 +19,7 @@ import (
"github.com/gorilla/handlers"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
@@ -71,6 +72,11 @@ type Server struct {
// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
if err != nil {
return nil, err
}
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
Provider: "memory",
Memory: cache.Memory{
@@ -164,6 +170,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
CloudIntegrationsController: cloudIntegrationsController,
LogsParsingPipelineController: logParsingPipelineController,
FluxInterval: config.Querier.FluxInterval,
Gateway: gatewayProxy,
GatewayUrl: config.Gateway.URL.String(),
GlobalConfig: config.Global,
}
@@ -233,6 +240,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterQueryRangeV5Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)

View File

@@ -0,0 +1,9 @@
package gateway
import (
"net/http/httputil"
)
func NewNoopProxy() (*httputil.ReverseProxy, error) {
return &httputil.ReverseProxy{}, nil
}

View File

@@ -0,0 +1,66 @@
package gateway
import (
"net/http"
"net/http/httputil"
"net/url"
"path"
"strings"
)
var (
RoutePrefix string = "/api/gateway"
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
)
type proxy struct {
url *url.URL
stripPath string
}
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
url, err := url.Parse(u)
if err != nil {
return nil, err
}
proxy := &proxy{url: url, stripPath: stripPath}
return &httputil.ReverseProxy{
Rewrite: proxy.rewrite,
ModifyResponse: proxy.modifyResponse,
ErrorHandler: proxy.errorHandler,
}, nil
}
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
pr.SetURL(p.url)
pr.SetXForwarded()
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}
func (p *proxy) modifyResponse(res *http.Response) error {
return nil
}
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
rw.WriteHeader(http.StatusBadGateway)
}
func cleanPath(p string) string {
if p == "" {
return "/"
}
if p[0] != '/' {
p = "/" + p
}
np := path.Clean(p)
if p[len(p)-1] == '/' && np != "/" {
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
np = p
} else {
np += "/"
}
}
return np
}

View File

@@ -0,0 +1,61 @@
package gateway
import (
"context"
"net/http"
"net/http/httputil"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProxyRewrite(t *testing.T) {
testCases := []struct {
name string
url *url.URL
stripPath string
in *url.URL
expected *url.URL
}{
{
name: "SamePathAdded",
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
stripPath: "/strip",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
},
{
name: "NoStripPathInput",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
{
name: "NoStripPathPresentInReq",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "/not-found",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
}
for _, tc := range testCases {
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
require.NoError(t, err)
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
require.NoError(t, err)
proxyReq := &httputil.ProxyRequest{
In: inReq,
Out: inReq.Clone(context.Background()),
}
proxy.Rewrite(proxyReq)
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
}
}

View File

@@ -3,7 +3,6 @@ package httpzeus
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"net/url"
@@ -11,7 +10,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/types/zeustypes"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
@@ -121,13 +119,8 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
return err
}
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
body, err := json.Marshal(profile)
if err != nil {
return err
}
_, err = provider.do(
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/profiles/me"),
http.MethodPut,
@@ -138,15 +131,10 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, profile *z
return err
}
func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
body, err := json.Marshal(host)
if err != nil {
return err
}
_, err = provider.do(
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
_, err := provider.do(
ctx,
provider.config.URL.JoinPath("/v2/deployments/me/host"),
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
http.MethodPut,
key,
body,
@@ -181,28 +169,21 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
return body, nil
}
errorMessage := gjson.GetBytes(body, "error").String()
if errorMessage == "" {
errorMessage = "an unknown error occurred"
}
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
return nil, provider.errFromStatusCode(response.StatusCode)
}
// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
func (provider *Provider) errFromStatusCode(statusCode int) error {
switch statusCode {
case http.StatusBadRequest:
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
case http.StatusUnauthorized:
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
case http.StatusForbidden:
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
case http.StatusNotFound:
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
case http.StatusConflict:
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
}
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
}

View File

@@ -0,0 +1,29 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CreateIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const createIngestionKey = async (
props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createIngestionKey;

View File

@@ -0,0 +1,26 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/keys/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteIngestionKey;

View File

@@ -0,0 +1,21 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const getAllIngestionKeys = (
props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { search, per_page, page } = props;
const BASE_URL = '/workspaces/me/keys';
const URL_QUERY_PARAMS =
search && search.length > 0
? `/search?name=${search}&page=1&per_page=100`
: `?page=${page}&per_page=${per_page}`;
return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

View File

@@ -0,0 +1,65 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
AddLimitProps,
LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const createLimitForIngestionKey = async (
props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post(
`/workspaces/me/keys/${props.keyID}/limits`,
{
...props,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default createLimitForIngestionKey;

View File

@@ -0,0 +1,26 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteLimitsForIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/limits/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteLimitsForIngestionKey;

View File

@@ -0,0 +1,65 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
LimitSuccessProps,
UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const updateLimitForIngestionKey = async (
props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/limits/${props.limitID}`,
{
config: props.config,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error || 'Unknown error',
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default updateLimitForIngestionKey;

View File

@@ -0,0 +1,32 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IngestionKeysPayloadProps,
UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const updateIngestionKey = async (
props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/keys/${props.id}`,
{
...props.data,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateIngestionKey;

View File

@@ -4,6 +4,8 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';
export default apiV1;

View File

@@ -0,0 +1,7 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
export const getDeploymentsData = (): Promise<
AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);
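
A sketch of wiring the deployments fetcher into react-query; the hook name, query key, and import path are assumptions, while react-query itself is already used elsewhere in the codebase.

import { useQuery, UseQueryResult } from 'react-query';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
import { getDeploymentsData } from 'api/customDomain/getDeploymentsData'; // path assumed

export const useDeploymentsData = (): UseQueryResult<
	AxiosResponse<DeploymentsDataProps>
> => useQuery(['deploymentsData'], () => getDeploymentsData());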

View File

@@ -0,0 +1,16 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
PayloadProps,
UpdateCustomDomainProps,
} from 'types/api/customDomain/types';
const updateSubDomainAPI = async (
props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
axios.put(`/deployments/me/host`, {
...props.data,
});
export default updateSubDomainAPI;

View File

@@ -1,107 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
AuthtypesTransactionDTO,
AuthzCheck200,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* Checks if the authenticated user has permissions for given transactions
* @summary Check permissions
*/
export const authzCheck = (
authtypesTransactionDTO: AuthtypesTransactionDTO[],
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<AuthzCheck200>({
url: `/api/v1/authz/check`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: authtypesTransactionDTO,
signal,
});
};
export const getAuthzCheckMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof authzCheck>>,
TError,
{ data: AuthtypesTransactionDTO[] },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof authzCheck>>,
TError,
{ data: AuthtypesTransactionDTO[] },
TContext
> => {
const mutationKey = ['authzCheck'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof authzCheck>>,
{ data: AuthtypesTransactionDTO[] }
> = (props) => {
const { data } = props ?? {};
return authzCheck(data);
};
return { mutationFn, ...mutationOptions };
};
export type AuthzCheckMutationResult = NonNullable<
Awaited<ReturnType<typeof authzCheck>>
>;
export type AuthzCheckMutationBody = AuthtypesTransactionDTO[];
export type AuthzCheckMutationError = RenderErrorResponseDTO;
/**
* @summary Check permissions
*/
export const useAuthzCheck = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof authzCheck>>,
TError,
{ data: AuthtypesTransactionDTO[] },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof authzCheck>>,
TError,
{ data: AuthtypesTransactionDTO[] },
TContext
> => {
const mutationOptions = getAuthzCheckMutationOptions(options);
return useMutation(mutationOptions);
};

View File

@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
* @summary Search ingestion keys for workspace
*/
export const searchIngestionKeys = (
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
*/
export const invalidateSearchIngestionKeys = async (
queryClient: QueryClient,
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(

View File

@@ -16,7 +16,6 @@ import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
ReplaceVariables200,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
@@ -106,86 +105,3 @@ export const useQueryRangeV5 = <
return useMutation(mutationOptions);
};
/**
* Replace variables in a query
* @summary Replace variables
*/
export const replaceVariables = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ReplaceVariables200>({
url: `/api/v5/substitute_vars`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getReplaceVariablesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['replaceVariables'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof replaceVariables>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return replaceVariables(data);
};
return { mutationFn, ...mutationOptions };
};
export type ReplaceVariablesMutationResult = NonNullable<
Awaited<ReturnType<typeof replaceVariables>>
>;
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
/**
* @summary Replace variables
*/
export const useReplaceVariables = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getReplaceVariablesMutationOptions(options);
return useMutation(mutationOptions);
};

View File

@@ -21,18 +21,11 @@ import { GeneratedAPIInstance } from '../../../index';
import type {
CreateRole201,
DeleteRolePathParameters,
GetObjects200,
GetObjectsPathParameters,
GetResources200,
GetRole200,
GetRolePathParameters,
ListRoles200,
PatchObjectsPathParameters,
PatchRolePathParameters,
RenderErrorResponseDTO,
RoletypesPatchableObjectsDTO,
RoletypesPatchableRoleDTO,
RoletypesPostableRoleDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
@@ -121,15 +114,10 @@ export const invalidateListRoles = async (
* This endpoint creates a role
* @summary Create role
*/
export const createRole = (
roletypesPostableRoleDTO: RoletypesPostableRoleDTO,
signal?: AbortSignal,
) => {
export const createRole = (signal?: AbortSignal) => {
return GeneratedAPIInstance<CreateRole201>({
url: `/api/v1/roles`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: roletypesPostableRoleDTO,
signal,
});
};
@@ -141,13 +129,13 @@ export const getCreateRoleMutationOptions = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: RoletypesPostableRoleDTO },
void,
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: RoletypesPostableRoleDTO },
void,
TContext
> => {
const mutationKey = ['createRole'];
@@ -161,11 +149,9 @@ export const getCreateRoleMutationOptions = <
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof createRole>>,
{ data: RoletypesPostableRoleDTO }
> = (props) => {
const { data } = props ?? {};
return createRole(data);
void
> = () => {
return createRole();
};
return { mutationFn, ...mutationOptions };
@@ -174,7 +160,7 @@ export const getCreateRoleMutationOptions = <
export type CreateRoleMutationResult = NonNullable<
Awaited<ReturnType<typeof createRole>>
>;
export type CreateRoleMutationBody = RoletypesPostableRoleDTO;
export type CreateRoleMutationError = RenderErrorResponseDTO;
/**
@@ -187,13 +173,13 @@ export const useCreateRole = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: RoletypesPostableRoleDTO },
void,
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: RoletypesPostableRoleDTO },
void,
TContext
> => {
const mutationOptions = getCreateRoleMutationOptions(options);
@@ -372,15 +358,10 @@ export const invalidateGetRole = async (
* This endpoint patches a role
* @summary Patch role
*/
export const patchRole = (
{ id }: PatchRolePathParameters,
roletypesPatchableRoleDTO: RoletypesPatchableRoleDTO,
) => {
export const patchRole = ({ id }: PatchRolePathParameters) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/roles/${id}`,
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
data: roletypesPatchableRoleDTO,
});
};
@@ -391,13 +372,13 @@ export const getPatchRoleMutationOptions = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof patchRole>>,
TError,
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
{ pathParams: PatchRolePathParameters },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof patchRole>>,
TError,
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
{ pathParams: PatchRolePathParameters },
TContext
> => {
const mutationKey = ['patchRole'];
@@ -411,11 +392,11 @@ export const getPatchRoleMutationOptions = <
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof patchRole>>,
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO }
{ pathParams: PatchRolePathParameters }
> = (props) => {
const { pathParams, data } = props ?? {};
const { pathParams } = props ?? {};
return patchRole(pathParams, data);
return patchRole(pathParams);
};
return { mutationFn, ...mutationOptions };
@@ -424,7 +405,7 @@ export const getPatchRoleMutationOptions = <
export type PatchRoleMutationResult = NonNullable<
Awaited<ReturnType<typeof patchRole>>
>;
export type PatchRoleMutationBody = RoletypesPatchableRoleDTO;
export type PatchRoleMutationError = RenderErrorResponseDTO;
/**
@@ -437,292 +418,16 @@ export const usePatchRole = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof patchRole>>,
TError,
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
{ pathParams: PatchRolePathParameters },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof patchRole>>,
TError,
{ pathParams: PatchRolePathParameters; data: RoletypesPatchableRoleDTO },
{ pathParams: PatchRolePathParameters },
TContext
> => {
const mutationOptions = getPatchRoleMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* Gets all objects connected to the specified role via a given relation type
* @summary Get objects for a role by relation
*/
export const getObjects = (
{ id, relation }: GetObjectsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetObjects200>({
url: `/api/v1/roles/${id}/relation/${relation}/objects`,
method: 'GET',
signal,
});
};
export const getGetObjectsQueryKey = ({
id,
relation,
}: GetObjectsPathParameters) => {
return ['getObjects'] as const;
};
export const getGetObjectsQueryOptions = <
TData = Awaited<ReturnType<typeof getObjects>>,
TError = RenderErrorResponseDTO
>(
{ id, relation }: GetObjectsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getObjects>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetObjectsQueryKey({ id, relation });
const queryFn: QueryFunction<Awaited<ReturnType<typeof getObjects>>> = ({
signal,
}) => getObjects({ id, relation }, signal);
return {
queryKey,
queryFn,
enabled: !!(id && relation),
...queryOptions,
} as UseQueryOptions<Awaited<ReturnType<typeof getObjects>>, TError, TData> & {
queryKey: QueryKey;
};
};
export type GetObjectsQueryResult = NonNullable<
Awaited<ReturnType<typeof getObjects>>
>;
export type GetObjectsQueryError = RenderErrorResponseDTO;
/**
* @summary Get objects for a role by relation
*/
export function useGetObjects<
TData = Awaited<ReturnType<typeof getObjects>>,
TError = RenderErrorResponseDTO
>(
{ id, relation }: GetObjectsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getObjects>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetObjectsQueryOptions({ id, relation }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get objects for a role by relation
*/
export const invalidateGetObjects = async (
queryClient: QueryClient,
{ id, relation }: GetObjectsPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetObjectsQueryKey({ id, relation }) },
options,
);
return queryClient;
};
/**
* Patches the objects connected to the specified role via a given relation type
* @summary Patch objects for a role by relation
*/
export const patchObjects = (
{ id, relation }: PatchObjectsPathParameters,
roletypesPatchableObjectsDTO: RoletypesPatchableObjectsDTO,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/roles/${id}/relation/${relation}/objects`,
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
data: roletypesPatchableObjectsDTO,
});
};
export const getPatchObjectsMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof patchObjects>>,
TError,
{
pathParams: PatchObjectsPathParameters;
data: RoletypesPatchableObjectsDTO;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof patchObjects>>,
TError,
{ pathParams: PatchObjectsPathParameters; data: RoletypesPatchableObjectsDTO },
TContext
> => {
const mutationKey = ['patchObjects'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof patchObjects>>,
{ pathParams: PatchObjectsPathParameters; data: RoletypesPatchableObjectsDTO }
> = (props) => {
const { pathParams, data } = props ?? {};
return patchObjects(pathParams, data);
};
return { mutationFn, ...mutationOptions };
};
export type PatchObjectsMutationResult = NonNullable<
Awaited<ReturnType<typeof patchObjects>>
>;
export type PatchObjectsMutationBody = RoletypesPatchableObjectsDTO;
export type PatchObjectsMutationError = RenderErrorResponseDTO;
/**
* @summary Patch objects for a role by relation
*/
export const usePatchObjects = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof patchObjects>>,
TError,
{
pathParams: PatchObjectsPathParameters;
data: RoletypesPatchableObjectsDTO;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof patchObjects>>,
TError,
{ pathParams: PatchObjectsPathParameters; data: RoletypesPatchableObjectsDTO },
TContext
> => {
const mutationOptions = getPatchObjectsMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* Gets all the available resources for role assignment
* @summary Get resources
*/
export const getResources = (signal?: AbortSignal) => {
return GeneratedAPIInstance<GetResources200>({
url: `/api/v1/roles/resources`,
method: 'GET',
signal,
});
};
export const getGetResourcesQueryKey = () => {
return ['getResources'] as const;
};
export const getGetResourcesQueryOptions = <
TData = Awaited<ReturnType<typeof getResources>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getResources>>,
TError,
TData
>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetResourcesQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof getResources>>> = ({
signal,
}) => getResources(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getResources>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetResourcesQueryResult = NonNullable<
Awaited<ReturnType<typeof getResources>>
>;
export type GetResourcesQueryError = RenderErrorResponseDTO;
/**
* @summary Get resources
*/
export function useGetResources<
TData = Awaited<ReturnType<typeof getResources>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getResources>>,
TError,
TData
>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetResourcesQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get resources
*/
export const invalidateGetResources = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetResourcesQueryKey() },
options,
);
return queryClient;
};

View File

@@ -127,18 +127,6 @@ export interface AuthtypesGettableTokenDTO {
tokenType?: string;
}
export interface AuthtypesGettableTransactionDTO {
/**
* @type boolean
*/
authorized?: boolean;
object?: AuthtypesObjectDTO;
/**
* @type string
*/
relation?: string;
}
export type AuthtypesGoogleConfigDTODomainToAdminEmail = {
[key: string]: string;
};
@@ -182,10 +170,6 @@ export interface AuthtypesGoogleConfigDTO {
serviceAccountJson?: string;
}
export interface AuthtypesNameDTO {
[key: string]: unknown;
}
export interface AuthtypesOIDCConfigDTO {
claimMapping?: AuthtypesAttributeMappingDTO;
/**
@@ -214,11 +198,6 @@ export interface AuthtypesOIDCConfigDTO {
issuerAlias?: string;
}
export interface AuthtypesObjectDTO {
resource: AuthtypesResourceDTO;
selector: AuthtypesSelectorDTO;
}
export interface AuthtypesOrgSessionContextDTO {
authNSupport?: AuthtypesAuthNSupportDTO;
/**
@@ -269,14 +248,6 @@ export interface AuthtypesPostableRotateTokenDTO {
refreshToken?: string;
}
export interface AuthtypesResourceDTO {
name: AuthtypesNameDTO;
/**
* @type string
*/
type: string;
}
/**
* @nullable
*/
@@ -320,10 +291,6 @@ export interface AuthtypesSamlConfigDTO {
samlIdp?: string;
}
export interface AuthtypesSelectorDTO {
[key: string]: unknown;
}
export interface AuthtypesSessionContextDTO {
/**
* @type boolean
@@ -336,18 +303,6 @@ export interface AuthtypesSessionContextDTO {
orgs?: AuthtypesOrgSessionContextDTO[] | null;
}
export interface AuthtypesTransactionDTO {
/**
* @type string
*/
id?: string;
object: AuthtypesObjectDTO;
/**
* @type string
*/
relation: string;
}
export interface AuthtypesUpdateableAuthDomainDTO {
config?: AuthtypesAuthDomainConfigDTO;
}
@@ -498,18 +453,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
/**
* @type string
*/
id: string;
id?: string;
/**
* @type string
*/
value: string;
value?: string;
}
export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
/**
* @type string
*/
id: string;
id?: string;
}
export interface GatewaytypesGettableIngestionKeysDTO {
@@ -661,7 +616,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
/**
* @type string
*/
name: string;
name?: string;
/**
* @type array
* @nullable true
@@ -683,7 +638,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
}
export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
config: GatewaytypesLimitConfigDTO;
config?: GatewaytypesLimitConfigDTO;
/**
* @type array
* @nullable true
@@ -1992,58 +1947,6 @@ export interface RenderErrorResponseDTO {
status?: string;
}
/**
* @nullable
*/
export type RoletypesGettableResourcesDTORelations = {
[key: string]: string[];
} | null;
export interface RoletypesGettableResourcesDTO {
/**
* @type object
* @nullable true
*/
relations?: RoletypesGettableResourcesDTORelations;
/**
* @type array
* @nullable true
*/
resources?: AuthtypesResourceDTO[] | null;
}
export interface RoletypesPatchableObjectsDTO {
/**
* @type array
* @nullable true
*/
additions: AuthtypesObjectDTO[] | null;
/**
* @type array
* @nullable true
*/
deletions: AuthtypesObjectDTO[] | null;
}
export interface RoletypesPatchableRoleDTO {
/**
* @type string
* @nullable true
*/
description?: string | null;
}
export interface RoletypesPostableRoleDTO {
/**
* @type string
*/
description?: string;
/**
* @type string
*/
name: string;
}
export interface RoletypesRoleDTO {
/**
* @type string
@@ -2511,102 +2414,6 @@ export interface TypesUserDTO {
updatedAt?: Date;
}
export interface ZeustypesGettableHostDTO {
/**
* @type array
* @nullable true
*/
hosts: ZeustypesHostDTO[] | null;
/**
* @type string
*/
name: string;
/**
* @type string
*/
state: string;
/**
* @type string
*/
tier: string;
}
export interface ZeustypesHostDTO {
/**
* @type boolean
*/
is_default: boolean;
/**
* @type string
*/
name: string;
/**
* @type string
*/
url: string;
}
export interface ZeustypesPostableHostDTO {
/**
* @type string
*/
name: string;
}
export interface ZeustypesPostableProfileDTO {
/**
* @type string
*/
existing_observability_tool: string;
/**
* @type boolean
*/
has_existing_observability_tool: boolean;
/**
* @type integer
* @format int64
*/
logs_scale_per_day_in_gb: number;
/**
* @type integer
* @format int64
*/
number_of_hosts: number;
/**
* @type integer
* @format int64
*/
number_of_services: number;
/**
* @type array
* @nullable true
*/
reasons_for_interest_in_signoz: string[] | null;
/**
* @type string
*/
timeline_for_migrating_to_signoz: string;
/**
* @type boolean
*/
uses_otel: boolean;
/**
* @type string
*/
where_did_you_discover_signoz: string;
}
export type AuthzCheck200 = {
/**
* @type array
*/
data?: AuthtypesGettableTransactionDTO[];
/**
* @type string
*/
status?: string;
};
export type ChangePasswordPathParameters = {
id: string;
};
@@ -3010,33 +2817,6 @@ export type GetRole200 = {
export type PatchRolePathParameters = {
id: string;
};
export type GetObjectsPathParameters = {
id: string;
relation: string;
};
export type GetObjects200 = {
/**
* @type array
*/
data?: AuthtypesObjectDTO[];
/**
* @type string
*/
status?: string;
};
export type PatchObjectsPathParameters = {
id: string;
relation: string;
};
export type GetResources200 = {
data?: RoletypesGettableResourcesDTO;
/**
* @type string
*/
status?: string;
};
export type ListUsers200 = {
/**
* @type array
@@ -3174,7 +2954,7 @@ export type SearchIngestionKeysParams = {
* @type string
* @description undefined
*/
name: string;
name?: string;
/**
* @type integer
* @description undefined
@@ -3349,14 +3129,6 @@ export type RotateSession200 = {
status?: string;
};
export type GetHosts200 = {
data?: ZeustypesGettableHostDTO;
/**
* @type string
*/
status?: string;
};
export type QueryRangeV5200 = {
data?: Querybuildertypesv5QueryRangeResponseDTO;
/**
@@ -3364,11 +3136,3 @@ export type QueryRangeV5200 = {
*/
status?: string;
};
export type ReplaceVariables200 = {
data?: Querybuildertypesv5QueryRangeRequestDTO;
/**
* @type string
*/
status?: string;
};

View File

@@ -1,269 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
MutationFunction,
QueryClient,
QueryFunction,
QueryKey,
UseMutationOptions,
UseMutationResult,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';
import { GeneratedAPIInstance } from '../../../index';
import type {
GetHosts200,
RenderErrorResponseDTO,
ZeustypesPostableHostDTO,
ZeustypesPostableProfileDTO,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
/**
* This endpoint gets the host info from zeus.
* @summary Get host info from Zeus.
*/
export const getHosts = (signal?: AbortSignal) => {
return GeneratedAPIInstance<GetHosts200>({
url: `/api/v2/zeus/hosts`,
method: 'GET',
signal,
});
};
export const getGetHostsQueryKey = () => {
return ['getHosts'] as const;
};
export const getGetHostsQueryOptions = <
TData = Awaited<ReturnType<typeof getHosts>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
signal,
}) => getHosts(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getHosts>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetHostsQueryResult = NonNullable<
Awaited<ReturnType<typeof getHosts>>
>;
export type GetHostsQueryError = RenderErrorResponseDTO;
/**
* @summary Get host info from Zeus.
*/
export function useGetHosts<
TData = Awaited<ReturnType<typeof getHosts>>,
TError = RenderErrorResponseDTO
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetHostsQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get host info from Zeus.
*/
export const invalidateGetHosts = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetHostsQueryKey() },
options,
);
return queryClient;
};
/**
* This endpoint saves the host of a deployment to zeus.
* @summary Put host in Zeus for a deployment.
*/
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
return GeneratedAPIInstance<void>({
url: `/api/v2/zeus/hosts`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: zeustypesPostableHostDTO,
});
};
export const getPutHostMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
> => {
const mutationKey = ['putHost'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof putHost>>,
{ data: ZeustypesPostableHostDTO }
> = (props) => {
const { data } = props ?? {};
return putHost(data);
};
return { mutationFn, ...mutationOptions };
};
export type PutHostMutationResult = NonNullable<
Awaited<ReturnType<typeof putHost>>
>;
export type PutHostMutationBody = ZeustypesPostableHostDTO;
export type PutHostMutationError = RenderErrorResponseDTO;
/**
* @summary Put host in Zeus for a deployment.
*/
export const usePutHost = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof putHost>>,
TError,
{ data: ZeustypesPostableHostDTO },
TContext
> => {
const mutationOptions = getPutHostMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint saves the profile of a deployment to zeus.
* @summary Put profile in Zeus for a deployment.
*/
export const putProfile = (
zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
) => {
return GeneratedAPIInstance<void>({
url: `/api/v2/zeus/profiles`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: zeustypesPostableProfileDTO,
});
};
export const getPutProfileMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
> => {
const mutationKey = ['putProfile'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof putProfile>>,
{ data: ZeustypesPostableProfileDTO }
> = (props) => {
const { data } = props ?? {};
return putProfile(data);
};
return { mutationFn, ...mutationOptions };
};
export type PutProfileMutationResult = NonNullable<
Awaited<ReturnType<typeof putProfile>>
>;
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
export type PutProfileMutationError = RenderErrorResponseDTO;
/**
* @summary Put profile in Zeus for a deployment.
*/
export const usePutProfile = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof putProfile>>,
TError,
{ data: ZeustypesPostableProfileDTO },
TContext
> => {
const mutationOptions = getPutProfileMutationOptions(options);
return useMutation(mutationOptions);
};

View File

@@ -15,7 +15,15 @@ import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { eventEmitter } from 'utils/getEventEmitter';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import apiV1, {
apiAlertManager,
apiV2,
apiV3,
apiV4,
apiV5,
gatewayApiV1,
gatewayApiV2,
} from './apiV1';
import { Logout } from './utils';
const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -203,6 +211,24 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});
GatewayApiV1Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V2
export const GatewayApiV2Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});
// generated API Instance
export const GeneratedAPIInstance = axios.create({
baseURL: ENVIRONMENT.baseURL,
@@ -214,6 +240,14 @@ GeneratedAPIInstance.interceptors.response.use(
interceptorRejected,
);
GatewayApiV2Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//
AxiosAlertManagerInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
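
For reference, a sketch of how the two new instances compose request URLs; the base paths come from apiV1.ts above, and the helper below is illustrative only.

import { GatewayApiV1Instance, GatewayApiV2Instance } from 'api';

// GatewayApiV1Instance.get('/workspaces/me/keys') -> <baseURL>/api/gateway/v1/workspaces/me/keys
// GatewayApiV2Instance.get('/deployments/me')     -> <baseURL>/api/gateway/v2/deployments/me
export const fetchDeployments = (): Promise<unknown> =>
	GatewayApiV2Instance.get('/deployments/me').then((res) => res.data);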

View File

@@ -0,0 +1,20 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';
const updateProfile = async (
props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
const response = await GatewayApiV2Instance.put('/profiles/me', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateProfile;

View File

@@ -42,7 +42,6 @@
display: flex;
flex-direction: column;
padding: 16px;
padding-bottom: 0;
}
.title {
@@ -191,7 +190,7 @@
padding: 0px;
}
.log-detail-drawer__footer-hint {
position: sticky;
position: absolute;
bottom: 0;
left: 0;
right: 0;
@@ -367,7 +366,7 @@
}
.log-detail-drawer__footer-hint {
position: sticky;
position: absolute;
bottom: 0;
left: 0;
right: 0;

View File

@@ -202,7 +202,7 @@ function AllEndPoints({
const onRowClick = useCallback(
(props: any): void => {
setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
setSelectedView(VIEWS.ENDPOINT_STATS);
const initialItems = [
...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
op: 'AND',
});
setParams({
selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
selectedView: VIEWS.ENDPOINT_STATS,
endPointDetailsLocalFilters: {
items: initialItems,

View File

@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
const httpUrlKey = {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
};
@@ -93,7 +93,7 @@ function EndPointDetails({
return currentFilters; // No change needed, prevents loop
}
// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
const otherFilters = currentFilters?.items?.filter(
(item) => item.key?.key !== httpUrlKey.key,
);
@@ -125,7 +125,7 @@ function EndPointDetails({
(newFilters: IBuilderQuery['filters']): void => {
// 1. Update local filters state immediately
setFilters(newFilters);
// Filter out http_url filter before saving to params
// Filter out http.url filter before saving to params
const filteredNewFilters = {
op: 'AND',
items:
@@ -299,6 +299,7 @@ function EndPointDetails({
endPointStatusCodeLatencyBarChartsDataQuery
}
domainName={domainName}
endPointName={endPointName}
filters={filters}
timeRange={timeRange}
onDragSelect={onDragSelect}

View File

@@ -56,15 +56,15 @@ function TopErrors({
{
items: endPointName
? [
// Remove any existing http_url filters from initialFilters to avoid duplicates
// Remove any existing http.url filters from initialFilters to avoid duplicates
...(initialFilters?.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
) || []),
{
id: '92b8a1c1',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',

View File

@@ -9,7 +9,6 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../constants';
import DomainMetrics from './DomainMetrics';
// Mock the API call
@@ -127,9 +126,11 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'count()',
);
// Verify exact domain filter expression structure
expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryA.filter.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryA.filter.expression).toContain(
'url.full EXISTS OR http.url EXISTS',
);
// Verify Query B - p99 latency
@@ -141,13 +142,17 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'p99(duration_nano)',
);
// Verify exact domain filter expression structure
expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryB.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
// Verify Query C - error count (disabled)
const queryC = queryData.find((q: any) => q.queryName === 'C');
expect(queryC).toBeDefined();
expect(queryC.disabled).toBe(true);
expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryC.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
expect(queryC.aggregations?.[0]).toBeDefined();
expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
'count()',
@@ -164,7 +169,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
'max(timestamp)',
);
// Verify exact domain filter expression structure
expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
expect(queryD.filter.expression).toContain(
"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
);
// Verify Formula F1 - error rate calculation
const formulas = payload.query.builder.queryFormulas;

View File

@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryA.filter) {
expect(queryA.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryA.filter.expression).toContain("kind_string = 'Client'");
}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryB.filter) {
expect(queryB.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryB.filter.expression).toContain("kind_string = 'Client'");
}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(queryC.aggregateOperator).toBe('count');
if (queryC.filter) {
expect(queryC.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryC.filter.expression).toContain("kind_string = 'Client'");
expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
// Verify exact domain filter expression structure
if (queryD.filter) {
expect(queryD.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryD.filter.expression).toContain("kind_string = 'Client'");
}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
}
if (queryE.filter) {
expect(queryE.filter.expression).toContain(
`http_host = 'api.example.com'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
);
expect(queryE.filter.expression).toContain("kind_string = 'Client'");
}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
expect(query.filter.expression).toContain('staging');
// Also verify domain filter is still present
expect(query.filter.expression).toContain(
"http_host = 'api.internal.com'",
"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
);
// Verify client kind filter is present
expect(query.filter.expression).toContain("kind_string = 'Client'");

View File

@@ -34,6 +34,7 @@ function StatusCodeBarCharts({
endPointStatusCodeBarChartsDataQuery,
endPointStatusCodeLatencyBarChartsDataQuery,
domainName,
endPointName,
filters,
timeRange,
onDragSelect,
@@ -47,6 +48,7 @@ function StatusCodeBarCharts({
unknown
>;
domainName: string;
endPointName: string;
filters: IBuilderQuery['filters'];
timeRange: {
startTime: number;
@@ -142,11 +144,11 @@ function StatusCodeBarCharts({
const widget = useMemo<Widgets>(
() =>
getStatusCodeBarChartWidgetData(domainName, {
getStatusCodeBarChartWidgetData(domainName, endPointName, {
items: [...(filters?.items || [])],
op: filters?.op || 'AND',
}),
[domainName, filters],
[domainName, endPointName, filters],
);
const graphClickHandler = useCallback(
@@ -164,7 +166,6 @@ function StatusCodeBarCharts({
xValue,
TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
);
handleGraphClick({
xValue,
yValue,

View File

@@ -12,7 +12,7 @@ export const VIEW_TYPES = {
// Span attribute keys - these are the source of truth for all attribute keys
export const SPAN_ATTRIBUTES = {
HTTP_URL: 'http_url',
URL_PATH: 'http.url',
RESPONSE_STATUS_CODE: 'response_status_code',
SERVER_NAME: 'http_host',
SERVER_PORT: 'net.peer.port',
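
A sketch of how the renamed constant is consumed when building a query-builder filter item, mirroring the shape used in EndPointDetails and TopErrors; the id value and relative import path are illustrative.

import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { SPAN_ATTRIBUTES } from './constants'; // relative path assumed

// Filter item targeting the renamed endpoint attribute key ('http.url')
export const endpointFilter = {
	id: 'endpoint-filter', // illustrative id
	key: { dataType: DataTypes.String, key: SPAN_ATTRIBUTES.URL_PATH, type: 'tag' },
	op: '=',
	value: '/api/users',
};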

View File

@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
const endpointFilter = result?.items?.find(
(item) =>
item.key &&
item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
item.value === endPointName,
);
expect(endpointFilter).toBeDefined();
@@ -344,12 +344,13 @@ describe('API Monitoring Utils', () => {
describe('getFormattedEndPointDropDownData', () => {
it('should format endpoint dropdown data correctly', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
const mockData = [
{
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/users',
'url.full': 'http://example.com/api/users',
A: 150, // count or other metric
},
},
@@ -357,6 +358,7 @@ describe('API Monitoring Utils', () => {
data: {
// eslint-disable-next-line sonarjs/no-duplicate-string
[URL_PATH_KEY]: '/api/orders',
'url.full': 'http://example.com/api/orders',
A: 75,
},
},
@@ -404,7 +406,7 @@ describe('API Monitoring Utils', () => {
it('should handle items without URL path', () => {
// Arrange
const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
type MockDataType = {
data: {
[key: string]: string | number;
@@ -710,11 +712,13 @@ describe('API Monitoring Utils', () => {
it('should generate widget configuration for status code bar chart', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const filters = { items: [], op: 'AND' };
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);
@@ -737,11 +741,21 @@ describe('API Monitoring Utils', () => {
if (domainFilter) {
expect(domainFilter.value).toBe(domainName);
}
// Should have endpoint filter if provided
const endpointFilter = queryData.filters?.items?.find(
(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
);
expect(endpointFilter).toBeDefined();
if (endpointFilter) {
expect(endpointFilter.value).toBe(endPointName);
}
});
it('should include custom filters in the widget configuration', () => {
// Arrange
const domainName = 'test-domain';
const endPointName = '/api/test';
const customFilter = {
id: 'custom-filter',
key: {
@@ -757,6 +771,7 @@ describe('API Monitoring Utils', () => {
// Act
const result = getStatusCodeBarChartWidgetData(
domainName,
endPointName,
filters as IBuilderQuery['filters'],
);

View File

@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
type="button"
data-testid="row-click-button"
onClick={(): void =>
customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
}
>
Click Row

View File

@@ -6,10 +6,10 @@
* These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
* - Filter format change: filters.items[] → filter.expression
* - Aggregation format: aggregateAttribute → aggregations[] array
* - Domain filter: http_host = '${domainName}'
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,8 +18,6 @@ import {
} from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('AllEndpointsWidget - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const emptyFilters: IBuilderQuery['filters'] = {
@@ -94,28 +92,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
// Queries A, B, C have identical base filter
expect(queryA.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryB.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
expect(queryC.filter?.expression).toBe(
`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
);
// Query D has additional has_error filter
expect(queryD.filter?.expression).toBe(
`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
);
});
});
describe('2. GroupBy Structure', () => {
it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
it('default groupBy includes both http.url and url.full with type attribute', () => {
const widget = getAllEndpointsWidgetData(
emptyGroupBy,
mockDomainName,
@@ -126,13 +124,23 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have the same default groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(1);
expect(query.groupBy).toHaveLength(2);
// http.url
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
type: 'attribute',
});
// url.full
expect(query.groupBy).toContainEqual({
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
});
});
@@ -162,18 +170,19 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
// All queries should have defaults + custom groupBy
queryData.forEach((query) => {
expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
// First two should be defaults (http_url)
expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
// First two should be defaults (http.url, url.full)
expect(query.groupBy[0].key).toBe('http.url');
expect(query.groupBy[1].key).toBe('url.full');
// Last two should be custom (matching subset of properties)
expect(query.groupBy[1]).toMatchObject({
expect(query.groupBy[2]).toMatchObject({
dataType: DataTypes.String,
key: 'service.name',
type: 'resource',
});
expect(query.groupBy[2]).toMatchObject({
expect(query.groupBy[3]).toMatchObject({
dataType: DataTypes.String,
key: 'deployment.environment',
type: 'resource',

View File

@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
value: '/api/test',
}),
]),

View File

@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
it('formats data using the utility function', () => {
const mockRows = [
{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
];
const dataProps = {

View File

@@ -6,18 +6,15 @@
* These tests validate the migration from V4 to V5 format for the third payload
* in getEndPointDetailsQueryPayload (endpoint dropdown data):
* - Filter format change: filters.items[] → filter.expression
* - Domain handling: http_host = '${domainName}'
* - Domain handling: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - Existence check: http_url EXISTS
* - Existence check: (http.url EXISTS OR url.full EXISTS)
* - Aggregation: count() expression
* - GroupBy: http_url with type 'attribute'
* - GroupBy: Both http.url AND url.full with type 'attribute'
*/
import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
describe('EndpointDropdown - V5 Migration Validation', () => {
const mockDomainName = 'api.example.com';
const mockStartTime = 1000;
@@ -46,9 +43,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters');
// Base filter 1: Domain http_host = '${domainName}'
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -56,7 +53,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Base filter 3: Existence check
expect(queryA.filter?.expression).toContain(
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
'(http.url EXISTS OR url.full EXISTS)',
);
// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -67,11 +64,16 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
});
expect(queryA).not.toHaveProperty('aggregateAttribute');
// GroupBy: http_url
expect(queryA.groupBy).toHaveLength(1);
// GroupBy: Both http.url and url.full
expect(queryA.groupBy).toHaveLength(2);
expect(queryA.groupBy).toContainEqual({
key: SPAN_ATTRIBUTES.HTTP_URL,
dataType: DataTypes.String,
key: 'http.url',
dataType: 'string',
type: 'attribute',
});
expect(queryA.groupBy).toContainEqual({
key: 'url.full',
dataType: 'string',
type: 'attribute',
});
});
@@ -118,7 +120,53 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
// Exact filter expression with custom filters merged
expect(expression).toBe(
`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
);
});
});
describe('3. HTTP URL Filter Special Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const dropdownQuery = payload[2];
const expression =
dropdownQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: Exact filter expression with http.url converted to OR logic
expect(expression).toBe(
"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
);
});
});

View File

@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getRateOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
expect(queryData).not.toHaveProperty('filters.items');
});
it('uses new domain filter format: (http_host)', () => {
it('uses new domain filter format: (net.peer.name OR server.address)', () => {
const widget = getLatencyOverTimeWidgetData(
mockDomainName,
mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify EXACT new filter format with OR operator
expect(queryData.filter).toBeDefined();
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
// Verify domain filter is present
expect(queryData?.filter?.expression).toContain(
`http_host = '${mockDomainName}' service.name = 'user-service'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
);
});
});

View File

@@ -142,6 +142,7 @@ describe('StatusCodeBarCharts', () => {
endTime: 1609545600000,
};
const mockDomainName = 'test-domain';
const mockEndPointName = '/api/test';
const onDragSelectMock = jest.fn();
const refetchFn = jest.fn();
@@ -231,6 +232,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -266,6 +268,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -308,6 +311,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -352,6 +356,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -399,6 +404,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockFilters}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -413,6 +419,7 @@ describe('StatusCodeBarCharts', () => {
// but we've confirmed the function is mocked and ready to be tested
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: [],
op: 'AND',
@@ -460,6 +467,7 @@ describe('StatusCodeBarCharts', () => {
endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
domainName={mockDomainName}
endPointName={mockEndPointName}
filters={mockCustomFilters as IBuilderQuery['filters']}
timeRange={mockTimeRange}
onDragSelect={onDragSelectMock}
@@ -469,6 +477,7 @@ describe('StatusCodeBarCharts', () => {
// Assert widget creation was called with the correct parameters
expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
mockDomainName,
mockEndPointName,
expect.objectContaining({
items: expect.arrayContaining([
expect.objectContaining({ id: 'custom-filter' }),

View File
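For orientation, a minimal sketch of how the widget factory is now invoked with the endpoint name threaded through; the import path and the literal values are assumptions, while the three-argument order (domainName, endPointName, filters) matches the assertions above and the updated signature later in this compare.

// Sketch only — import path and values are assumptions; argument order matches
// the updated getStatusCodeBarChartWidgetData signature shown further down.
import { getStatusCodeBarChartWidgetData } from 'container/ApiMonitoring/utils';

const widget = getStatusCodeBarChartWidgetData(
	'api.example.com',        // domainName
	'/api/test',              // endPointName — newly threaded through in this change
	{ items: [], op: 'AND' }, // filters
);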

@@ -10,7 +10,7 @@
*
* V5 Changes:
* - Filter format change: filters.items[] → filter.expression
* - Domain filter: (http_host)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
* - stepInterval: 60 → null
* - Grouped by response_status_code
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toBe(latencyExpression);
// Verify base filters
expect(callsExpression).toContain('http_host');
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
// Verify custom filters are merged
@@ -187,4 +187,51 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
expect(callsExpression).toContain('production');
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/metrics',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const callsChartQuery = payload[4];
const latencyChartQuery = payload[5];
const callsExpression =
callsChartQuery.query.builder.queryData[0].filter?.expression;
const latencyExpression =
latencyChartQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(callsExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
expect(latencyExpression).toContain(
"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
);
// Base filters still present
expect(callsExpression).toContain('net.peer.name');
expect(callsExpression).toContain("kind_string = 'Client'");
});
});
});

View File
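As a plain-text reference for what these assertions expect, a minimal sketch of the V5 base expression; the domain value is a placeholder, and the shape follows getDomainNameFilterExpression and clientKindExpression from the utils diff later in this compare.

// Sketch only — 'api.example.com' is a placeholder domain.
const domainName = 'api.example.com';
const baseExpression = `(net.peer.name = '${domainName}' OR server.address = '${domainName}') AND kind_string = 'Client'`;
// => "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client'"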

@@ -6,8 +6,8 @@
* These tests validate the migration from V4 to V5 format for the second payload
* in getEndPointDetailsQueryPayload (status code table data):
* - Filter format change: filters.items[] → filter.expression
* - URL handling: Special logic for http_url
* - Domain filter: http_host = '${domainName}'
* - URL handling: Special logic for (http.url OR url.full)
* - Domain filter: (net.peer.name OR server.address)
* - Kind filter: kind_string = 'Client'
 * - Status code filter: response_status_code EXISTS
* - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(typeof queryA.filter?.expression).toBe('string');
expect(queryA).not.toHaveProperty('filters.items');
// Base filter 1: Domain (http_host)
// Base filter 1: Domain (net.peer.name OR server.address)
expect(queryA.filter?.expression).toContain(
`http_host = '${mockDomainName}'`,
`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
);
// Base filter 2: Kind
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// Base filters present
expect(expression).toContain('http_host');
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
@@ -165,4 +165,62 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
});
});
describe('4. HTTP URL Filter Handling', () => {
it('converts http.url filter to (http.url OR url.full) expression', () => {
const filtersWithHttpUrl: IBuilderQuery['filters'] = {
items: [
{
id: 'http-url-filter',
key: {
key: 'http.url',
dataType: 'string' as any,
type: 'tag',
},
op: '=',
value: '/api/users',
},
{
id: 'service-filter',
key: {
key: 'service.name',
dataType: 'string' as any,
type: 'resource',
},
op: '=',
value: 'user-service',
},
],
op: 'AND',
};
const payload = getEndPointDetailsQueryPayload(
mockDomainName,
mockStartTime,
mockEndTime,
filtersWithHttpUrl,
);
const statusCodeQuery = payload[1];
const expression =
statusCodeQuery.query.builder.queryData[0].filter?.expression;
// CRITICAL: http.url converted to OR logic
expect(expression).toContain(
"(http.url = '/api/users' OR url.full = '/api/users')",
);
// Other filters still present
expect(expression).toContain('service.name');
expect(expression).toContain('user-service');
// Base filters present
expect(expression).toContain('net.peer.name');
expect(expression).toContain("kind_string = 'Client'");
expect(expression).toContain('response_status_code EXISTS');
// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
});
});
});

View File

@@ -84,7 +84,7 @@ describe('TopErrors', () => {
{
columns: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -124,7 +124,7 @@ describe('TopErrors', () => {
table: {
rows: [
{
http_url: '/api/test',
'http.url': '/api/test',
A: 100,
},
],
@@ -206,7 +206,7 @@ describe('TopErrors', () => {
expect(navigateMock).toHaveBeenCalledWith({
filters: expect.arrayContaining([
expect.objectContaining({
key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
key: expect.objectContaining({ key: 'http.url' }),
op: '=',
value: '/api/test',
}),
@@ -335,7 +335,7 @@ describe('TopErrors', () => {
// Verify all required filters are present
expect(filterExpression).toContain(
`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
);
});
});

View File

@@ -15,6 +15,7 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
import dayjs from 'dayjs';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
@@ -56,12 +57,12 @@ export const getDisplayValue = (value: unknown): string =>
isEmptyFilterValue(value) ? '-' : String(value);
export const getDomainNameFilterExpression = (domainName: string): string =>
`http_host = '${domainName}'`;
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
export const clientKindExpression = `kind_string = 'Client'`;
/**
* Converts filters to expression
* Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
* @param filters Filters to convert
* @param baseExpression Base expression to combine with filters
* @returns Filter expression string
@@ -74,6 +75,34 @@ export const convertFiltersWithUrlHandling = (
return baseExpression;
}
// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
const httpUrlFilter = filters.items?.find(
(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
);
// If http.url filter exists, create modified filters with (http.url OR url.full)
if (httpUrlFilter && httpUrlFilter.value) {
// Remove ALL http.url filters from items (guards against duplicates)
const otherFilters = filters.items?.filter(
(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
);
// Convert to expression first with other filters
const {
filter: intermediateFilter,
} = convertFiltersToExpressionWithExistingQuery(
{ ...filters, items: otherFilters || [] },
baseExpression,
);
// Add the OR condition for http.url and url.full
const urlValue = httpUrlFilter.value;
const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
return intermediateFilter.expression.trim()
? `${intermediateFilter.expression} AND ${urlCondition}`
: urlCondition;
}
const { filter } = convertFiltersToExpressionWithExistingQuery(
filters,
baseExpression,
@@ -342,7 +371,7 @@ export const formatDataForTable = (
});
};
const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
export const getDomainMetricsQueryPayload = (
domainName: string,
@@ -559,7 +588,14 @@ const defaultGroupBy = [
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'attribute',
},
{
dataType: DataTypes.String,
isColumn: false,
isJSON: false,
key: 'url.full',
type: 'attribute',
},
// {
@@ -831,8 +867,8 @@ function buildFilterExpression(
): string {
const baseFilterParts = [
`kind_string = 'Client'`,
`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
`(http.url EXISTS OR url.full EXISTS)`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
`has_error = true`,
];
if (showStatusCodeErrors) {
@@ -874,7 +910,12 @@ export const getTopErrorsQueryPayload = (
filter: { expression: filterExpression },
groupBy: [
{
name: SPAN_ATTRIBUTES.HTTP_URL,
name: 'http.url',
fieldDataType: 'string',
fieldContext: 'attribute',
},
{
name: 'url.full',
fieldDataType: 'string',
fieldContext: 'attribute',
},
@@ -1093,11 +1134,11 @@ export const formatEndPointsDataForTable = (
if (!isGroupedByAttribute) {
formattedData = data?.map((endpoint) => {
const { port } = extractPortAndEndpoint(
(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
);
return {
key: v4(),
endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
port,
callCount:
endpoint.data.A === 'n/a' || endpoint.data.A === undefined
@@ -1221,7 +1262,9 @@ export const formatTopErrorsDataForTable = (
return {
key: v4(),
endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
endpointName: getDisplayValue(
rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
),
statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
statusMessage: getDisplayValue(rowObj.status_message),
count: getDisplayValue(rowObj.__result_0),
@@ -1238,10 +1281,10 @@ export const getTopErrorsCoRelationQueryFilters = (
{
id: 'ea16470b',
key: {
key: SPAN_ATTRIBUTES.HTTP_URL,
key: 'http.url',
dataType: DataTypes.String,
type: 'tag',
id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
id: 'http.url--string--tag--false',
},
op: '=',
value: endPointName,
@@ -1738,7 +1781,7 @@ export const getEndPointDetailsQueryPayload = (
filters || { items: [], op: 'AND' },
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
expression: 'A',
@@ -1750,7 +1793,12 @@ export const getEndPointDetailsQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'attribute',
},
{
key: 'url.full',
dataType: DataTypes.String,
type: 'attribute',
},
@@ -2177,7 +2225,7 @@ export const getEndPointZeroStateQueryPayload = (
orderBy: [],
groupBy: [
{
key: SPAN_ATTRIBUTES.HTTP_URL,
key: SPAN_ATTRIBUTES.URL_PATH,
dataType: DataTypes.String,
type: 'tag',
},
@@ -2371,7 +2419,8 @@ export const statusCodeWidgetInfo = [
interface EndPointDropDownResponseRow {
data: {
[SPAN_ATTRIBUTES.HTTP_URL]: string;
[SPAN_ATTRIBUTES.URL_PATH]: string;
'url.full': string;
A: number;
};
}
@@ -2390,8 +2439,8 @@ export const getFormattedEndPointDropDownData = (
}
return data.map((row) => ({
key: v4(),
label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
}));
};
@@ -2720,6 +2769,7 @@ export const groupStatusCodes = (
export const getStatusCodeBarChartWidgetData = (
domainName: string,
endPointName: string,
filters: IBuilderQuery['filters'],
): Widgets => ({
query: {
@@ -2748,6 +2798,20 @@ export const getStatusCodeBarChartWidgetData = (
op: '=',
value: domainName,
},
...(endPointName
? [
{
id: '8b1be6f0',
key: {
dataType: DataTypes.String,
key: SPAN_ATTRIBUTES.URL_PATH,
type: 'tag',
},
op: '=',
value: endPointName,
},
]
: []),
...(filters?.items || []),
],
op: 'AND',
@@ -2869,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2901,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2933,7 +2997,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND http_url EXISTS`,
)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2965,7 +3029,7 @@ export const getAllEndpointsWidgetData = (
filters,
`${getDomainNameFilterExpression(
domainName,
)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
),
},
functions: [],
@@ -2996,12 +3060,24 @@ export const getAllEndpointsWidgetData = (
);
widget.renderColumnCell = {
[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
if (isEmptyFilterValue(url) || !url || url === 'n/a') {
[SPAN_ATTRIBUTES.URL_PATH]: (
url: string | number,
record?: RowData,
): ReactNode => {
// First try to use the url from the column value
let urlValue = url;
// If url is empty/null and we have the record, fallback to url.full
if (isEmptyFilterValue(url) && record) {
const { 'url.full': urlFull } = record;
urlValue = urlFull;
}
if (!urlValue || urlValue === 'n/a') {
return <span>-</span>;
}
const { endpoint } = extractPortAndEndpoint(String(url));
const { endpoint } = extractPortAndEndpoint(String(urlValue));
return <span>{getDisplayValue(endpoint)}</span>;
},
A: (numOfCalls: any): ReactNode => (
@@ -3056,8 +3132,8 @@ export const getAllEndpointsWidgetData = (
};
widget.customColTitles = {
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
'net.peer.port': 'Port',
};
widget.title = (
@@ -3082,10 +3158,12 @@ export const getAllEndpointsWidgetData = (
</div>
);
widget.hiddenColumns = ['url.full'];
return widget;
};
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
export const getGroupByFiltersFromGroupByValues = (
rowData: any,
@@ -3143,7 +3221,7 @@ export const getRateOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`http_host = '${domainName}'`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
),
},
functions: [],
@@ -3194,7 +3272,7 @@ export const getLatencyOverTimeWidgetData = (
filter: {
expression: convertFiltersWithUrlHandling(
filters || { items: [], op: 'AND' },
`http_host = '${domainName}'`,
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
),
},
functions: [],

View File
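To make the http.url special-casing above concrete, a minimal input/output sketch; the import path and the literal values are assumptions, while the OR expansion itself mirrors what the implementation and the migration tests in this compare encode.

// Sketch only — import path assumed; values are placeholders.
import { convertFiltersWithUrlHandling } from 'container/ApiMonitoring/utils';

const expression = convertFiltersWithUrlHandling(
	{
		items: [
			{
				id: 'url-filter',
				key: { key: 'http.url', dataType: 'string' as any, type: 'tag' },
				op: '=',
				value: '/api/users',
			},
		],
		op: 'AND',
	},
	"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client'",
);
// Per the implementation above, `expression` should end with:
// "... AND (http.url = '/api/users' OR url.full = '/api/users')"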

@@ -1,6 +1,7 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useCopyToClipboard } from 'react-use';
import { Color } from '@signozhq/design-tokens';
import {
@@ -14,16 +15,14 @@ import {
Tag,
Typography,
} from 'antd';
import {
RenderErrorResponseDTO,
ZeustypesHostDTO,
} from 'api/generated/services/sigNoz.schemas';
import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
import { AxiosError } from 'axios';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useNotifications } from 'hooks/useNotifications';
import { InfoIcon, Link2, Pencil } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { HostsProps } from 'types/api/customDomain/types';
import './CustomDomainSettings.styles.scss';
@@ -36,7 +35,7 @@ export default function CustomDomainSettings(): JSX.Element {
const { notifications } = useNotifications();
const [isEditModalOpen, setIsEditModalOpen] = useState(false);
const [isPollingEnabled, setIsPollingEnabled] = useState(false);
const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);
const [hosts, setHosts] = useState<HostsProps[] | null>(null);
const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
null,
@@ -58,37 +57,36 @@ export default function CustomDomainSettings(): JSX.Element {
};
const {
data: hostsData,
isLoading: isLoadingHosts,
isFetching: isFetchingHosts,
refetch: refetchHosts,
} = useGetHosts();
data: deploymentsData,
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
refetch: refetchDeploymentsData,
} = useGetDeploymentsData(true);
const {
mutate: updateSubDomain,
isLoading: isLoadingUpdateCustomDomain,
} = usePutHost<AxiosError<RenderErrorResponseDTO>>();
const stripProtocol = (url: string): string => {
return url?.split('://')[1] ?? url;
};
const dnsSuffix = useMemo(() => {
const defaultHost = hosts?.find((h) => h.is_default);
return defaultHost?.url && defaultHost?.name
? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
: '';
}, [hosts]);
} = useMutation(updateSubDomainAPI, {
onSuccess: () => {
setIsPollingEnabled(true);
refetchDeploymentsData();
setIsEditModalOpen(false);
},
onError: (error: AxiosError) => {
setUpdateDomainError(error);
setIsPollingEnabled(false);
},
});
useEffect(() => {
if (isFetchingHosts) {
if (isFetchingDeploymentsData) {
return;
}
if (hostsData?.data?.status === 'success') {
setHosts(hostsData?.data?.data?.hosts ?? null);
if (deploymentsData?.data?.status === 'success') {
setHosts(deploymentsData.data.data.hosts);
const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
const activeCustomDomain = deploymentsData.data.data.hosts.find(
(host) => !host.is_default,
);
@@ -99,36 +97,32 @@ export default function CustomDomainSettings(): JSX.Element {
}
}
if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
setTimeout(() => {
refetchHosts();
refetchDeploymentsData();
}, 3000);
}
if (hostsData?.data?.data?.state === 'HEALTHY') {
if (deploymentsData?.data?.data.state === 'HEALTHY') {
setIsPollingEnabled(false);
}
}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);
}, [
deploymentsData,
refetchDeploymentsData,
isPollingEnabled,
isFetchingDeploymentsData,
]);
const onUpdateCustomDomainSettings = (): void => {
editForm
.validateFields()
.then((values) => {
if (values.subdomain) {
updateSubDomain(
{ data: { name: values.subdomain } },
{
onSuccess: () => {
setIsPollingEnabled(true);
refetchHosts();
setIsEditModalOpen(false);
},
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
setUpdateDomainError(error as AxiosError);
setIsPollingEnabled(false);
},
updateSubDomain({
data: {
name: values.subdomain,
},
);
});
setCustomDomainDetails({
subdomain: values.subdomain,
@@ -140,8 +134,10 @@ export default function CustomDomainSettings(): JSX.Element {
});
};
const onCopyUrlHandler = (url: string): void => {
setCopyUrl(stripProtocol(url));
const onCopyUrlHandler = (host: string): void => {
const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
setCopyUrl(url);
notifications.success({
message: 'Copied to clipboard',
});
@@ -161,7 +157,7 @@ export default function CustomDomainSettings(): JSX.Element {
</div>
<div className="custom-domain-settings-content">
{!isLoadingHosts && (
{!isLoadingDeploymentsData && (
<Card className="custom-domain-settings-card">
<div className="custom-domain-settings-content-header">
Team {org?.[0]?.displayName} Information
@@ -173,9 +169,10 @@ export default function CustomDomainSettings(): JSX.Element {
<div
className="custom-domain-url"
key={host.name}
onClick={(): void => onCopyUrlHandler(host.url || '')}
onClick={(): void => onCopyUrlHandler(host.name)}
>
<Link2 size={12} /> {stripProtocol(host.url || '')}
<Link2 size={12} /> {host.name}.
{deploymentsData?.data.data.cluster.region.dns}
{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
</div>
))}
@@ -184,7 +181,11 @@ export default function CustomDomainSettings(): JSX.Element {
<div className="custom-domain-url-edit-btn">
<Button
className="periscope-btn"
disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
disabled={
isLoadingDeploymentsData ||
isFetchingDeploymentsData ||
isPollingEnabled
}
type="default"
icon={<Pencil size={10} />}
onClick={(): void => setIsEditModalOpen(true)}
@@ -197,7 +198,7 @@ export default function CustomDomainSettings(): JSX.Element {
{isPollingEnabled && (
<Alert
className="custom-domain-update-status"
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
type="info"
icon={<InfoIcon size={12} />}
/>
@@ -205,7 +206,7 @@ export default function CustomDomainSettings(): JSX.Element {
</Card>
)}
{isLoadingHosts && (
{isLoadingDeploymentsData && (
<Card className="custom-domain-settings-card">
<Skeleton
className="custom-domain-settings-skeleton"
@@ -254,7 +255,7 @@ export default function CustomDomainSettings(): JSX.Element {
addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
placeholder="Enter Domain"
onChange={(): void => setUpdateDomainError(null)}
addonAfter={dnsSuffix}
addonAfter={deploymentsData?.data.data.cluster.region.dns}
autoFocus
/>
</Form.Item>
@@ -266,8 +267,7 @@ export default function CustomDomainSettings(): JSX.Element {
{updateDomainError.status === 409 ? (
<Alert
message={
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
?.message ||
(updateDomainError?.response?.data as { error?: string })?.error ||
'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
}
type="warning"
@@ -275,10 +275,7 @@ export default function CustomDomainSettings(): JSX.Element {
/>
) : (
<Typography.Text type="danger">
{
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
?.message
}
{(updateDomainError.response?.data as { error: string })?.error}
</Typography.Text>
)}
</div>

View File

@@ -1,128 +0,0 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import CustomDomainSettings from '../CustomDomainSettings';
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
const mockHostsResponse: GetHosts200 = {
status: 'success',
data: {
name: 'accepted-starfish',
state: 'HEALTHY',
tier: 'PREMIUM',
hosts: [
{
name: 'accepted-starfish',
is_default: true,
url: 'https://accepted-starfish.test.cloud',
},
{
name: 'custom-host',
is_default: false,
url: 'https://custom-host.test.cloud',
},
],
},
};
describe('CustomDomainSettings', () => {
afterEach(() => server.resetHandlers());
it('renders host URLs with protocol stripped and marks the default host', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await screen.findByText(/custom-host\.test\.cloud/i);
expect(screen.getByText('Default')).toBeInTheDocument();
});
it('opens edit modal with DNS suffix derived from the default host', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
expect(
screen.getByRole('dialog', { name: /customize your team[']s url/i }),
).toBeInTheDocument();
// DNS suffix is the part of the default host URL after the name prefix
expect(screen.getByText('test.cloud')).toBeInTheDocument();
});
it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
let capturedBody: Record<string, unknown> = {};
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
capturedBody = await req.json<Record<string, unknown>>();
return res(ctx.status(200), ctx.json({}));
}),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
const input = screen.getByPlaceholderText(/enter domain/i);
await user.clear(input);
await user.type(input, 'myteam');
await user.click(screen.getByRole('button', { name: /apply changes/i }));
await waitFor(() => {
expect(capturedBody).toEqual({ name: 'myteam' });
});
});
it('shows contact support option when domain update returns 409', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(409),
ctx.json({ error: { message: 'Already updated today' } }),
),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
await user.click(screen.getByRole('button', { name: /apply changes/i }));
expect(
await screen.findByRole('button', { name: /contact support/i }),
).toBeInTheDocument();
});
});

View File

@@ -34,9 +34,6 @@ function DashboardVariableSelection(): JSX.Element | null {
const sortedVariablesArray = useDashboardVariablesSelector(
(state) => state.sortedVariablesArray,
);
const dynamicVariableOrder = useDashboardVariablesSelector(
(state) => state.dynamicVariableOrder,
);
const dependencyData = useDashboardVariablesSelector(
(state) => state.dependencyData,
);
@@ -55,11 +52,10 @@ function DashboardVariableSelection(): JSX.Element | null {
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);
// Memoize the order key to avoid unnecessary triggers
const variableOrderKey = useMemo(() => {
const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
const dynamicVariableOrderKey = dynamicVariableOrder?.join(',') ?? '';
return `${queryVariableOrderKey}|${dynamicVariableOrderKey}`;
}, [dependencyData?.order, dynamicVariableOrder]);
const dependencyOrderKey = useMemo(
() => dependencyData?.order?.join(',') ?? '',
[dependencyData?.order],
);
// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
@@ -70,7 +66,7 @@ function DashboardVariableSelection(): JSX.Element | null {
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variableOrderKey, minTime, maxTime]);
}, [dependencyOrderKey, minTime, maxTime]);
// Performance optimization: For dynamic variables with allSelected=true, we don't store
// individual values in localStorage since we can always derive them from available options.

View File

@@ -1,203 +0,0 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { act, render } from '@testing-library/react';
import {
dashboardVariablesStore,
setDashboardVariablesStore,
updateDashboardVariablesStore,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DashboardVariableSelection from '../DashboardVariableSelection';
// Mock providers/Dashboard/Dashboard
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): Record<string, unknown> => ({
setSelectedDashboard: mockSetSelectedDashboard,
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
}),
}));
// Mock hooks/dashboard/useVariablesFromUrl
const mockUpdateUrlVariable = jest.fn();
const mockGetUrlVariables = jest.fn().mockReturnValue({});
jest.mock('hooks/dashboard/useVariablesFromUrl', () => ({
__esModule: true,
default: (): Record<string, unknown> => ({
updateUrlVariable: mockUpdateUrlVariable,
getUrlVariables: mockGetUrlVariables,
}),
}));
// Mock variableFetchStore functions
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
initializeVariableFetchStore: jest.fn(),
enqueueFetchOfAllVariables: jest.fn(),
enqueueDescendantsOfVariable: jest.fn(),
}));
// Mock initializeDefaultVariables
jest.mock('providers/Dashboard/initializeDefaultVariables', () => ({
initializeDefaultVariables: jest.fn(),
}));
// Mock react-redux useSelector for globalTime
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: jest.fn().mockReturnValue({ minTime: 1000, maxTime: 2000 }),
}));
// Mock VariableItem to avoid rendering complexity
jest.mock('../VariableItem', () => ({
__esModule: true,
default: (): JSX.Element => <div data-testid="variable-item" />,
}));
function createVariable(
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
return {
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
};
}
function resetStore(): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
describe('DashboardVariableSelection', () => {
beforeEach(() => {
resetStore();
jest.clearAllMocks();
});
it('should call initializeVariableFetchStore and enqueueFetchOfAllVariables on mount', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
render(<DashboardVariableSelection />);
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env']);
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
});
it('should re-trigger fetch cycle when dynamicVariableOrder changes', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
render(<DashboardVariableSelection />);
// Clear mocks after initial render
(initializeVariableFetchStore as jest.Mock).mockClear();
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
// Add a DYNAMIC variable which changes dynamicVariableOrder
act(() => {
updateDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
},
});
});
expect(initializeVariableFetchStore).toHaveBeenCalledWith(
expect.arrayContaining(['env', 'dyn1']),
);
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
});
it('should re-trigger fetch cycle when a dynamic variable is removed', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 2 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
render(<DashboardVariableSelection />);
(initializeVariableFetchStore as jest.Mock).mockClear();
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
// Remove dyn2, changing dynamicVariableOrder from ['dyn1','dyn2'] to ['dyn1']
act(() => {
updateDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
},
});
});
expect(initializeVariableFetchStore).toHaveBeenCalledWith(['env', 'dyn1']);
expect(enqueueFetchOfAllVariables).toHaveBeenCalled();
});
it('should NOT re-trigger fetch cycle when dynamicVariableOrder stays the same', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
render(<DashboardVariableSelection />);
(initializeVariableFetchStore as jest.Mock).mockClear();
(enqueueFetchOfAllVariables as jest.Mock).mockClear();
// Update a non-dynamic variable's selectedValue — dynamicVariableOrder unchanged
act(() => {
const snapshot = dashboardVariablesStore.getSnapshot();
dashboardVariablesStore.set(
(): IDashboardVariablesStoreState => ({
...snapshot,
variables: {
...snapshot.variables,
env: {
...snapshot.variables.env,
selectedValue: 'production',
},
},
}),
);
});
expect(initializeVariableFetchStore).not.toHaveBeenCalled();
expect(enqueueFetchOfAllVariables).not.toHaveBeenCalled();
});
});

View File

@@ -1,6 +1,7 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
HistogramTooltipProps,
TooltipRenderArgs,
@@ -21,11 +22,21 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: HistogramTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <HistogramTooltip {...tooltipProps} />;
},

View File

@@ -1,6 +1,7 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
TimeSeriesTooltipProps,
TooltipRenderArgs,
@@ -16,11 +17,21 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: TimeSeriesTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <TimeSeriesTooltip {...tooltipProps} />;
},

View File

@@ -2,10 +2,10 @@
import { useEffect, useState } from 'react';
import { Button, Skeleton, Tag, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetHosts } from 'api/generated/services/zeus';
import ROUTES from 'constants/routes';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import history from 'lib/history';
import { Link2 } from 'lucide-react';
import { Globe, Link2 } from 'lucide-react';
import Card from 'periscope/components/Card/Card';
import { useAppContext } from 'providers/App/App';
import { LicensePlatform } from 'types/api/licensesV3/getActive';
@@ -26,21 +26,36 @@ function DataSourceInfo({
const isEnabled =
activeLicense && activeLicense.platform === LicensePlatform.CLOUD;
const { data: hostsData, isError } = useGetHosts({
query: { enabled: isEnabled || false },
});
const {
data: deploymentsData,
isError: isErrorDeploymentsData,
} = useGetDeploymentsData(isEnabled || false);
const [region, setRegion] = useState<string>('');
const [url, setUrl] = useState<string>('');
useEffect(() => {
if (hostsData) {
const defaultHost = hostsData?.data?.data?.hosts?.find((h) => h.is_default);
if (defaultHost?.url) {
const url = defaultHost?.url?.split('://')[1] ?? '';
setUrl(url);
if (deploymentsData) {
switch (deploymentsData?.data.data.cluster.region.name) {
case 'in':
setRegion('India');
break;
case 'us':
setRegion('United States');
break;
case 'eu':
setRegion('Europe');
break;
default:
setRegion(deploymentsData?.data.data.cluster.region.name);
break;
}
setUrl(
`${deploymentsData?.data.data.name}.${deploymentsData?.data.data.cluster.region.dns}`,
);
}
}, [hostsData]);
}, [deploymentsData]);
const renderNotSendingData = (): JSX.Element => (
<>
@@ -108,8 +123,14 @@ function DataSourceInfo({
</Button>
</div>
{!isError && hostsData && (
{!isErrorDeploymentsData && deploymentsData && (
<div className="workspace-details">
<div className="workspace-region">
<Globe size={10} />
<Typography>{region}</Typography>
</div>
<div className="workspace-url">
<Link2 size={12} />
@@ -135,11 +156,17 @@ function DataSourceInfo({
Hello there, Welcome to your SigNoz workspace
</Typography>
{!isError && hostsData && (
{!isErrorDeploymentsData && deploymentsData && (
<Card className="welcome-card">
<Card.Content>
<div className="workspace-ready-container">
<div className="workspace-details">
<div className="workspace-region">
<Globe size={10} />
<Typography>{region}</Typography>
</div>
<div className="workspace-url">
<Link2 size={12} />

View File

@@ -1,69 +0,0 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen } from 'tests/test-utils';
import DataSourceInfo from '../DataSourceInfo';
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
const mockHostsResponse: GetHosts200 = {
status: 'success',
data: {
name: 'accepted-starfish',
state: 'HEALTHY',
tier: 'PREMIUM',
hosts: [
{
name: 'accepted-starfish',
is_default: true,
url: 'https://accepted-starfish.test.cloud',
},
{
name: 'custom-host',
is_default: false,
url: 'https://custom-host.test.cloud',
},
],
},
};
describe('DataSourceInfo', () => {
afterEach(() => server.resetHandlers());
it('renders the default workspace URL with protocol stripped', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
});
it('does not render workspace URL when GET /zeus/hosts fails', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(500), ctx.json({})),
),
);
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
await screen.findByText(/Your workspace is ready/i);
expect(screen.queryByText(/signoz\.cloud/i)).not.toBeInTheDocument();
});
it('renders workspace URL in the data-received view when telemetry is flowing', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<DataSourceInfo dataSentToSigNoz={true} isLoading={false} />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
});
});

View File

@@ -36,6 +36,24 @@ jest.mock('react-router-dom', () => {
};
});
// Mock deployments data hook to avoid unrelated network calls in this page
jest.mock(
'hooks/CustomDomain/useGetDeploymentsData',
(): Record<string, unknown> => ({
useGetDeploymentsData: (): {
data: undefined;
isLoading: boolean;
isFetching: boolean;
isError: boolean;
} => ({
data: undefined,
isLoading: false,
isFetching: false,
isError: false,
}),
}),
);
const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
const TEST_WORKSPACE_ID = 'w1';

View File

@@ -26,7 +26,7 @@ jest.mock('lib/history', () => ({
// API Endpoints
const ORG_PREFERENCES_ENDPOINT = '*/api/v1/org/preferences/list';
const UPDATE_ORG_PREFERENCE_ENDPOINT = '*/api/v1/org/preferences/name/update';
const UPDATE_PROFILE_ENDPOINT = '*/api/v2/zeus/profiles';
const UPDATE_PROFILE_ENDPOINT = '*/api/gateway/v2/profiles/me';
const EDIT_ORG_ENDPOINT = '*/api/v2/orgs/me';
const INVITE_USERS_ENDPOINT = '*/api/v1/invite/bulk/create';
@@ -277,46 +277,6 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});
it('fires PUT to /zeus/profiles and advances to step 4 on success', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
let profilePutCalled = false;
server.use(
rest.put(UPDATE_PROFILE_ENDPOINT, (_, res, ctx) => {
profilePutCalled = true;
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
);
render(<OnboardingQuestionaire />);
// Navigate to step 3
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
await user.type(
await screen.findByPlaceholderText(/e\.g\., googling/i),
'Found via Google',
);
await user.click(screen.getByLabelText(/lowering observability costs/i));
await user.click(screen.getByRole('button', { name: /next/i }));
// Click "I'll do this later" on step 3 — triggers PUT /zeus/profiles
await user.click(
await screen.findByRole('button', { name: /i'll do this later/i }),
);
await waitFor(() => {
expect(profilePutCalled).toBe(true);
// Step 3 content is gone — successfully advanced to step 4
expect(
screen.queryByText(/what does your scale approximately look like/i),
).not.toBeInTheDocument();
});
});
it('shows do later button', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);

View File

@@ -1,10 +1,8 @@
import { useEffect, useState } from 'react';
import { useMutation, useQuery } from 'react-query';
import { toast } from '@signozhq/sonner';
import { NotificationInstance } from 'antd/es/notification/interface';
import logEvent from 'api/common/logEvent';
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { usePutProfile } from 'api/generated/services/zeus';
import updateProfileAPI from 'api/onboarding/updateProfile';
import listOrgPreferences from 'api/v1/org/preferences/list';
import updateOrgPreferenceAPI from 'api/v1/org/preferences/name/update';
import { AxiosError } from 'axios';
@@ -123,9 +121,20 @@ function OnboardingQuestionaire(): JSX.Element {
optimiseSignozDetails.hostsPerDay === 0 &&
optimiseSignozDetails.services === 0;
const { mutate: updateProfile, isLoading: isUpdatingProfile } = usePutProfile<
AxiosError<RenderErrorResponseDTO>
>();
const { mutate: updateProfile, isLoading: isUpdatingProfile } = useMutation(
updateProfileAPI,
{
onSuccess: () => {
setCurrentStep(4);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
// Allow user to proceed even if API fails
setCurrentStep(4);
},
},
);
const { mutate: updateOrgPreference } = useMutation(updateOrgPreferenceAPI, {
onSuccess: () => {
@@ -144,44 +153,29 @@ function OnboardingQuestionaire(): JSX.Element {
nextPageID: 4,
});
updateProfile(
{
data: {
uses_otel: orgDetails?.usesOtel as boolean,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
existing_observability_tool:
orgDetails?.observabilityTool === 'Others'
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
? ([
...(signozDetails?.interestInSignoz?.filter(
(item) => item !== 'Others',
) || []),
signozDetails?.otherInterestInSignoz,
] as string[])
: (signozDetails?.interestInSignoz as string[]),
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
number_of_services: optimiseSignozDetails?.services as number,
},
},
{
onSuccess: () => {
setCurrentStep(4);
},
onError: (error: any) => {
toast.error(error?.message || SOMETHING_WENT_WRONG);
// Allow user to proceed even if API fails
setCurrentStep(4);
},
},
);
updateProfile({
uses_otel: orgDetails?.usesOtel as boolean,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
existing_observability_tool:
orgDetails?.observabilityTool === 'Others'
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
? ([
...(signozDetails?.interestInSignoz?.filter(
(item) => item !== 'Others',
) || []),
signozDetails?.otherInterestInSignoz,
] as string[])
: (signozDetails?.interestInSignoz as string[]),
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
number_of_services: optimiseSignozDetails?.services as number,
});
};
const handleOnboardingComplete = (): void => {

View File

@@ -3,7 +3,6 @@ import { MemoryRouter, Route } from 'react-router-dom';
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { AppProvider } from 'providers/App/App';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import { Span } from 'types/api/trace/getTraceV2';
@@ -109,7 +108,7 @@ const createMockSpan = (): Span => ({
statusMessage: '',
tagMap: {
'http.method': 'GET',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/users?page=1',
'http.url': '/api/users?page=1',
'http.status_code': '200',
'service.name': 'frontend-service',
'span.kind': 'server',

View File

@@ -5,7 +5,6 @@ import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
@@ -879,9 +878,7 @@ describe('SpanDetailsDrawer', () => {
// Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
expect(screen.getAllByText(SPAN_ATTRIBUTES.HTTP_URL).length).toBeGreaterThan(
0,
);
expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
});
@@ -1129,7 +1126,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees all attributes initially
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.getByText(SPAN_ATTRIBUTES.HTTP_URL)).toBeInTheDocument();
expect(screen.getByText('http.url')).toBeInTheDocument();
expect(screen.getByText('http.status_code')).toBeInTheDocument();
// User types "method" in search
@@ -1139,7 +1136,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
// User sees only matching attributes
await waitFor(() => {
expect(screen.getByText('http.method')).toBeInTheDocument();
expect(screen.queryByText(SPAN_ATTRIBUTES.HTTP_URL)).not.toBeInTheDocument();
expect(screen.queryByText('http.url')).not.toBeInTheDocument();
expect(screen.queryByText('http.status_code')).not.toBeInTheDocument();
});
});

View File

@@ -1,4 +1,3 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { ILog } from 'types/api/logs/log';
import { Span } from 'types/api/trace/getTraceV2';
@@ -23,7 +22,7 @@ export const mockSpan: Span = {
event: [],
tagMap: {
'http.method': 'GET',
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/test',
'http.url': '/api/test',
'http.status_code': '200',
},
hasError: false,

View File

@@ -0,0 +1,13 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getDeploymentsData } from 'api/customDomain/getDeploymentsData';
import { AxiosError, AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
export const useGetDeploymentsData = (
isEnabled: boolean,
): UseQueryResult<AxiosResponse<DeploymentsDataProps>, AxiosError> =>
useQuery<AxiosResponse<DeploymentsDataProps>, AxiosError>({
queryKey: ['getDeploymentsData'],
queryFn: () => getDeploymentsData(),
enabled: isEnabled,
});

View File
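A minimal usage sketch for the restored hook; WorkspaceUrl is a hypothetical consumer, while the import path and the response shape (data.data.name and cluster.region.dns) come from the component diffs above.

// Sketch only — WorkspaceUrl is hypothetical; the hook path and response shape
// mirror CustomDomainSettings and DataSourceInfo above.
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';

function WorkspaceUrl(): JSX.Element {
	const { data: deploymentsData, isLoading } = useGetDeploymentsData(true); // pass false to keep the query disabled

	if (isLoading || !deploymentsData) {
		return <span>-</span>;
	}

	const { name, cluster } = deploymentsData.data.data;
	return <span>{`${name}.${cluster.region.dns}`}</span>;
}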

@@ -0,0 +1,15 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getAllIngestionKeys } from 'api/IngestionKeys/getAllIngestionKeys';
import { AxiosError, AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const useGetAllIngestionsKeys = (
props: GetIngestionKeyProps,
): UseQueryResult<AxiosResponse<AllIngestionKeyProps>, AxiosError> =>
useQuery<AxiosResponse<AllIngestionKeyProps>, AxiosError>({
queryKey: [`IngestionKeys-${props.page}-${props.search}`],
queryFn: () => getAllIngestionKeys(props),
});

View File
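Similarly, a minimal usage sketch for the ingestion keys hook; the consumer and values are hypothetical, and only page and search are implied by the query key above, so any other GetIngestionKeyProps fields are omitted here.

// Sketch only — page and search values are placeholders; other fields of
// GetIngestionKeyProps, if any, are omitted.
const { data: ingestionKeysData, isLoading } = useGetAllIngestionsKeys({
	page: 1,
	search: '',
});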

@@ -1,31 +1,8 @@
import { useMemo } from 'react';
import { HistogramTooltipProps, TooltipContentItem } from '../types';
import { HistogramTooltipProps } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';
export default function HistogramTooltip(
props: HistogramTooltipProps,
): JSX.Element {
const content = useMemo(
(): TooltipContentItem[] =>
buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: props.yAxisUnit ?? '',
decimalPrecision: props.decimalPrecision,
}),
[
props.uPlotInstance,
props.seriesIndex,
props.dataIndexes,
props.yAxisUnit,
props.decimalPrecision,
],
);
return <Tooltip {...props} content={content} showTooltipHeader={false} />;
return <Tooltip {...props} showTooltipHeader={false} />;
}

View File

@@ -1,31 +1,8 @@
import { useMemo } from 'react';
import { TimeSeriesTooltipProps, TooltipContentItem } from '../types';
import { TimeSeriesTooltipProps } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';
export default function TimeSeriesTooltip(
props: TimeSeriesTooltipProps,
): JSX.Element {
const content = useMemo(
(): TooltipContentItem[] =>
buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: props.yAxisUnit ?? '',
decimalPrecision: props.decimalPrecision,
}),
[
props.uPlotInstance,
props.seriesIndex,
props.dataIndexes,
props.yAxisUnit,
props.decimalPrecision,
],
);
return <Tooltip {...props} content={content} />;
return <Tooltip {...props} />;
}

View File

@@ -28,23 +28,18 @@
font-weight: 500;
}
.uplot-tooltip-list-container {
overflow-y: auto;
max-height: 330px;
.uplot-tooltip-list {
&::-webkit-scrollbar {
width: 0.3rem;
}
.uplot-tooltip-list {
&::-webkit-scrollbar {
width: 0.3rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-100);
border-radius: 0.5rem;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-100);
border-radius: 0.5rem;
}
}

View File

@@ -1,4 +1,4 @@
import { useMemo, useState } from 'react';
import { useMemo } from 'react';
import { Virtuoso } from 'react-virtuoso';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
@@ -11,7 +11,6 @@ import './Tooltip.styles.scss';
const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const TOOLTIP_LIST_PADDING = 10;
export default function Tooltip({
uPlotInstance,
@@ -20,7 +19,7 @@ export default function Tooltip({
showTooltipHeader = true,
}: TooltipProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const [listHeight, setListHeight] = useState(0);
const tooltipContent = content ?? [];
const headerTitle = useMemo(() => {
@@ -42,52 +41,42 @@ export default function Tooltip({
showTooltipHeader,
]);
const virtuosoStyle = useMemo(() => {
return {
height:
listHeight > 0
? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
width: '100%',
};
}, [listHeight, tooltipContent.length]);
return (
<div
className={cx(
'uplot-tooltip-container',
isDarkMode ? 'darkMode' : 'lightMode',
)}
data-testid="uplot-tooltip-container"
>
{showTooltipHeader && (
<div className="uplot-tooltip-header" data-testid="uplot-tooltip-header">
<div className="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
)}
<div className="uplot-tooltip-list-container">
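{/* Size the list to the number of items, capped at TOOLTIP_LIST_MAX_HEIGHT */}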
<div
style={{
height: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
minHeight: 0,
}}
>
{tooltipContent.length > 0 ? (
<Virtuoso
className="uplot-tooltip-list"
data-testid="uplot-tooltip-list"
data={tooltipContent}
style={virtuosoStyle}
totalListHeightChanged={setListHeight}
defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
itemContent={(_, item): JSX.Element => (
<div className="uplot-tooltip-item" data-testid="uplot-tooltip-item">
<div className="uplot-tooltip-item">
<div
className="uplot-tooltip-item-marker"
style={{ borderColor: item.color }}
data-is-legend-marker={true}
data-testid="uplot-tooltip-item-marker"
/>
<div
className="uplot-tooltip-item-content"
style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
data-testid="uplot-tooltip-item-content"
>
{item.label}: {item.tooltipValue}
</div>

View File

@@ -1,208 +0,0 @@
import React from 'react';
import { VirtuosoMockContext } from 'react-virtuoso';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { render, RenderResult, screen } from 'tests/test-utils';
import uPlot from 'uplot';
import { TooltipContentItem } from '../../types';
import Tooltip from '../Tooltip';
type MockVirtuosoProps = {
data: TooltipContentItem[];
itemContent: (index: number, item: TooltipContentItem) => React.ReactNode;
className?: string;
style?: React.CSSProperties;
totalListHeightChanged?: (height: number) => void;
'data-testid'?: string;
};
let mockTotalListHeight = 200;
jest.mock('react-virtuoso', () => {
const actual = jest.requireActual('react-virtuoso');
return {
...actual,
Virtuoso: ({
data,
itemContent,
className,
style,
totalListHeightChanged,
'data-testid': dataTestId,
}: MockVirtuosoProps): JSX.Element => {
if (totalListHeightChanged) {
// Simulate Virtuoso reporting total list height
totalListHeightChanged(mockTotalListHeight);
}
return (
<div className={className} style={style} data-testid={dataTestId}>
{data.map((item, index) => (
<div key={item.label ?? index.toString()}>{itemContent(index, item)}</div>
))}
</div>
);
},
};
});
jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: jest.fn(),
}));
const mockUseIsDarkMode = useIsDarkMode as jest.MockedFunction<
typeof useIsDarkMode
>;
type TooltipTestProps = React.ComponentProps<typeof Tooltip>;
function createTooltipContent(
overrides: Partial<TooltipContentItem> = {},
): TooltipContentItem {
return {
label: 'Series A',
value: 10,
tooltipValue: '10',
color: '#ff0000',
isActive: true,
...overrides,
};
}
function createUPlotInstance(cursorIdx: number | null): uPlot {
return ({
data: [[1], []],
cursor: { idx: cursorIdx },
// The rest of the uPlot fields are not used by Tooltip
} as unknown) as uPlot;
}
function renderTooltip(props: Partial<TooltipTestProps> = {}): RenderResult {
const defaultProps: TooltipTestProps = {
uPlotInstance: createUPlotInstance(null),
timezone: 'UTC',
content: [],
showTooltipHeader: true,
// TooltipRenderArgs (not used directly in component but required by type)
dataIndexes: [],
seriesIndex: null,
isPinned: false,
dismiss: jest.fn(),
viaSync: false,
} as TooltipTestProps;
return render(
<VirtuosoMockContext.Provider value={{ viewportHeight: 300, itemHeight: 38 }}>
<Tooltip {...defaultProps} {...props} />
</VirtuosoMockContext.Provider>,
);
}
describe('Tooltip', () => {
beforeEach(() => {
jest.clearAllMocks();
mockUseIsDarkMode.mockReturnValue(false);
mockTotalListHeight = 200;
});
it('renders header title when showTooltipHeader is true and cursor index is present', () => {
const uPlotInstance = createUPlotInstance(0);
renderTooltip({ uPlotInstance });
const expectedTitle = dayjs(1 * 1000)
.tz('UTC')
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
expect(screen.getByText(expectedTitle)).toBeInTheDocument();
});
it('does not render header when showTooltipHeader is false', () => {
const uPlotInstance = createUPlotInstance(0);
renderTooltip({ uPlotInstance, showTooltipHeader: false });
const unexpectedTitle = dayjs(1 * 1000)
.tz('UTC')
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
expect(screen.queryByText(unexpectedTitle)).not.toBeInTheDocument();
});
it('renders lightMode class when dark mode is disabled', () => {
const uPlotInstance = createUPlotInstance(null);
mockUseIsDarkMode.mockReturnValue(false);
renderTooltip({ uPlotInstance });
const container = screen.getByTestId('uplot-tooltip-container');
expect(container).toHaveClass('lightMode');
expect(container).not.toHaveClass('darkMode');
});
it('renders darkMode class when dark mode is enabled', () => {
const uPlotInstance = createUPlotInstance(null);
mockUseIsDarkMode.mockReturnValue(true);
renderTooltip({ uPlotInstance });
const container = screen.getByTestId('uplot-tooltip-container');
expect(container).toHaveClass('darkMode');
expect(container).not.toHaveClass('lightMode');
});
it('renders tooltip items when content is provided', () => {
const uPlotInstance = createUPlotInstance(null);
const content = [createTooltipContent()];
renderTooltip({ uPlotInstance, content });
const list = screen.queryByTestId('uplot-tooltip-list');
expect(list).not.toBeNull();
const marker = screen.getByTestId('uplot-tooltip-item-marker');
const itemContent = screen.getByTestId('uplot-tooltip-item-content');
expect(marker).toHaveStyle({ borderColor: '#ff0000' });
expect(itemContent).toHaveStyle({ color: '#ff0000', fontWeight: '700' });
expect(itemContent).toHaveTextContent('Series A: 10');
});
it('does not render tooltip list when content is empty', () => {
const uPlotInstance = createUPlotInstance(null);
renderTooltip({ uPlotInstance, content: [] });
const list = screen.queryByTestId('uplot-tooltip-list');
expect(list).toBeNull();
});
it('sets tooltip list height based on the height returned by Virtuoso', () => {
const uPlotInstance = createUPlotInstance(null);
const content = [createTooltipContent(), createTooltipContent()];
renderTooltip({ uPlotInstance, content });
const list = screen.getByTestId('uplot-tooltip-list');
expect(list).toHaveStyle({ height: '210px' });
});
it('sets tooltip list height based on content length when Virtuoso reports 0 height', () => {
const uPlotInstance = createUPlotInstance(null);
const content = [createTooltipContent(), createTooltipContent()];
mockTotalListHeight = 0;
renderTooltip({ uPlotInstance, content });
const list = screen.getByTestId('uplot-tooltip-list');
// Falls back to content length: 2 items * 38px = 76px
expect(list).toHaveStyle({ height: '76px' });
});
});

View File

@@ -1,277 +0,0 @@
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import uPlot, { AlignedData, Series } from 'uplot';
import { TooltipContentItem } from '../../types';
import {
buildTooltipContent,
FALLBACK_SERIES_COLOR,
getTooltipBaseValue,
resolveSeriesColor,
} from '../utils';
jest.mock('components/Graph/yAxisConfig', () => ({
getToolTipValue: jest.fn(),
}));
const mockGetToolTipValue = getToolTipValue as jest.MockedFunction<
typeof getToolTipValue
>;
function createUPlotInstance(): uPlot {
return ({
data: [],
cursor: { idx: 0 },
} as unknown) as uPlot;
}
describe('Tooltip utils', () => {
describe('resolveSeriesColor', () => {
it('returns string stroke when provided', () => {
const u = createUPlotInstance();
const stroke: Series.Stroke = '#ff0000';
const color = resolveSeriesColor(stroke, u, 1);
expect(color).toBe('#ff0000');
});
it('returns result of stroke function when provided', () => {
const u = createUPlotInstance();
const strokeFn: Series.Stroke = (uInstance, seriesIdx): string =>
`color-${seriesIdx}-${uInstance.cursor.idx}`;
const color = resolveSeriesColor(strokeFn, u, 2);
expect(color).toBe('color-2-0');
});
it('returns fallback color when stroke is not provided', () => {
const u = createUPlotInstance();
const color = resolveSeriesColor(undefined, u, 1);
expect(color).toBe(FALLBACK_SERIES_COLOR);
});
});
describe('getTooltipBaseValue', () => {
it('returns value from aligned data for non-stacked charts', () => {
const data: AlignedData = [
[0, 1],
[10, 20],
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: false,
});
expect(result).toBe(20);
});
it('returns null when value is missing', () => {
const data: AlignedData = [
[0, 1],
[10, null],
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
});
expect(result).toBeNull();
});
it('subtracts next visible stacked value for stacked bar charts', () => {
// data[1] and data[2] contain stacked values at dataIndex 1
const data: AlignedData = [
[0, 1],
[30, 60], // series 1 stacked
[10, 20], // series 2 stacked
];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true } as Series,
{ label: 'B', show: true } as Series,
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: true,
series,
});
// 60 (stacked at series 1) - 20 (next visible stacked) = 40
expect(result).toBe(40);
});
it('skips hidden series when computing base value for stacked charts', () => {
const data: AlignedData = [
[0, 1],
[30, 60], // series 1 stacked
[10, 20], // series 2 stacked but hidden
[5, 10], // series 3 stacked and visible
];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true } as Series,
{ label: 'B', show: false } as Series,
{ label: 'C', show: true } as Series,
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: true,
series,
});
// 60 (stacked at series 1) - 10 (next *visible* stacked, series 3) = 50
expect(result).toBe(50);
});
it('does not subtract when there is no next visible series', () => {
const data: AlignedData = [
[0, 1],
[10, 20], // series 1
[5, (null as unknown) as number], // series 2 missing
];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true } as Series,
{ label: 'B', show: false } as Series,
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: true,
series,
});
expect(result).toBe(20);
});
});
describe('buildTooltipContent', () => {
const yAxisUnit = 'ms';
const decimalPrecision: PrecisionOption = 2;
beforeEach(() => {
mockGetToolTipValue.mockReset();
mockGetToolTipValue.mockImplementation(
(value: string | number): string => `formatted-${value}`,
);
});
function createSeriesConfig(): Series[] {
return [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
{
label: 'B',
show: true,
stroke: (_u: uPlot, idx: number): string => `color-${idx}`,
} as Series,
{ label: 'C', show: false, stroke: '#00ff00' } as Series,
];
}
it('builds tooltip content with active series first', () => {
const data: AlignedData = [[0], [10], [20], [30]];
const series = createSeriesConfig();
const dataIndexes = [null, 0, 0, 0];
const u = createUPlotInstance();
const result = buildTooltipContent({
data,
series,
dataIndexes,
activeSeriesIndex: 2,
uPlotInstance: u,
yAxisUnit,
decimalPrecision,
});
expect(result).toHaveLength(2);
// Active (series index 2) should come first
expect(result[0]).toMatchObject<Partial<TooltipContentItem>>({
label: 'B',
value: 20,
tooltipValue: 'formatted-20',
color: 'color-2',
isActive: true,
});
expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
label: 'A',
value: 10,
tooltipValue: 'formatted-10',
color: '#ff0000',
isActive: false,
});
});
it('skips series with null data index or non-finite values', () => {
const data: AlignedData = [[0], [42], [Infinity]];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
{ label: 'B', show: true, stroke: '#00ff00' } as Series,
];
const dataIndexes = [null, 0, null];
const u = createUPlotInstance();
const result = buildTooltipContent({
data,
series,
dataIndexes,
activeSeriesIndex: 1,
uPlotInstance: u,
yAxisUnit,
decimalPrecision,
});
// Only the finite, non-null value from series A should be included
expect(result).toHaveLength(1);
expect(result[0].label).toBe('A');
});
it('uses stacked base values when building content for stacked bar charts', () => {
const data: AlignedData = [[0], [60], [30]];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
{ label: 'B', show: true, stroke: '#00ff00' } as Series,
];
const dataIndexes = [null, 0, 0];
const u = createUPlotInstance();
const result = buildTooltipContent({
data,
series,
dataIndexes,
activeSeriesIndex: 1,
uPlotInstance: u,
yAxisUnit,
decimalPrecision,
isStackedBarChart: true,
});
// baseValue for series 1 at index 0 should be 60 - 30 (next visible) = 30
expect(result[0].value).toBe(30);
expect(result[1].value).toBe(30);
});
});
});

View File

@@ -4,7 +4,7 @@ import uPlot, { AlignedData, Series } from 'uplot';
import { TooltipContentItem } from '../types';
export const FALLBACK_SERIES_COLOR = '#000000';
const FALLBACK_SERIES_COLOR = '#000000';
export function resolveSeriesColor(
stroke: Series.Stroke | undefined,

View File

@@ -1,5 +1,3 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
/* eslint-disable sonarjs/no-duplicate-string */
export const traceDetailResponse = [
{
@@ -37,7 +35,7 @@ export const traceDetailResponse = [
'component',
'host.name',
'http.method',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'ip',
'http.status_code',
'opencensus.exporterversion',
@@ -86,7 +84,7 @@ export const traceDetailResponse = [
'signoz.collector.id',
'component',
'http.method',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'ip',
],
[
@@ -743,7 +741,7 @@ export const traceDetailResponse = [
'component',
'http.method',
'http.status_code',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'net/http.reused',
'net/http.was_idle',
'service.name',
@@ -835,7 +833,7 @@ export const traceDetailResponse = [
'opencensus.exporterversion',
'signoz.collector.id',
'host.name',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'net/http.reused',
'net/http.was_idle',
],
@@ -918,7 +916,7 @@ export const traceDetailResponse = [
'net/http.was_idle',
'component',
'host.name',
SPAN_ATTRIBUTES.HTTP_URL,
'http.url',
'ip',
'service.name',
'signoz.collector.id',

View File

@@ -2,7 +2,6 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { getAttributesValues } from 'api/queryBuilder/getAttributesValues';
import { DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY } from 'constants/queryBuilder';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import {
BaseAutocompleteData,
DataTypes,
@@ -32,7 +31,7 @@ export const AllTraceFilterKeyValue: Record<string, string> = {
httpRoute: 'HTTP Route',
'http.route': 'HTTP Route',
httpUrl: 'HTTP URL',
[SPAN_ATTRIBUTES.HTTP_URL]: 'HTTP URL',
'http.url': 'HTTP URL',
traceID: 'Trace ID',
trace_id: 'Trace ID',
} as const;

View File

@@ -0,0 +1,53 @@
export interface HostsProps {
name: string;
is_default: boolean;
}
export interface RegionProps {
id: string;
name: string;
category: string;
dns: string;
created_at: string;
updated_at: string;
}
export interface ClusterProps {
id: string;
name: string;
cloud_account_id: string;
cloud_region: string;
address: string;
region: RegionProps;
}
export interface DeploymentData {
id: string;
name: string;
email: string;
state: string;
tier: string;
user: string;
password: string;
created_at: string;
updated_at: string;
cluster_id: string;
hosts: HostsProps[];
cluster: ClusterProps;
}
export interface DeploymentsDataProps {
status: string;
data: DeploymentData;
}
export type PayloadProps = {
status: string;
data: string;
};
export interface UpdateCustomDomainProps {
data: {
name: string;
};
}

View File

@@ -0,0 +1,11 @@
export interface UpdateProfileProps {
reasons_for_interest_in_signoz: string[];
uses_otel: boolean;
has_existing_observability_tool: boolean;
existing_observability_tool: string;
logs_scale_per_day_in_gb: number;
number_of_services: number;
number_of_hosts: number;
where_did_you_discover_signoz: string;
timeline_for_migrating_to_signoz: string;
}

View File

@@ -1,30 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/gorilla/mux"
)
func (provider *provider) addAuthzRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/authz/check", handler.New(provider.authzHandler.Check, handler.OpenAPIDef{
ID: "AuthzCheck",
Tags: []string{"authz"},
Summary: "Check permissions",
Description: "Checks if the authenticated user has permissions for given transactions",
Request: make([]*authtypes.Transaction, 0),
RequestContentType: "",
Response: make([]*authtypes.GettableTransaction, 0),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: nil,
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -4,7 +4,6 @@ import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/promotetypes"
"github.com/gorilla/mux"
)
@@ -21,7 +20,6 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
ResponseContentType: "",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleEditor),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
@@ -37,7 +35,6 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
ResponseContentType: "",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}

View File

@@ -20,10 +20,8 @@ import (
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/gorilla/mux"
)
@@ -46,8 +44,6 @@ type provider struct {
gatewayHandler gateway.Handler
fieldsHandler fields.Handler
authzHandler authz.Handler
zeusHandler zeus.Handler
querierHandler querier.Handler
}
func NewFactory(
@@ -67,8 +63,6 @@ func NewFactory(
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
zeusHandler zeus.Handler,
querierHandler querier.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
@@ -91,8 +85,6 @@ func NewFactory(
gatewayHandler,
fieldsHandler,
authzHandler,
zeusHandler,
querierHandler,
)
})
}
@@ -117,8 +109,6 @@ func newProvider(
gatewayHandler gateway.Handler,
fieldsHandler fields.Handler,
authzHandler authz.Handler,
zeusHandler zeus.Handler,
querierHandler querier.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -141,8 +131,6 @@ func newProvider(
gatewayHandler: gatewayHandler,
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
zeusHandler: zeusHandler,
querierHandler: querierHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -207,22 +195,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addAuthzRoutes(router); err != nil {
return err
}
if err := provider.addFieldsRoutes(router); err != nil {
return err
}
if err := provider.addZeusRoutes(router); err != nil {
return err
}
if err := provider.addQuerierRoutes(router); err != nil {
return err
}
return nil
}

View File

@@ -1,471 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
)
func (provider *provider) addQuerierRoutes(router *mux.Router) error {
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"querier"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
},
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v5/substitute_vars", handler.New(provider.authZ.ViewAccess(provider.querierHandler.ReplaceVariables), handler.OpenAPIDef{
ID: "ReplaceVariables",
Tags: []string{"querier"},
Summary: "Replace variables",
Description: "Replace variables in a query",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeRequest),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -5,7 +5,6 @@ import (
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/gorilla/mux"
)
@@ -16,12 +15,12 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
Tags: []string{"role"},
Summary: "Create role",
Description: "This endpoint creates a role",
Request: new(roletypes.PostableRole),
Request: nil,
RequestContentType: "",
Response: new(types.Identifiable),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPost).GetError(); err != nil {
@@ -45,23 +44,6 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/resources", handler.New(provider.authZ.AdminAccess(provider.authzHandler.GetResources), handler.OpenAPIDef{
ID: "GetResources",
Tags: []string{"role"},
Summary: "Get resources",
Description: "Gets all the available resources for role assignment",
Request: nil,
RequestContentType: "",
Response: new(roletypes.GettableResources),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Get), handler.OpenAPIDef{
ID: "GetRole",
Tags: []string{"role"},
@@ -79,51 +61,17 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/roles/{id}/relation/{relation}/objects", handler.New(provider.authZ.AdminAccess(provider.authzHandler.GetObjects), handler.OpenAPIDef{
ID: "GetObjects",
Tags: []string{"role"},
Summary: "Get objects for a role by relation",
Description: "Gets all objects connected to the specified role via a given relation type",
Request: nil,
RequestContentType: "",
Response: make([]*authtypes.Object, 0),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/roles/{id}", handler.New(provider.authZ.AdminAccess(provider.authzHandler.Patch), handler.OpenAPIDef{
ID: "PatchRole",
Tags: []string{"role"},
Summary: "Patch role",
Description: "This endpoint patches a role",
Request: new(roletypes.PatchableRole),
Request: nil,
RequestContentType: "",
Response: nil,
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPatch).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/roles/{id}/relation/{relation}/objects", handler.New(provider.authZ.AdminAccess(provider.authzHandler.PatchObjects), handler.OpenAPIDef{
ID: "PatchObjects",
Tags: []string{"role"},
Summary: "Patch objects for a role by relation",
Description: "Patches the objects connected to the specified role via a given relation type",
Request: new(roletypes.PatchableObjects),
RequestContentType: "",
Response: nil,
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusBadRequest, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPatch).GetError(); err != nil {
@@ -140,7 +88,7 @@ func (provider *provider) addRoleRoutes(router *mux.Router) error {
Response: nil,
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusNotFound, http.StatusNotImplemented, http.StatusUnavailableForLegalReasons},
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodDelete).GetError(); err != nil {

View File

@@ -1,65 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/zeustypes"
"github.com/gorilla/mux"
)
func (provider *provider) addZeusRoutes(router *mux.Router) error {
if err := router.Handle("/api/v2/zeus/profiles", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.PutProfile), handler.OpenAPIDef{
ID: "PutProfile",
Tags: []string{"zeus"},
Summary: "Put profile in Zeus for a deployment.",
Description: "This endpoint saves the profile of a deployment to zeus.",
Request: new(zeustypes.PostableProfile),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPut).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/zeus/hosts", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.GetHosts), handler.OpenAPIDef{
ID: "GetHosts",
Tags: []string{"zeus"},
Summary: "Get host info from Zeus.",
Description: "This endpoint gets the host info from zeus.",
Request: nil,
RequestContentType: "",
Response: new(zeustypes.GettableHost),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/zeus/hosts", handler.New(provider.authZ.AdminAccess(provider.zeusHandler.PutHost), handler.OpenAPIDef{
ID: "PutHost",
Tags: []string{"zeus"},
Summary: "Put host in Zeus for a deployment.",
Description: "This endpoint saves the host of a deployment to zeus.",
Request: new(zeustypes.PostableHost),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusNoContent,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusForbidden, http.StatusNotFound, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPut).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -14,14 +14,17 @@ import (
type AuthZ interface {
factory.Service
// Check returns error when the upstream authorization server is unavailable or the subject (s) doesn't have relation (r) on object (o).
Check(context.Context, *openfgav1.TupleKey) error
// CheckWithTupleCreation generates the tuples itself, in addition to everything Check does.
CheckWithTupleCreation(context.Context, authtypes.Claims, valuer.UUID, authtypes.Relation, authtypes.Typeable, []authtypes.Selector, []authtypes.Selector) error
// CheckWithTupleCreationWithoutClaims checks permissions for anonymous users.
CheckWithTupleCreationWithoutClaims(context.Context, valuer.UUID, authtypes.Relation, authtypes.Typeable, []authtypes.Selector, []authtypes.Selector) error
// BatchCheck accepts a map of ID → tuple and returns a map of ID → authorization result.
BatchCheck(context.Context, map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error)
// BatchCheck returns error when the upstream authorization server is unavailable or when, for all of the tuples, the subject (s) doesn't have relation (r) on object (o).
BatchCheck(context.Context, []*openfgav1.TupleKey) error
// Write accepts the insertion tuples and the deletion tuples.
Write(context.Context, []*openfgav1.TupleKey, []*openfgav1.TupleKey) error
@@ -99,7 +102,5 @@ type Handler interface {
PatchObjects(http.ResponseWriter, *http.Request)
Check(http.ResponseWriter, *http.Request)
Delete(http.ResponseWriter, *http.Request)
}

View File

@@ -26,7 +26,7 @@ func (store *store) Create(ctx context.Context, role *roletypes.StorableRole) er
Model(role).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "role with name: %s already exists", role.Name)
return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "role with id: %s already exists", role.ID)
}
return nil

View File

@@ -48,7 +48,11 @@ func (provider *provider) Stop(ctx context.Context) error {
return provider.server.Stop(ctx)
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
func (provider *provider) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
return provider.server.Check(ctx, tupleReq)
}
func (provider *provider) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
return provider.server.BatchCheck(ctx, tupleReq)
}
@@ -177,6 +181,10 @@ func (provider *provider) CreateManagedRoles(ctx context.Context, _ valuer.UUID,
return nil
}
func (provider *provider) SetManagedRoleTransactions(context.Context, valuer.UUID) error {
return nil
}
func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
return provider.Grant(ctx, orgID, roletypes.SigNozAdminRoleName, authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil))
}

View File

@@ -90,18 +90,42 @@ func (server *Server) Stop(ctx context.Context) error {
return nil
}
func (server *Server) BatchCheck(ctx context.Context, tupleReq map[string]*openfgav1.TupleKey) (map[string]*authtypes.TupleKeyAuthorization, error) {
func (server *Server) Check(ctx context.Context, tupleReq *openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0, len(tupleReq))
for id, tuple := range tupleReq {
checkResponse, err := server.openfgaServer.Check(
ctx,
&openfgav1.CheckRequest{
StoreId: storeID,
AuthorizationModelId: modelID,
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tupleReq.User,
Relation: tupleReq.Relation,
Object: tupleReq.Object,
},
})
if err != nil {
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
if !checkResponse.Allowed {
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subject %s cannot %s object %s", tupleReq.User, tupleReq.Relation, tupleReq.Object)
}
return nil
}
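// A minimal call sketch (illustrative only, not part of this change): Check
// returns nil when the single tuple is allowed, a forbidden error otherwise,
// and an unavailable error when the upstream OpenFGA server cannot be reached.
//
//	if err := server.Check(ctx, &openfgav1.TupleKey{User: subject, Relation: relation, Object: object}); err != nil {
//		// not authorized, or the authorization server is unavailable
//	}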
func (server *Server) BatchCheck(ctx context.Context, tupleReq []*openfgav1.TupleKey) error {
storeID, modelID := server.getStoreIDandModelID()
batchCheckItems := make([]*openfgav1.BatchCheckItem, 0)
for idx, tuple := range tupleReq {
batchCheckItems = append(batchCheckItems, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: tuple.User,
Relation: tuple.Relation,
Object: tuple.Object,
},
// Use transaction ID as correlation ID for deterministic mapping
CorrelationId: id,
// The batch check response is a map keyed by correlation ID, so the item index is used as the correlation ID.
CorrelationId: strconv.Itoa(idx),
})
}
@@ -113,18 +137,17 @@ func (server *Server) BatchCheck(ctx context.Context, tupleReq map[string]*openf
Checks: batchCheckItems,
})
if err != nil {
return nil, errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
return errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthZUnavailable, "authorization server is unavailable").WithAdditional(err.Error())
}
response := make(map[string]*authtypes.TupleKeyAuthorization, len(tupleReq))
for id, tuple := range tupleReq {
response[id] = &authtypes.TupleKeyAuthorization{
Tuple: tuple,
Authorized: checkResponse.Result[id].GetAllowed(),
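// The batch is authorized as a whole: the first allowed tuple short-circuits
// to nil, and the forbidden error below is returned only when none of the
// tuples in the batch are allowed.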
for _, checkResponse := range checkResponse.Result {
if checkResponse.GetAllowed() {
return nil
}
}
return response, nil
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
@@ -133,29 +156,17 @@ func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtyp
return err
}
tupleSlice, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
// Convert slice to map with generated IDs for internal use
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
for idx, tuple := range tupleSlice {
tuples[strconv.Itoa(idx)] = tuple
}
response, err := server.BatchCheck(ctx, tuples)
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
for _, resp := range response {
if resp.Authorized {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return nil
}
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, _ authtypes.Relation, _ authtypes.Typeable, _ []authtypes.Selector, roleSelectors []authtypes.Selector) error {
@@ -164,29 +175,17 @@ func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, o
return err
}
tupleSlice, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
tuples, err := authtypes.TypeableRole.Tuples(subject, authtypes.RelationAssignee, roleSelectors, orgID)
if err != nil {
return err
}
// Convert slice to map with generated IDs for internal use
tuples := make(map[string]*openfgav1.TupleKey, len(tupleSlice))
for idx, tuple := range tupleSlice {
tuples[strconv.Itoa(idx)] = tuple
}
response, err := server.BatchCheck(ctx, tuples)
err = server.BatchCheck(ctx, tuples)
if err != nil {
return err
}
for _, resp := range response {
if resp.Authorized {
return nil
}
}
return errors.Newf(errors.TypeForbidden, authtypes.ErrCodeAuthZForbidden, "subjects are not authorized for requested access")
return nil
}
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -36,14 +35,13 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
role := roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID))
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), role)
err = handler.authz.Create(ctx, valuer.MustNewUUID(claims.OrgID), roletypes.NewRole(req.Name, req.Description, roletypes.RoleTypeCustom, valuer.MustNewUUID(claims.OrgID)))
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusCreated, types.Identifiable{ID: role.ID})
render.Success(rw, http.StatusCreated, nil)
}
func (handler *handler) Get(rw http.ResponseWriter, r *http.Request) {
@@ -114,9 +112,17 @@ func (handler *handler) GetObjects(rw http.ResponseWriter, r *http.Request) {
}
func (handler *handler) GetResources(rw http.ResponseWriter, r *http.Request) {
resources := handler.authz.GetResources(r.Context())
ctx := r.Context()
resources := handler.authz.GetResources(ctx)
render.Success(rw, http.StatusOK, roletypes.NewGettableResources(resources))
var resourceRelations = struct {
Resources []*authtypes.Resource `json:"resources"`
Relations map[authtypes.Type][]authtypes.Relation `json:"relations"`
}{
Resources: resources,
Relations: authtypes.TypeableRelations,
}
render.Success(rw, http.StatusOK, resourceRelations)
}
func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
@@ -221,7 +227,7 @@ func (handler *handler) PatchObjects(rw http.ResponseWriter, r *http.Request) {
return
}
render.Success(rw, http.StatusNoContent, nil)
render.Success(rw, http.StatusAccepted, nil)
}
func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
@@ -246,39 +252,3 @@ func (handler *handler) Delete(rw http.ResponseWriter, r *http.Request) {
render.Success(rw, http.StatusNoContent, nil)
}
func (handler *handler) Check(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
transactions := make([]*authtypes.Transaction, 0)
if err := binding.JSON.BindBody(r.Body, &transactions); err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
render.Error(rw, err)
return
}
tuples, err := authtypes.NewTuplesFromTransactions(transactions, subject, orgID)
if err != nil {
render.Error(rw, err)
return
}
results, err := handler.authz.BatchCheck(ctx, tuples)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, authtypes.NewGettableTransaction(transactions, results))
}

View File

@@ -120,6 +120,8 @@ func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRange
}
}
resultData.Rows = filteredRows
case *qbtypes.ScalarData:
resultData.Data = filterScalarDataIPs(resultData.Columns, resultData.Data)
}
filteredData = append(filteredData, result)
@@ -145,6 +147,39 @@ func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
return true
}
func filterScalarDataIPs(columns []*qbtypes.ColumnDescriptor, data [][]any) [][]any {
// Find column indices for server address fields
serverColIndices := make([]int, 0)
for i, col := range columns {
if col.Name == derivedKeyHTTPHost {
serverColIndices = append(serverColIndices, i)
}
}
if len(serverColIndices) == 0 {
return data
}
filtered := make([][]any, 0, len(data))
for _, row := range data {
includeRow := true
for _, colIdx := range serverColIndices {
if colIdx < len(row) {
if strVal, ok := row[colIdx].(string); ok {
if net.ParseIP(strVal) != nil {
includeRow = false
break
}
}
}
}
if includeRow {
filtered = append(filtered, row)
}
}
return filtered
}
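// A minimal sketch of the behaviour above (illustrative, mirroring the
// accompanying unit test): rows whose server-address column parses as an IP
// literal are dropped, while hostnames and rows without a parseable string
// value are kept.
//
//	columns := []*qbtypes.ColumnDescriptor{
//		{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost}, Type: qbtypes.ColumnTypeGroup},
//	}
//	rows := [][]any{{"10.0.0.1", 5}, {"example.com", 20}}
//	_ = filterScalarDataIPs(columns, rows) // -> [["example.com", 20]]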
func shouldIncludeRow(row *qbtypes.RawRow) bool {
if row.Data != nil {
if domainVal, ok := row.Data[derivedKeyHTTPHost]; ok {

View File

@@ -117,6 +117,59 @@ func TestFilterResponse(t *testing.T) {
},
},
},
{
name: "should filter out IP addresses from scalar data",
input: []*qbtypes.QueryRangeResponse{
{
Data: qbtypes.QueryData{
Results: []any{
&qbtypes.ScalarData{
QueryName: "endpoints",
Columns: []*qbtypes.ColumnDescriptor{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Type: qbtypes.ColumnTypeGroup,
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "endpoints"},
Type: qbtypes.ColumnTypeAggregation,
},
},
Data: [][]any{
{"192.168.1.1", 10},
{"example.com", 20},
{"10.0.0.1", 5},
},
},
},
},
},
},
expected: []*qbtypes.QueryRangeResponse{
{
Data: qbtypes.QueryData{
Results: []any{
&qbtypes.ScalarData{
QueryName: "endpoints",
Columns: []*qbtypes.ColumnDescriptor{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: derivedKeyHTTPHost},
Type: qbtypes.ColumnTypeGroup,
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "endpoints"},
Type: qbtypes.ColumnTypeAggregation,
},
},
Data: [][]any{
{"example.com", 20},
},
},
},
},
},
},
},
}
for _, tt := range tests {

View File

@@ -21,17 +21,18 @@ import (
"github.com/SigNoz/signoz/pkg/variables"
)
type handler struct {
type API struct {
set factory.ProviderSettings
analytics analytics.Analytics
querier Querier
}
func NewHandler(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) Handler {
return &handler{set: set, querier: querier, analytics: analytics}
func NewAPI(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) *API {
return &API{set: set, querier: querier, analytics: analytics}
}
func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -52,7 +53,7 @@ func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
queryJSON, _ := json.Marshal(queryRangeRequest)
handler.set.Logger.ErrorContext(ctx, "panic in QueryRange",
a.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
@@ -78,17 +79,17 @@ func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}
queryRangeResponse, err := handler.querier.QueryRange(ctx, orgID, &queryRangeRequest)
queryRangeResponse, err := a.querier.QueryRange(ctx, orgID, &queryRangeRequest)
if err != nil {
render.Error(rw, err)
return
}
handler.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
a.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
render.Success(rw, http.StatusOK, queryRangeResponse)
}
func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
// get the param from url and add it to body
@@ -152,7 +153,7 @@ func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request
queryJSON, _ := json.Marshal(queryRangeRequest)
handler.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
a.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
@@ -192,7 +193,7 @@ func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request
flusher.Flush()
client := &qbtypes.RawStream{Name: req.RemoteAddr, Logs: make(chan *qbtypes.RawRow, 1000), Done: make(chan *bool), Error: make(chan error)}
go handler.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
go a.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
for {
select {
@@ -219,7 +220,7 @@ func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request
// TODO(srikanthccv): everything done here can be done on frontend as well
// For the time being I am adding a helper function
func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
@@ -271,7 +272,7 @@ func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Reque
render.Success(rw, http.StatusOK, queryRangeRequest)
}
func (handler *handler) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
return
@@ -304,9 +305,8 @@ func (handler *handler) logEvent(ctx context.Context, referrer string, event *qb
}
if !event.HasData {
handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
return
}
handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}

View File

@@ -2,7 +2,6 @@ package querier
import (
"context"
"net/http"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -21,9 +20,3 @@ type BucketCache interface {
// store fresh buckets for future hits
Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result)
}
type Handler interface {
QueryRange(rw http.ResponseWriter, req *http.Request)
QueryRawStream(rw http.ResponseWriter, req *http.Request)
ReplaceVariables(rw http.ResponseWriter, req *http.Request)
}

View File

@@ -1 +1,454 @@
package querier
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)
// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}
var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}
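// Hedged usage sketch (not part of this change): any example Value above can be posted as-is to
// the endpoint described by QueryRangeV5OpenAPIDef. The host, port, and SIGNOZ-API-KEY header
// name here are assumptions for illustration only.
//
//	body, _ := json.Marshal(queryRangeV5Examples[0].Value)
//	req, _ := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(body))
//	req.Header.Set("Content-Type", "application/json")
//	req.Header.Set("SIGNOZ-API-KEY", "<api-key>")
//	resp, err := http.DefaultClient.Do(req) // expect http.StatusOK with a QueryRangeResponse body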

View File

@@ -1913,7 +1913,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
// No V2 configuration found, return defaults
response.DefaultTTLDays = 15
response.TTLConditions = []model.CustomRetentionRule{}
response.Status = constants.StatusSuccess
response.Status = constants.StatusFailed
response.ColdStorageTTLDays = -1
return response, nil
}

View File

@@ -79,6 +79,8 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/version"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
)
type status string
@@ -143,6 +145,8 @@ type APIHandler struct {
LicensingAPI licensing.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
@@ -171,6 +175,8 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
@@ -233,6 +239,7 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -389,7 +396,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/query_progress", am.ViewAccess(aH.GetQueryProgressUpdates)).Methods(http.MethodGet)
// live logs
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.Signoz.Handlers.QuerierHandler.QueryRawStream)).Methods(http.MethodGet)
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -463,6 +470,12 @@ func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/metric/metric_metadata", am.ViewAccess(aH.getMetricMetadata)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterQueryRangeV5Routes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v5").Subrouter()
subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QuerierAPI.QueryRange)).Methods(http.MethodPost)
subRouter.HandleFunc("/substitute_vars", am.ViewAccess(aH.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
}
// todo(remove): Implemented at render package (github.com/SigNoz/signoz/pkg/http/render) with the new error structure
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
writeHttpResponse(w, data)

Some files were not shown because too many files have changed in this diff.