Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-20 07:42:42 +00:00)

Compare commits (18 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5780fac789 | |
| | 3450a98fdf | |
| | 92b07d15ea | |
| | a0dad1602e | |
| | 4fa5c42a36 | |
| | 3555a73ac0 | |
| | ed2d8d06cb | |
| | 203318a206 | |
| | 5cf5b70aca | |
| | 67fbf5a548 | |
| | 195dd36078 | |
| | 2c12f0bd03 | |
| | fcbb1843d9 | |
| | 00dff5c930 | |
| | c7e5258329 | |
| | 84a7be22f7 | |
| | c5923f942f | |
| | f91eee7e50 | |
@@ -85,6 +85,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
 		func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
 			return noopgateway.NewProviderFactory()
 		},
+		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
+			return querier.NewHandler(ps, q, a)
+		},
 	)
 	if err != nil {
 		logger.ErrorContext(ctx, "failed to create signoz", "error", err)
@@ -9,6 +9,7 @@ import (
 	"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
 	"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
 	"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
+	eequerier "github.com/SigNoz/signoz/ee/querier"
 	"github.com/SigNoz/signoz/ee/authz/openfgaschema"
 	"github.com/SigNoz/signoz/ee/gateway/httpgateway"
 	enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
@@ -124,6 +125,10 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
 		func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
 			return httpgateway.NewProviderFactory(licensing)
 		},
+		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
+			communityHandler := querier.NewHandler(ps, q, a)
+			return eequerier.NewHandler(ps, q, communityHandler)
+		},
 	)

 	if err != nil {
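The two hunks above show the wiring difference: the community build passes `querier.NewHandler` directly, while the EE build wraps the community handler in `eequerier.NewHandler` so it can intercept only the requests it cares about. A minimal sketch of that decorator shape, assuming a trimmed-down `Handler` interface (the real `querier.Handler` also exposes `QueryRawStream` and `ReplaceVariables`; all names here are illustrative, not the repository's exact types):

```go
package example

import "net/http"

// Handler is a stand-in for querier.Handler.
type Handler interface {
	QueryRange(rw http.ResponseWriter, req *http.Request)
}

// eeHandler decorates a community Handler: it serves the requests it
// recognizes (e.g. anomaly queries) and delegates everything else.
type eeHandler struct {
	community Handler
}

func (h *eeHandler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	// EE-specific branching would happen here; the fallthrough keeps
	// the community behavior intact for everything else.
	h.community.QueryRange(rw, req)
}
```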
@@ -307,11 +307,16 @@ components:
           type: string
         value:
           type: string
+      required:
+      - id
+      - value
       type: object
     GatewaytypesGettableCreatedIngestionKeyLimit:
       properties:
         id:
           type: string
+      required:
+      - id
       type: object
     GatewaytypesGettableIngestionKeys:
       properties:
@@ -432,6 +437,8 @@ components:
             type: string
           nullable: true
           type: array
+      required:
+      - name
       type: object
     GatewaytypesPostableIngestionKeyLimit:
       properties:
@@ -454,6 +461,8 @@ components:
             type: string
           nullable: true
           type: array
+      required:
+      - config
       type: object
     MetricsexplorertypesListMetric:
       properties:
@@ -740,6 +749,14 @@ components:
       - temporality
       - isMonotonic
       type: object
+    MetrictypesComparisonSpaceAggregationParam:
+      properties:
+        operator:
+          type: string
+        threshold:
+          format: double
+          type: number
+      type: object
     MetrictypesSpaceAggregation:
       enum:
       - sum
@@ -752,6 +769,7 @@ components:
       - p90
       - p95
      - p99
+      - histogram_count
       type: string
     MetrictypesTemporality:
       enum:
@@ -1036,6 +1054,8 @@ components:
       type: object
     Querybuildertypesv5MetricAggregation:
       properties:
+        comparisonSpaceAggregationParam:
+          $ref: '#/components/schemas/MetrictypesComparisonSpaceAggregationParam'
         metricName:
           type: string
         reduceTo:
@@ -1933,6 +1953,11 @@ components:
           type: string
         tier:
           type: string
+      required:
+      - name
+      - state
+      - tier
+      - hosts
       type: object
     ZeustypesHost:
       properties:
@@ -1942,6 +1967,10 @@ components:
           type: string
         url:
           type: string
+      required:
+      - name
+      - is_default
+      - url
       type: object
     ZeustypesPostableHost:
       properties:
@@ -1976,6 +2005,16 @@ components:
           type: boolean
         where_did_you_discover_signoz:
           type: string
+      required:
+      - uses_otel
+      - has_existing_observability_tool
+      - existing_observability_tool
+      - reasons_for_interest_in_signoz
+      - logs_scale_per_day_in_gb
+      - number_of_services
+      - number_of_hosts
+      - where_did_you_discover_signoz
+      - timeline_for_migrating_to_signoz
       type: object
   securitySchemes:
     api_key:
@@ -3195,12 +3234,29 @@ paths:
             schema:
               $ref: '#/components/schemas/RenderErrorResponse'
           description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
         "500":
           content:
             application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
+      security:
+      - api_key:
+        - VIEWER
+      - tokenizer:
+        - VIEWER
       summary: Promote and index paths
       tags:
       - logs
@@ -3225,12 +3281,29 @@ paths:
             schema:
               $ref: '#/components/schemas/RenderErrorResponse'
           description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
         "500":
           content:
             application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
+      security:
+      - api_key:
+        - EDITOR
+      - tokenizer:
+        - EDITOR
       summary: Promote and index paths
       tags:
       - logs
@@ -4777,6 +4850,7 @@ paths:
       parameters:
       - in: query
         name: name
+        required: true
         schema:
           type: string
       - in: query
@@ -6157,4 +6231,58 @@ paths:
         - VIEWER
       summary: Query range
       tags:
-      - query
+      - querier
+  /api/v5/substitute_vars:
+    post:
+      deprecated: false
+      description: Replace variables in a query
+      operationId: ReplaceVariables
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
+      responses:
+        "200":
+          content:
+            application/json:
+              schema:
+                properties:
+                  data:
+                    $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
+                  status:
+                    type: string
+                type: object
+          description: OK
+        "400":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      security:
+      - api_key:
+        - VIEWER
+      - tokenizer:
+        - VIEWER
+      summary: Replace variables
+      tags:
+      - querier
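The spec above adds `/api/v5/substitute_vars`, which accepts a `Querybuildertypesv5QueryRangeRequest` and returns the same shape with dashboard variables substituted. A hedged sketch of calling it from Go; the base URL, the token, and the `SIGNOZ-API-KEY` header name are assumptions about a concrete deployment, not part of the spec:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Minimal illustrative payload; real requests carry a full
	// compositeQuery as shown in the request examples in this diff.
	payload := []byte(`{
		"schemaVersion": "v1",
		"start": 1640995200000,
		"end": 1640998800000,
		"requestType": "time_series",
		"compositeQuery": {"queries": []}
	}`)

	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:8080/api/v5/substitute_vars", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// The api_key scheme requires the VIEWER role; header name assumed.
	req.Header.Set("SIGNOZ-API-KEY", "<your-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```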
@@ -190,43 +190,44 @@ type OpenAPIExample struct {
 }
 ```

-For reference, see `pkg/querier/openapi.go` which defines some examples for the `/api/v5/query_range` endpoint:
+For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:

 ```go
-var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
-	ID:          "QueryRangeV5",
-	Tags:        []string{"query"},
-	Summary:     "Query range",
-	Description: "Execute a composite query over a time range.",
-	Request:     new(qbtypes.QueryRangeRequest),
-	RequestExamples: queryRangeV5Examples, // []handler.OpenAPIExample
-	// ...
-}
-
-var queryRangeV5Examples = []handler.OpenAPIExample{
-	{
-		Name:    "traces_time_series",
-		Summary: "Time series: count spans grouped by service",
-		Value: map[string]any{
-			"schemaVersion": "v1",
-			"start":         1640995200000,
-			"end":           1640998800000,
-			"requestType":   "time_series",
-			"compositeQuery": map[string]any{
-				"queries": []any{
-					map[string]any{
-						"type": "builder_query",
-						"spec": map[string]any{
-							"name":   "A",
-							"signal": "traces",
-							// ...
-						},
-					},
-				},
-			},
-		},
-	},
-	// ... more examples
-}
+if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
+	ID:                 "QueryRangeV5",
+	Tags:               []string{"querier"},
+	Summary:            "Query range",
+	Description:        "Execute a composite query over a time range.",
+	Request:            new(qbtypes.QueryRangeRequest),
+	RequestContentType: "application/json",
+	RequestExamples: []handler.OpenAPIExample{
+		{
+			Name:    "traces_time_series",
+			Summary: "Time series: count spans grouped by service",
+			Value: map[string]any{
+				"schemaVersion": "v1",
+				"start":         1640995200000,
+				"end":           1640998800000,
+				"requestType":   "time_series",
+				"compositeQuery": map[string]any{
+					"queries": []any{
+						map[string]any{
+							"type": "builder_query",
+							"spec": map[string]any{
+								"name":   "A",
+								"signal": "traces",
+								// ...
+							},
+						},
+					},
+				},
+			},
+		},
+		// ... more examples
+	},
+	// ...
+})).Methods(http.MethodPost).GetError(); err != nil {
+	return err
+}
 ```
@@ -338,4 +339,4 @@ func (Step) JSONSchema() (jsonschema.Schema, error) {
 - **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
 - **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil` which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
 - **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
-- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/querier/openapi.go` for reference.
+- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.
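The checklist above maps one-to-one onto struct tags and methods. A short sketch of the conventions it describes, under the assumption that the project's schema generator reads `required`/`nullable` tags and an `Enum()` method exactly as the document says; the `Step` and `Kind` fields are made up for illustration:

```go
package steptypes

// Kind has a fixed set of acceptable values.
type Kind string

// Enum lists the acceptable values so the generated JSON schema gets a
// proper enum constraint, per the checklist above.
func (Kind) Enum() []any {
	return []any{"manual", "automatic"}
}

// Step shows the required/nullable tagging conventions.
type Step struct {
	// required: the "name" key must be present in the JSON payload.
	Name string `json:"name" required:"true"`
	// nullable: a nil slice serializes to null, so the field is marked
	// nullable rather than being presented as an always-present array.
	Labels []string `json:"labels" nullable:"true"`
	Kind   Kind     `json:"kind" required:"true"`
}
```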
ee/querier/handler.go (178 lines, new file)
@@ -0,0 +1,178 @@
package querier

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"runtime/debug"

	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type handler struct {
	set       factory.ProviderSettings
	querier   querier.Querier
	community querier.Handler
}

func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
	return &handler{
		set:       set,
		querier:   querier,
		community: community,
	}
}

func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	bodyBytes, err := io.ReadAll(req.Body)
	if err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
		return
	}
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
		return
	}

	defer func() {
		if r := recover(); r != nil {
			stackTrace := string(debug.Stack())

			queryJSON, _ := json.Marshal(queryRangeRequest)

			h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
				"stacktrace", stackTrace,
			)

			render.Error(rw, errors.NewInternalf(
				errors.CodeInternal,
				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
			))
		}
	}()

	if err := queryRangeRequest.Validate(); err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
		anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
		if err != nil {
			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
			return
		}

		results := []any{}
		for _, item := range anomalies.Results {
			results = append(results, item)
		}

		// Build step intervals from the anomaly query
		stepIntervals := make(map[string]uint64)
		if anomalyQuery.StepInterval.Duration > 0 {
			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
		}

		finalResp := &qbtypes.QueryRangeResponse{
			Type: queryRangeRequest.RequestType,
			Data: qbtypes.QueryData{
				Results: results,
			},
			Meta: qbtypes.ExecStats{
				StepIntervals: stepIntervals,
			},
		}

		render.Success(rw, http.StatusOK, finalResp)
		return
	}

	// regular query range request, delegate to community handler
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
	h.community.QueryRange(rw, req)
}

func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	h.community.QueryRawStream(rw, req)
}

func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	h.community.ReplaceVariables(rw, req)
}

func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
	for _, fn := range anomalyQuery.Functions {
		if fn.Name == qbtypes.FunctionNameAnomaly {
			for _, arg := range fn.Args {
				if arg.Name == "seasonality" {
					if seasonalityStr, ok := arg.Value.(string); ok {
						switch seasonalityStr {
						case "weekly":
							return anomalyV2.SeasonalityWeekly
						case "hourly":
							return anomalyV2.SeasonalityHourly
						}
					}
				}
			}
		}
	}
	return anomalyV2.SeasonalityDaily // default
}

func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
	switch seasonality {
	case anomalyV2.SeasonalityWeekly:
		return anomalyV2.NewWeeklyProvider(
			anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
		)
	case anomalyV2.SeasonalityHourly:
		return anomalyV2.NewHourlyProvider(
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
		)
	default:
		return anomalyV2.NewDailyProvider(
			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
		)
	}
}

func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
	seasonality := extractSeasonality(anomalyQuery)
	provider := h.createAnomalyProvider(seasonality)

	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}
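`extractSeasonality` above walks the query's functions looking for an `anomaly` function and reads its `seasonality` argument, falling back to daily. An illustrative query spec that would select the weekly provider; the exact JSON field names for functions and args are inferred from the Go code above, not quoted from the schema:

```go
package example

// Decoded into qbtypes.QueryRangeRequest, a spec like this would make
// extractSeasonality return anomalyV2.SeasonalityWeekly.
const anomalySpec = `{
	"name": "A",
	"signal": "metrics",
	"functions": [
		{
			"name": "anomaly",
			"args": [
				{ "name": "seasonality", "value": "weekly" }
			]
		}
	]
}`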
@@ -10,9 +10,7 @@ import (
 	"github.com/SigNoz/signoz/ee/query-service/usage"
 	"github.com/SigNoz/signoz/pkg/alertmanager"
 	"github.com/SigNoz/signoz/pkg/global"
-	"github.com/SigNoz/signoz/pkg/http/handler"
 	"github.com/SigNoz/signoz/pkg/http/middleware"
-	querierAPI "github.com/SigNoz/signoz/pkg/querier"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
 	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -57,7 +55,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
 		AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
 		LicensingAPI:    httplicensing.NewLicensingAPI(signoz.Licensing),
 		Signoz:          signoz,
-		QuerierAPI:      querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
 		QueryParserAPI:  queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
 	}, config)

@@ -106,14 +103,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
 	// v4
 	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

-	// v5
-	router.Handle("/api/v5/query_range", handler.New(
-		am.ViewAccess(ah.queryRangeV5),
-		querierAPI.QueryRangeV5OpenAPIDef,
-	)).Methods(http.MethodPost)
-
-	router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
-
 	// Gateway
 	router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
@@ -2,16 +2,11 @@ package api

 import (
-	"bytes"
-	"context"
 	"encoding/json"
 	"fmt"
-	"io"
 	"net/http"
-	"runtime/debug"

-	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
 	"github.com/SigNoz/signoz/ee/query-service/anomaly"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/http/render"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -20,8 +15,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"go.uber.org/zap"
-
-	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 )

 func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -144,140 +137,3 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
 	}
 }

-func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
-	for _, fn := range anomalyQuery.Functions {
-		if fn.Name == qbtypes.FunctionNameAnomaly {
-			for _, arg := range fn.Args {
-				if arg.Name == "seasonality" {
-					if seasonalityStr, ok := arg.Value.(string); ok {
-						switch seasonalityStr {
-						case "weekly":
-							return anomalyV2.SeasonalityWeekly
-						case "hourly":
-							return anomalyV2.SeasonalityHourly
-						}
-					}
-				}
-			}
-		}
-	}
-	return anomalyV2.SeasonalityDaily // default
-}
-
-func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
-	switch seasonality {
-	case anomalyV2.SeasonalityWeekly:
-		return anomalyV2.NewWeeklyProvider(
-			anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
-			anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
-		)
-	case anomalyV2.SeasonalityHourly:
-		return anomalyV2.NewHourlyProvider(
-			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
-			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
-		)
-	default:
-		return anomalyV2.NewDailyProvider(
-			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
-			anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
-		)
-	}
-}
-
-func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
-	seasonality := extractSeasonality(anomalyQuery)
-	provider := createAnomalyProvider(aH, seasonality)
-
-	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
-}
-
-func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
-
-	bodyBytes, err := io.ReadAll(req.Body)
-	if err != nil {
-		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
-		return
-	}
-	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
-
-	ctx := req.Context()
-
-	claims, err := authtypes.ClaimsFromContext(ctx)
-	if err != nil {
-		render.Error(rw, err)
-		return
-	}
-
-	var queryRangeRequest qbtypes.QueryRangeRequest
-	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
-		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
-		return
-	}
-
-	defer func() {
-		if r := recover(); r != nil {
-			stackTrace := string(debug.Stack())
-
-			queryJSON, _ := json.Marshal(queryRangeRequest)
-
-			aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
-				"error", r,
-				"user", claims.UserID,
-				"payload", string(queryJSON),
-				"stacktrace", stackTrace,
-			)
-
-			render.Error(rw, errors.NewInternalf(
-				errors.CodeInternal,
-				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
-			))
-		}
-	}()
-
-	if err := queryRangeRequest.Validate(); err != nil {
-		render.Error(rw, err)
-		return
-	}
-
-	orgID, err := valuer.NewUUID(claims.OrgID)
-	if err != nil {
-		render.Error(rw, err)
-		return
-	}
-
-	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
-		anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
-		if err != nil {
-			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
-			return
-		}
-
-		results := []any{}
-		for _, item := range anomalies.Results {
-			results = append(results, item)
-		}
-
-		// Build step intervals from the anomaly query
-		stepIntervals := make(map[string]uint64)
-		if anomalyQuery.StepInterval.Duration > 0 {
-			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
-		}
-
-		finalResp := &qbtypes.QueryRangeResponse{
-			Type: queryRangeRequest.RequestType,
-			Data: qbtypes.QueryData{
-				Results: results,
-			},
-			Meta: qbtypes.ExecStats{
-				StepIntervals: stepIntervals,
-			},
-		}
-
-		render.Success(rw, http.StatusOK, finalResp)
-		return
-	} else {
-		// regular query range request, let the querier handle it
-		req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
-		aH.QuerierAPI.QueryRange(rw, req)
-	}
-}
@@ -240,7 +240,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
 	apiHandler.RegisterQueryRangeV3Routes(r, am)
 	apiHandler.RegisterInfraMetricsRoutes(r, am)
 	apiHandler.RegisterQueryRangeV4Routes(r, am)
-	apiHandler.RegisterQueryRangeV5Routes(r, am)
 	apiHandler.RegisterWebSocketPaths(r, am)
 	apiHandler.RegisterMessagingQueuesRoutes(r, am)
 	apiHandler.RegisterThirdPartyApiRoutes(r, am)
@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
  * @summary Search ingestion keys for workspace
  */
 export const searchIngestionKeys = (
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
 	signal?: AbortSignal,
 ) => {
 	return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
 	TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
 	TError = RenderErrorResponseDTO
 >(
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
 	options?: {
 		query?: UseQueryOptions<
 			Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
 	TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
 	TError = RenderErrorResponseDTO
 >(
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
 	options?: {
 		query?: UseQueryOptions<
 			Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
  */
 export const invalidateSearchIngestionKeys = async (
 	queryClient: QueryClient,
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
 	options?: InvalidateOptions,
 ): Promise<QueryClient> => {
 	await queryClient.invalidateQueries(
@@ -16,6 +16,7 @@ import type {
 	Querybuildertypesv5QueryRangeRequestDTO,
 	QueryRangeV5200,
 	RenderErrorResponseDTO,
+	ReplaceVariables200,
 } from '../sigNoz.schemas';

 type AwaitedInput<T> = PromiseLike<T> | T;
@@ -105,3 +106,86 @@ export const useQueryRangeV5 = <

 	return useMutation(mutationOptions);
 };
+
+/**
+ * Replace variables in a query
+ * @summary Replace variables
+ */
+export const replaceVariables = (
+	querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
+	signal?: AbortSignal,
+) => {
+	return GeneratedAPIInstance<ReplaceVariables200>({
+		url: `/api/v5/substitute_vars`,
+		method: 'POST',
+		headers: { 'Content-Type': 'application/json' },
+		data: querybuildertypesv5QueryRangeRequestDTO,
+		signal,
+	});
+};
+
+export const getReplaceVariablesMutationOptions = <
+	TError = RenderErrorResponseDTO,
+	TContext = unknown
+>(options?: {
+	mutation?: UseMutationOptions<
+		Awaited<ReturnType<typeof replaceVariables>>,
+		TError,
+		{ data: Querybuildertypesv5QueryRangeRequestDTO },
+		TContext
+	>;
+}): UseMutationOptions<
+	Awaited<ReturnType<typeof replaceVariables>>,
+	TError,
+	{ data: Querybuildertypesv5QueryRangeRequestDTO },
+	TContext
+> => {
+	const mutationKey = ['replaceVariables'];
+	const { mutation: mutationOptions } = options
+		? options.mutation &&
+		  'mutationKey' in options.mutation &&
+		  options.mutation.mutationKey
+			? options
+			: { ...options, mutation: { ...options.mutation, mutationKey } }
+		: { mutation: { mutationKey } };
+
+	const mutationFn: MutationFunction<
+		Awaited<ReturnType<typeof replaceVariables>>,
+		{ data: Querybuildertypesv5QueryRangeRequestDTO }
+	> = (props) => {
+		const { data } = props ?? {};
+
+		return replaceVariables(data);
+	};
+
+	return { mutationFn, ...mutationOptions };
+};
+
+export type ReplaceVariablesMutationResult = NonNullable<
+	Awaited<ReturnType<typeof replaceVariables>>
+>;
+export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
+export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
+
+/**
+ * @summary Replace variables
+ */
+export const useReplaceVariables = <
+	TError = RenderErrorResponseDTO,
+	TContext = unknown
+>(options?: {
+	mutation?: UseMutationOptions<
+		Awaited<ReturnType<typeof replaceVariables>>,
+		TError,
+		{ data: Querybuildertypesv5QueryRangeRequestDTO },
+		TContext
+	>;
+}): UseMutationResult<
+	Awaited<ReturnType<typeof replaceVariables>>,
+	TError,
+	{ data: Querybuildertypesv5QueryRangeRequestDTO },
+	TContext
+> => {
+	const mutationOptions = getReplaceVariablesMutationOptions(options);
+
+	return useMutation(mutationOptions);
+};
@@ -453,18 +453,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
 	/**
 	 * @type string
 	 */
-	id?: string;
+	id: string;
 	/**
 	 * @type string
 	 */
-	value?: string;
+	value: string;
 }

 export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
 	/**
 	 * @type string
 	 */
-	id?: string;
+	id: string;
 }

 export interface GatewaytypesGettableIngestionKeysDTO {
@@ -616,7 +616,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
 	/**
 	 * @type string
 	 */
-	name?: string;
+	name: string;
 	/**
 	 * @type array
 	 * @nullable true
@@ -638,7 +638,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
 }

 export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
-	config?: GatewaytypesLimitConfigDTO;
+	config: GatewaytypesLimitConfigDTO;
 	/**
 	 * @type array
 	 * @nullable true
@@ -2419,34 +2419,34 @@ export interface ZeustypesGettableHostDTO {
 	 * @type array
 	 * @nullable true
 	 */
-	hosts?: ZeustypesHostDTO[] | null;
+	hosts: ZeustypesHostDTO[] | null;
 	/**
 	 * @type string
 	 */
-	name?: string;
+	name: string;
 	/**
 	 * @type string
 	 */
-	state?: string;
+	state: string;
 	/**
 	 * @type string
 	 */
-	tier?: string;
+	tier: string;
 }

 export interface ZeustypesHostDTO {
 	/**
 	 * @type boolean
 	 */
-	is_default?: boolean;
+	is_default: boolean;
 	/**
 	 * @type string
 	 */
-	name?: string;
+	name: string;
 	/**
 	 * @type string
 	 */
-	url?: string;
+	url: string;
 }

 export interface ZeustypesPostableHostDTO {
@@ -2460,43 +2460,43 @@ export interface ZeustypesPostableProfileDTO {
 	/**
 	 * @type string
 	 */
-	existing_observability_tool?: string;
+	existing_observability_tool: string;
 	/**
 	 * @type boolean
 	 */
-	has_existing_observability_tool?: boolean;
+	has_existing_observability_tool: boolean;
 	/**
 	 * @type integer
 	 * @format int64
 	 */
-	logs_scale_per_day_in_gb?: number;
+	logs_scale_per_day_in_gb: number;
 	/**
 	 * @type integer
 	 * @format int64
 	 */
-	number_of_hosts?: number;
+	number_of_hosts: number;
 	/**
 	 * @type integer
 	 * @format int64
 	 */
-	number_of_services?: number;
+	number_of_services: number;
 	/**
 	 * @type array
 	 * @nullable true
 	 */
-	reasons_for_interest_in_signoz?: string[] | null;
+	reasons_for_interest_in_signoz: string[] | null;
 	/**
 	 * @type string
 	 */
-	timeline_for_migrating_to_signoz?: string;
+	timeline_for_migrating_to_signoz: string;
 	/**
 	 * @type boolean
 	 */
-	uses_otel?: boolean;
+	uses_otel: boolean;
 	/**
 	 * @type string
 	 */
-	where_did_you_discover_signoz?: string;
+	where_did_you_discover_signoz: string;
 }

 export type ChangePasswordPathParameters = {
@@ -3039,7 +3039,7 @@ export type SearchIngestionKeysParams = {
 	 * @type string
 	 * @description undefined
 	 */
-	name?: string;
+	name: string;
 	/**
 	 * @type integer
 	 * @description undefined
@@ -3229,3 +3229,11 @@ export type QueryRangeV5200 = {
 	 */
 	status?: string;
 };
+
+export type ReplaceVariables200 = {
+	data?: Querybuildertypesv5QueryRangeRequestDTO;
+	/**
+	 * @type string
+	 */
+	status?: string;
+};
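The flip from optional (`?`) to required fields above falls directly out of the `required`/`nullable` tags on the Go types that feed the spec generator. A hedged sketch of what the `ZeustypesHostDTO` and `ZeustypesGettableHostDTO` shapes suggest on the Go side; the field sets match the DTOs above, but the tag syntax is an assumption based on the checklist earlier in this diff:

```go
package zeustypes

// Host would generate ZeustypesHostDTO with all keys required.
type Host struct {
	IsDefault bool   `json:"is_default" required:"true"`
	Name      string `json:"name" required:"true"`
	URL       string `json:"url" required:"true"`
}

// GettableHost would generate ZeustypesGettableHostDTO: hosts is both
// required (the key must be present) and nullable (a nil slice
// serializes to null, hence the `| null` in the TS type).
type GettableHost struct {
	Hosts []Host `json:"hosts" required:"true" nullable:"true"`
	Name  string `json:"name" required:"true"`
	State string `json:"state" required:"true"`
	Tier  string `json:"tier" required:"true"`
}
```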
@@ -1,7 +1,6 @@
 import { useCallback } from 'react';
 import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
 import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
-import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
 import {
 	HistogramTooltipProps,
 	TooltipRenderArgs,
@@ -22,21 +21,11 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
 			if (customRenderTooltip) {
 				return customRenderTooltip(props);
 			}
-			const content = buildTooltipContent({
-				data: props.uPlotInstance.data,
-				series: props.uPlotInstance.series,
-				dataIndexes: props.dataIndexes,
-				activeSeriesIndex: props.seriesIndex,
-				uPlotInstance: props.uPlotInstance,
-				yAxisUnit: rest.yAxisUnit ?? '',
-				decimalPrecision: rest.decimalPrecision,
-			});
 			const tooltipProps: HistogramTooltipProps = {
 				...props,
 				timezone: rest.timezone,
 				yAxisUnit: rest.yAxisUnit,
 				decimalPrecision: rest.decimalPrecision,
-				content,
 			};
 			return <HistogramTooltip {...tooltipProps} />;
 		},
@@ -1,7 +1,6 @@
 import { useCallback } from 'react';
 import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
 import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
-import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
 import {
 	TimeSeriesTooltipProps,
 	TooltipRenderArgs,
@@ -17,21 +16,11 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
 			if (customRenderTooltip) {
 				return customRenderTooltip(props);
 			}
-			const content = buildTooltipContent({
-				data: props.uPlotInstance.data,
-				series: props.uPlotInstance.series,
-				dataIndexes: props.dataIndexes,
-				activeSeriesIndex: props.seriesIndex,
-				uPlotInstance: props.uPlotInstance,
-				yAxisUnit: rest.yAxisUnit ?? '',
-				decimalPrecision: rest.decimalPrecision,
-			});
 			const tooltipProps: TimeSeriesTooltipProps = {
 				...props,
 				timezone: rest.timezone,
 				yAxisUnit: rest.yAxisUnit,
 				decimalPrecision: rest.decimalPrecision,
-				content,
 			};
 			return <TimeSeriesTooltip {...tooltipProps} />;
 		},
@@ -1,8 +1,31 @@
-import { HistogramTooltipProps } from '../types';
+import { useMemo } from 'react';
+
+import { HistogramTooltipProps, TooltipContentItem } from '../types';
 import Tooltip from './Tooltip';
+import { buildTooltipContent } from './utils';

 export default function HistogramTooltip(
 	props: HistogramTooltipProps,
 ): JSX.Element {
-	return <Tooltip {...props} showTooltipHeader={false} />;
+	const content = useMemo(
+		(): TooltipContentItem[] =>
+			buildTooltipContent({
+				data: props.uPlotInstance.data,
+				series: props.uPlotInstance.series,
+				dataIndexes: props.dataIndexes,
+				activeSeriesIndex: props.seriesIndex,
+				uPlotInstance: props.uPlotInstance,
+				yAxisUnit: props.yAxisUnit ?? '',
+				decimalPrecision: props.decimalPrecision,
+			}),
+		[
+			props.uPlotInstance,
+			props.seriesIndex,
+			props.dataIndexes,
+			props.yAxisUnit,
+			props.decimalPrecision,
+		],
+	);
+
+	return <Tooltip {...props} content={content} showTooltipHeader={false} />;
 }
@@ -1,8 +1,31 @@
-import { TimeSeriesTooltipProps } from '../types';
+import { useMemo } from 'react';
+
+import { TimeSeriesTooltipProps, TooltipContentItem } from '../types';
 import Tooltip from './Tooltip';
+import { buildTooltipContent } from './utils';

 export default function TimeSeriesTooltip(
 	props: TimeSeriesTooltipProps,
 ): JSX.Element {
-	return <Tooltip {...props} />;
+	const content = useMemo(
+		(): TooltipContentItem[] =>
+			buildTooltipContent({
+				data: props.uPlotInstance.data,
+				series: props.uPlotInstance.series,
+				dataIndexes: props.dataIndexes,
+				activeSeriesIndex: props.seriesIndex,
+				uPlotInstance: props.uPlotInstance,
+				yAxisUnit: props.yAxisUnit ?? '',
+				decimalPrecision: props.decimalPrecision,
+			}),
+		[
+			props.uPlotInstance,
+			props.seriesIndex,
+			props.dataIndexes,
+			props.yAxisUnit,
+			props.decimalPrecision,
+		],
+	);
+
+	return <Tooltip {...props} content={content} />;
 }
@@ -28,18 +28,23 @@
 	font-weight: 500;
 }

-.uplot-tooltip-list {
-	&::-webkit-scrollbar {
-		width: 0.3rem;
-	}
-
-	&::-webkit-scrollbar-track {
-		background: transparent;
-	}
-
-	&::-webkit-scrollbar-thumb {
-		background: var(--bg-slate-100);
-		border-radius: 0.5rem;
-	}
-}
+.uplot-tooltip-list-container {
+	overflow-y: auto;
+	max-height: 330px;
+
+	.uplot-tooltip-list {
+		&::-webkit-scrollbar {
+			width: 0.3rem;
+		}
+
+		&::-webkit-scrollbar-track {
+			background: transparent;
+		}
+
+		&::-webkit-scrollbar-thumb {
+			background: var(--bg-slate-100);
+			border-radius: 0.5rem;
+		}
+	}
+}
@@ -1,4 +1,4 @@
-import { useMemo } from 'react';
+import { useMemo, useState } from 'react';
 import { Virtuoso } from 'react-virtuoso';
 import cx from 'classnames';
 import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
@@ -11,6 +11,7 @@ import './Tooltip.styles.scss';

 const TOOLTIP_LIST_MAX_HEIGHT = 330;
 const TOOLTIP_ITEM_HEIGHT = 38;
+const TOOLTIP_LIST_PADDING = 10;

 export default function Tooltip({
 	uPlotInstance,
@@ -19,7 +20,7 @@ export default function Tooltip({
 	showTooltipHeader = true,
 }: TooltipProps): JSX.Element {
 	const isDarkMode = useIsDarkMode();
-
+	const [listHeight, setListHeight] = useState(0);
 	const tooltipContent = content ?? [];

 	const headerTitle = useMemo(() => {
@@ -41,42 +42,52 @@ export default function Tooltip({
 		showTooltipHeader,
 	]);

+	const virtuosoStyle = useMemo(() => {
+		return {
+			height:
+				listHeight > 0
+					? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
+					: Math.min(
+							tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
+							TOOLTIP_LIST_MAX_HEIGHT,
+					  ),
+			width: '100%',
+		};
+	}, [listHeight, tooltipContent.length]);
+
 	return (
 		<div
 			className={cx(
 				'uplot-tooltip-container',
 				isDarkMode ? 'darkMode' : 'lightMode',
 			)}
 			data-testid="uplot-tooltip-container"
 		>
 			{showTooltipHeader && (
-				<div className="uplot-tooltip-header">
+				<div className="uplot-tooltip-header" data-testid="uplot-tooltip-header">
 					<span>{headerTitle}</span>
 				</div>
 			)}
-			<div
-				style={{
-					height: Math.min(
-						tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
-						TOOLTIP_LIST_MAX_HEIGHT,
-					),
-					minHeight: 0,
-				}}
-			>
+			<div className="uplot-tooltip-list-container">
 				{tooltipContent.length > 0 ? (
 					<Virtuoso
 						className="uplot-tooltip-list"
+						data-testid="uplot-tooltip-list"
 						data={tooltipContent}
 						defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
+						style={virtuosoStyle}
+						totalListHeightChanged={setListHeight}
 						itemContent={(_, item): JSX.Element => (
-							<div className="uplot-tooltip-item">
+							<div className="uplot-tooltip-item" data-testid="uplot-tooltip-item">
 								<div
 									className="uplot-tooltip-item-marker"
 									style={{ borderColor: item.color }}
 									data-is-legend-marker={true}
+									data-testid="uplot-tooltip-item-marker"
 								/>
 								<div
 									className="uplot-tooltip-item-content"
 									style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
+									data-testid="uplot-tooltip-item-content"
 								>
 									{item.label}: {item.tooltipValue}
 								</div>
@@ -0,0 +1,208 @@
import React from 'react';
import { VirtuosoMockContext } from 'react-virtuoso';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { render, RenderResult, screen } from 'tests/test-utils';
import uPlot from 'uplot';

import { TooltipContentItem } from '../../types';
import Tooltip from '../Tooltip';

type MockVirtuosoProps = {
	data: TooltipContentItem[];
	itemContent: (index: number, item: TooltipContentItem) => React.ReactNode;
	className?: string;
	style?: React.CSSProperties;
	totalListHeightChanged?: (height: number) => void;
	'data-testid'?: string;
};

let mockTotalListHeight = 200;

jest.mock('react-virtuoso', () => {
	const actual = jest.requireActual('react-virtuoso');

	return {
		...actual,
		Virtuoso: ({
			data,
			itemContent,
			className,
			style,
			totalListHeightChanged,
			'data-testid': dataTestId,
		}: MockVirtuosoProps): JSX.Element => {
			if (totalListHeightChanged) {
				// Simulate Virtuoso reporting total list height
				totalListHeightChanged(mockTotalListHeight);
			}

			return (
				<div className={className} style={style} data-testid={dataTestId}>
					{data.map((item, index) => (
						<div key={item.label ?? index.toString()}>{itemContent(index, item)}</div>
					))}
				</div>
			);
		},
	};
});

jest.mock('hooks/useDarkMode', () => ({
	useIsDarkMode: jest.fn(),
}));

const mockUseIsDarkMode = useIsDarkMode as jest.MockedFunction<
	typeof useIsDarkMode
>;

type TooltipTestProps = React.ComponentProps<typeof Tooltip>;

function createTooltipContent(
	overrides: Partial<TooltipContentItem> = {},
): TooltipContentItem {
	return {
		label: 'Series A',
		value: 10,
		tooltipValue: '10',
		color: '#ff0000',
		isActive: true,
		...overrides,
	};
}

function createUPlotInstance(cursorIdx: number | null): uPlot {
	return ({
		data: [[1], []],
		cursor: { idx: cursorIdx },
		// The rest of the uPlot fields are not used by Tooltip
	} as unknown) as uPlot;
}

function renderTooltip(props: Partial<TooltipTestProps> = {}): RenderResult {
	const defaultProps: TooltipTestProps = {
		uPlotInstance: createUPlotInstance(null),
		timezone: 'UTC',
		content: [],
		showTooltipHeader: true,
		// TooltipRenderArgs (not used directly in component but required by type)
		dataIndexes: [],
		seriesIndex: null,
		isPinned: false,
		dismiss: jest.fn(),
		viaSync: false,
	} as TooltipTestProps;

	return render(
		<VirtuosoMockContext.Provider value={{ viewportHeight: 300, itemHeight: 38 }}>
			<Tooltip {...defaultProps} {...props} />
		</VirtuosoMockContext.Provider>,
	);
}

describe('Tooltip', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		mockUseIsDarkMode.mockReturnValue(false);
		mockTotalListHeight = 200;
	});

	it('renders header title when showTooltipHeader is true and cursor index is present', () => {
		const uPlotInstance = createUPlotInstance(0);

		renderTooltip({ uPlotInstance });

		const expectedTitle = dayjs(1 * 1000)
			.tz('UTC')
			.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);

		expect(screen.getByText(expectedTitle)).toBeInTheDocument();
	});

	it('does not render header when showTooltipHeader is false', () => {
		const uPlotInstance = createUPlotInstance(0);

		renderTooltip({ uPlotInstance, showTooltipHeader: false });

		const unexpectedTitle = dayjs(1 * 1000)
			.tz('UTC')
			.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);

		expect(screen.queryByText(unexpectedTitle)).not.toBeInTheDocument();
	});

	it('renders lightMode class when dark mode is disabled', () => {
		const uPlotInstance = createUPlotInstance(null);
		mockUseIsDarkMode.mockReturnValue(false);

		renderTooltip({ uPlotInstance });

		const container = screen.getByTestId('uplot-tooltip-container');

		expect(container).toHaveClass('lightMode');
		expect(container).not.toHaveClass('darkMode');
	});

	it('renders darkMode class when dark mode is enabled', () => {
		const uPlotInstance = createUPlotInstance(null);
		mockUseIsDarkMode.mockReturnValue(true);

		renderTooltip({ uPlotInstance });

		const container = screen.getByTestId('uplot-tooltip-container');

		expect(container).toHaveClass('darkMode');
		expect(container).not.toHaveClass('lightMode');
	});

	it('renders tooltip items when content is provided', () => {
		const uPlotInstance = createUPlotInstance(null);
		const content = [createTooltipContent()];

		renderTooltip({ uPlotInstance, content });

		const list = screen.queryByTestId('uplot-tooltip-list');

		expect(list).not.toBeNull();

		const marker = screen.getByTestId('uplot-tooltip-item-marker');
		const itemContent = screen.getByTestId('uplot-tooltip-item-content');

		expect(marker).toHaveStyle({ borderColor: '#ff0000' });
		expect(itemContent).toHaveStyle({ color: '#ff0000', fontWeight: '700' });
		expect(itemContent).toHaveTextContent('Series A: 10');
	});

	it('does not render tooltip list when content is empty', () => {
		const uPlotInstance = createUPlotInstance(null);

		renderTooltip({ uPlotInstance, content: [] });

		const list = screen.queryByTestId('uplot-tooltip-list');

		expect(list).toBeNull();
	});

	it('sets tooltip list height based on content length, height returned by Virtuoso', () => {
		const uPlotInstance = createUPlotInstance(null);
		const content = [createTooltipContent(), createTooltipContent()];

		renderTooltip({ uPlotInstance, content });

		const list = screen.getByTestId('uplot-tooltip-list');
		expect(list).toHaveStyle({ height: '210px' });
	});

	it('sets tooltip list height based on content length when Virtuoso reports 0 height', () => {
		const uPlotInstance = createUPlotInstance(null);
		const content = [createTooltipContent(), createTooltipContent()];
		mockTotalListHeight = 0;

		renderTooltip({ uPlotInstance, content });

		const list = screen.getByTestId('uplot-tooltip-list');
		// Falls back to content length: 2 items * 38px = 76px
		expect(list).toHaveStyle({ height: '76px' });
	});
});
@@ -0,0 +1,277 @@
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import uPlot, { AlignedData, Series } from 'uplot';

import { TooltipContentItem } from '../../types';
import {
	buildTooltipContent,
	FALLBACK_SERIES_COLOR,
	getTooltipBaseValue,
	resolveSeriesColor,
} from '../utils';

jest.mock('components/Graph/yAxisConfig', () => ({
	getToolTipValue: jest.fn(),
}));

const mockGetToolTipValue = getToolTipValue as jest.MockedFunction<
	typeof getToolTipValue
>;

function createUPlotInstance(): uPlot {
	return ({
		data: [],
		cursor: { idx: 0 },
	} as unknown) as uPlot;
}

describe('Tooltip utils', () => {
	describe('resolveSeriesColor', () => {
		it('returns string stroke when provided', () => {
			const u = createUPlotInstance();
			const stroke: Series.Stroke = '#ff0000';

			const color = resolveSeriesColor(stroke, u, 1);

			expect(color).toBe('#ff0000');
		});

		it('returns result of stroke function when provided', () => {
			const u = createUPlotInstance();
			const strokeFn: Series.Stroke = (uInstance, seriesIdx): string =>
				`color-${seriesIdx}-${uInstance.cursor.idx}`;

			const color = resolveSeriesColor(strokeFn, u, 2);

			expect(color).toBe('color-2-0');
		});

		it('returns fallback color when stroke is not provided', () => {
			const u = createUPlotInstance();

			const color = resolveSeriesColor(undefined, u, 1);

			expect(color).toBe(FALLBACK_SERIES_COLOR);
		});
	});

	describe('getTooltipBaseValue', () => {
		it('returns value from aligned data for non-stacked charts', () => {
			const data: AlignedData = [
				[0, 1],
				[10, 20],
			];

			const result = getTooltipBaseValue({
				data,
				index: 1,
				dataIndex: 1,
				isStackedBarChart: false,
			});

			expect(result).toBe(20);
		});

		it('returns null when value is missing', () => {
			const data: AlignedData = [
				[0, 1],
				[10, null],
			];

			const result = getTooltipBaseValue({
				data,
				index: 1,
				dataIndex: 1,
			});

			expect(result).toBeNull();
		});

		it('subtracts next visible stacked value for stacked bar charts', () => {
			// data[1] and data[2] contain stacked values at dataIndex 1
			const data: AlignedData = [
				[0, 1],
				[30, 60], // series 1 stacked
				[10, 20], // series 2 stacked
			];

			const series: Series[] = [
				{ label: 'x', show: true } as Series,
				{ label: 'A', show: true } as Series,
				{ label: 'B', show: true } as Series,
			];

			const result = getTooltipBaseValue({
				data,
				index: 1,
				dataIndex: 1,
				isStackedBarChart: true,
				series,
			});

			// 60 (stacked at series 1) - 20 (next visible stacked) = 40
			expect(result).toBe(40);
		});

		it('skips hidden series when computing base value for stacked charts', () => {
			const data: AlignedData = [
				[0, 1],
				[30, 60], // series 1 stacked
				[10, 20], // series 2 stacked but hidden
				[5, 10], // series 3 stacked and visible
			];

			const series: Series[] = [
				{ label: 'x', show: true } as Series,
				{ label: 'A', show: true } as Series,
				{ label: 'B', show: false } as Series,
				{ label: 'C', show: true } as Series,
			];

			const result = getTooltipBaseValue({
				data,
				index: 1,
				dataIndex: 1,
				isStackedBarChart: true,
				series,
			});

			// 60 (stacked at series 1) - 10 (next *visible* stacked, series 3) = 50
			expect(result).toBe(50);
		});

		it('does not subtract when there is no next visible series', () => {
			const data: AlignedData = [
				[0, 1],
				[10, 20], // series 1
				[5, (null as unknown) as number], // series 2 missing
			];

			const series: Series[] = [
				{ label: 'x', show: true } as Series,
				{ label: 'A', show: true } as Series,
				{ label: 'B', show: false } as Series,
			];

			const result = getTooltipBaseValue({
				data,
				index: 1,
				dataIndex: 1,
				isStackedBarChart: true,
				series,
			});

			expect(result).toBe(20);
		});
	});

	describe('buildTooltipContent', () => {
		const yAxisUnit = 'ms';
		const decimalPrecision: PrecisionOption = 2;

		beforeEach(() => {
			mockGetToolTipValue.mockReset();
			mockGetToolTipValue.mockImplementation(
				(value: string | number): string => `formatted-${value}`,
			);
		});

		function createSeriesConfig(): Series[] {
			return [
				{ label: 'x', show: true } as Series,
				{ label: 'A', show: true, stroke: '#ff0000' } as Series,
				{
					label: 'B',
					show: true,
					stroke: (_u: uPlot, idx: number): string => `color-${idx}`,
				} as Series,
				{ label: 'C', show: false, stroke: '#00ff00' } as Series,
			];
		}

		it('builds tooltip content with active series first', () => {
			const data: AlignedData = [[0], [10], [20], [30]];
			const series = createSeriesConfig();
			const dataIndexes = [null, 0, 0, 0];
			const u = createUPlotInstance();

			const result = buildTooltipContent({
				data,
				series,
				dataIndexes,
				activeSeriesIndex: 2,
				uPlotInstance: u,
				yAxisUnit,
				decimalPrecision,
			});

			expect(result).toHaveLength(2);
			// Active (series index 2) should come first
			expect(result[0]).toMatchObject<Partial<TooltipContentItem>>({
				label: 'B',
				value: 20,
				tooltipValue: 'formatted-20',
				color: 'color-2',
				isActive: true,
			});
			expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
				label: 'A',
				value: 10,
				tooltipValue: 'formatted-10',
				color: '#ff0000',
				isActive: false,
			});
		});

		it('skips series with null data index or non-finite values', () => {
			const data: AlignedData = [[0], [42], [Infinity]];
			const series: Series[] = [
				{ label: 'x', show: true } as Series,
				{ label: 'A', show: true, stroke: '#ff0000' } as Series,
				{ label: 'B', show: true, stroke: '#00ff00' } as Series,
			];
			const dataIndexes = [null, 0, null];
			const u = createUPlotInstance();

			const result = buildTooltipContent({
				data,
				series,
				dataIndexes,
				activeSeriesIndex: 1,
				uPlotInstance: u,
				yAxisUnit,
				decimalPrecision,
			});

			// Only the finite, non-null value from series A should be included
			expect(result).toHaveLength(1);
			expect(result[0].label).toBe('A');
		});

		it('uses stacked base values when building content for stacked bar charts', () => {
			const data: AlignedData = [[0], [60], [30]];
			const series: Series[] = [
				{ label: 'x', show: true } as Series,
				{ label: 'A', show: true, stroke: '#ff0000' } as Series,
				{ label: 'B', show: true, stroke: '#00ff00' } as Series,
			];
			const dataIndexes = [null, 0, 0];
			const u = createUPlotInstance();

			const result = buildTooltipContent({
				data,
				series,
				dataIndexes,
				activeSeriesIndex: 1,
				uPlotInstance: u,
				yAxisUnit,
				decimalPrecision,
				isStackedBarChart: true,
			});

			// baseValue for series 1 at index 0 should be 60 - 30 (next visible) = 30
			expect(result[0].value).toBe(30);
			expect(result[1].value).toBe(30);
		});
	});
});
@@ -4,7 +4,7 @@ import uPlot, { AlignedData, Series } from 'uplot';

import { TooltipContentItem } from '../types';

const FALLBACK_SERIES_COLOR = '#000000';
export const FALLBACK_SERIES_COLOR = '#000000';

export function resolveSeriesColor(
	stroke: Series.Stroke | undefined,
@@ -4,6 +4,7 @@ import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/gorilla/mux"
)
@@ -20,6 +21,7 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
		ResponseContentType: "",
		SuccessStatusCode: http.StatusCreated,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleEditor),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}
@@ -35,6 +37,7 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
		ResponseContentType: "",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}
@@ -20,6 +20,7 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/modules/session"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/SigNoz/signoz/pkg/zeus"
@@ -46,6 +47,7 @@ type provider struct {
	fieldsHandler fields.Handler
	authzHandler authz.Handler
	zeusHandler zeus.Handler
	querierHandler querier.Handler
}

func NewFactory(
@@ -66,6 +68,7 @@ func NewFactory(
	fieldsHandler fields.Handler,
	authzHandler authz.Handler,
	zeusHandler zeus.Handler,
	querierHandler querier.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
	return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
		return newProvider(
@@ -89,6 +92,7 @@ func NewFactory(
			fieldsHandler,
			authzHandler,
			zeusHandler,
			querierHandler,
		)
	})
}
@@ -114,6 +118,7 @@ func newProvider(
	fieldsHandler fields.Handler,
	authzHandler authz.Handler,
	zeusHandler zeus.Handler,
	querierHandler querier.Handler,
) (apiserver.APIServer, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
	router := mux.NewRouter().UseEncodedPath()
@@ -137,6 +142,7 @@ func newProvider(
		fieldsHandler: fieldsHandler,
		authzHandler: authzHandler,
		zeusHandler: zeusHandler,
		querierHandler: querierHandler,
	}

	provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -209,6 +215,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
		return err
	}

	if err := provider.addQuerierRoutes(router); err != nil {
		return err
	}

	return nil
}
471 pkg/apiserver/signozapiserver/querier.go Normal file
@@ -0,0 +1,471 @@
package signozapiserver

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/gorilla/mux"
)

func (provider *provider) addQuerierRoutes(router *mux.Router) error {
	if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
		ID: "QueryRangeV5",
		Tags: []string{"querier"},
		Summary: "Query range",
		Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
		Request: new(qbtypes.QueryRangeRequest),
		RequestContentType: "application/json",
		RequestExamples: []handler.OpenAPIExample{
			{
				Name: "traces_time_series",
				Summary: "Time series: count spans grouped by service",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "span_count"},
									},
									"stepInterval": "60s",
									"filter": map[string]any{"expression": "service.name = 'frontend'"},
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
									"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
									"limit": 10,
								},
							},
						},
					},
				},
			},
			{
				Name: "logs_time_series",
				Summary: "Time series: count error logs grouped by service",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "logs",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "log_count"},
									},
									"stepInterval": "60s",
									"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
									"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
									"limit": 10,
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_gauge_time_series",
				Summary: "Time series: latest gauge value averaged across series",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
									},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_rate_time_series",
				Summary: "Time series: rate of cumulative counter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
									},
									"stepInterval": 120,
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_histogram_time_series",
				Summary: "Time series: p99 latency from histogram",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
									},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
						},
					},
				},
			},
			{
				Name: "logs_raw",
				Summary: "Raw: fetch raw log records",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "raw",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "logs",
									"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
									"selectFields": []any{
										map[string]any{"name": "body", "fieldContext": "log"},
										map[string]any{"name": "service.name", "fieldContext": "resource"},
									},
									"order": []any{
										map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
										map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
									},
									"limit": 50,
									"offset": 0,
								},
							},
						},
					},
				},
			},
			{
				Name: "traces_raw",
				Summary: "Raw: fetch raw span records",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "raw",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
									"selectFields": []any{
										map[string]any{"name": "name", "fieldContext": "span"},
										map[string]any{"name": "duration_nano", "fieldContext": "span"},
									},
									"order": []any{
										map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
									},
									"limit": 100,
								},
							},
						},
					},
				},
			},
			{
				Name: "traces_scalar",
				Summary: "Scalar: total span count",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "span_count"},
									},
									"filter": map[string]any{"expression": "service.name = 'frontend'"},
								},
							},
						},
					},
				},
			},
			{
				Name: "logs_scalar",
				Summary: "Scalar: total error log count",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "logs",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "error_count"},
									},
									"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_scalar",
				Summary: "Scalar: single reduced metric value",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
									},
									"stepInterval": "60s",
								},
							},
						},
					},
				},
			},
			{
				Name: "formula",
				Summary: "Formula: error rate from two trace queries",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "B",
									"signal": "traces",
									"aggregations": []any{map[string]any{"expression": "count()"}},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
							map[string]any{
								"type": "builder_formula",
								"spec": map[string]any{
									"name": "error_rate",
									"expression": "A / B * 100",
								},
							},
						},
					},
				},
			},
			{
				Name: "promql",
				Summary: "PromQL: request rate with UTF-8 metric name",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "promql",
								"spec": map[string]any{
									"name": "request_rate",
									"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
									"step": 60,
								},
							},
						},
					},
				},
			},
			{
				Name: "clickhouse_sql_traces_time_series",
				Summary: "ClickHouse SQL: traces time series with resource filter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "clickhouse_sql",
								"spec": map[string]any{
									"name": "span_rate",
									"query": "WITH __resource_filter AS (" +
										" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
										" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
										" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
										" FROM signoz_traces.distributed_signoz_index_v3" +
										" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
										" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
										" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
										" GROUP BY ts ORDER BY ts",
								},
							},
						},
					},
				},
			},
			{
				Name: "clickhouse_sql_logs_raw",
				Summary: "ClickHouse SQL: raw logs with resource filter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "raw",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "clickhouse_sql",
								"spec": map[string]any{
									"name": "recent_errors",
									"query": "WITH __resource_filter AS (" +
										" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
										" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
										" ) SELECT timestamp, body" +
										" FROM signoz_logs.distributed_logs_v2" +
										" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
										" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
										" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
										" AND severity_text = 'ERROR'" +
										" ORDER BY timestamp DESC LIMIT 100",
								},
							},
						},
					},
				},
			},
			{
				Name: "clickhouse_sql_traces_scalar",
				Summary: "ClickHouse SQL: scalar aggregate with resource filter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "clickhouse_sql",
								"spec": map[string]any{
									"name": "total_spans",
									"query": "WITH __resource_filter AS (" +
										" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
										" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
										" ) SELECT count() AS value" +
										" FROM signoz_traces.distributed_signoz_index_v3" +
										" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
										" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
										" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
								},
							},
						},
					},
				},
			},
		},
		Response: new(qbtypes.QueryRangeResponse),
		ResponseContentType: "application/json",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	if err := router.Handle("/api/v5/substitute_vars", handler.New(provider.authZ.ViewAccess(provider.querierHandler.ReplaceVariables), handler.OpenAPIDef{
		ID: "ReplaceVariables",
		Tags: []string{"querier"},
		Summary: "Replace variables",
		Description: "Replace variables in a query",
		Request: new(qbtypes.QueryRangeRequest),
		RequestContentType: "application/json",
		Response: new(qbtypes.QueryRangeRequest),
		ResponseContentType: "application/json",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	return nil
}
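For orientation, a minimal client sketch for the route registered above. The path, method, payload shape, and viewer-level access come from this diff; the base URL and the SIGNOZ-API-KEY header name are deployment assumptions, not part of this change.

// querier_client_sketch.go — a minimal sketch, not part of this change.
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Payload mirrors the traces_time_series example above.
	body := []byte(`{
	  "schemaVersion": "v1",
	  "start": 1640995200000,
	  "end": 1640998800000,
	  "requestType": "time_series",
	  "compositeQuery": {"queries": [{
	    "type": "builder_query",
	    "spec": {
	      "name": "A",
	      "signal": "traces",
	      "aggregations": [{"expression": "count()", "alias": "span_count"}],
	      "stepInterval": "60s"
	    }
	  }]}
	}`)

	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<your-api-key>") // assumed header name; check your deployment

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}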
@@ -21,18 +21,17 @@ import (
	"github.com/SigNoz/signoz/pkg/variables"
)

type API struct {
type handler struct {
	set factory.ProviderSettings
	analytics analytics.Analytics
	querier Querier
}

func NewAPI(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) *API {
	return &API{set: set, querier: querier, analytics: analytics}
func NewHandler(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) Handler {
	return &handler{set: set, querier: querier, analytics: analytics}
}

func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {

func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
@@ -53,7 +52,7 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {

			queryJSON, _ := json.Marshal(queryRangeRequest)

			a.set.Logger.ErrorContext(ctx, "panic in QueryRange",
			handler.set.Logger.ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
@@ -79,17 +78,17 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
		return
	}

	queryRangeResponse, err := a.querier.QueryRange(ctx, orgID, &queryRangeRequest)
	queryRangeResponse, err := handler.querier.QueryRange(ctx, orgID, &queryRangeRequest)
	if err != nil {
		render.Error(rw, err)
		return
	}

	a.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
	handler.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)

	render.Success(rw, http.StatusOK, queryRangeResponse)
}
func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	// get the param from url and add it to body
@@ -153,7 +152,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {

			queryJSON, _ := json.Marshal(queryRangeRequest)

			a.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
			handler.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
@@ -193,7 +192,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	flusher.Flush()

	client := &qbtypes.RawStream{Name: req.RemoteAddr, Logs: make(chan *qbtypes.RawRow, 1000), Done: make(chan *bool), Error: make(chan error)}
	go a.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
	go handler.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)

	for {
		select {
@@ -220,7 +219,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {

// TODO(srikanthccv): everything done here can be done on frontend as well
// For the time being I am adding a helper function
func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
@@ -272,7 +271,7 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	render.Success(rw, http.StatusOK, queryRangeRequest)
}

func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
func (handler *handler) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		return
@@ -305,8 +304,9 @@ func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEv
	}

	if !event.HasData {
		a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
		handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
		return
	}
	a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)

	handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}
@@ -80,11 +80,16 @@ func (q *builderQuery[T]) Fingerprint() string {
		case qbtypes.LogAggregation:
			aggParts = append(aggParts, a.Expression)
		case qbtypes.MetricAggregation:
			aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
			var spaceAggParamStr string
			if a.ComparisonSpaceAggregationParam != nil {
				spaceAggParamStr = a.ComparisonSpaceAggregationParam.StringValue()
			}
			aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
				a.MetricName,
				a.Temporality.StringValue(),
				a.TimeAggregation.StringValue(),
				a.SpaceAggregation.StringValue(),
				spaceAggParamStr,
			))
		}
	}
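A small illustration of what the extra fingerprint component buys: two metric queries that differ only in their comparison parameter would otherwise hash identically and could serve each other's cached buckets. The rendered param strings below are stand-ins; the exact StringValue() output is not shown in this diff.

package main

import "fmt"

func main() {
	// Illustrative fingerprints only — the real ones come from Fingerprint() above.
	base := "http.server.duration.bucket:cumulative:increase:histogram_count"
	a := base + ":" + "<= 0.5" // threshold is now part of the cache key
	b := base + ":" + "<= 1.0"
	fmt.Println(a == b) // false — two thresholds, two distinct cache entries
}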
@@ -2,6 +2,7 @@ package querier

import (
	"context"
	"net/http"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
@@ -20,3 +21,9 @@ type BucketCache interface {
	// store fresh buckets for future hits
	Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result)
}

type Handler interface {
	QueryRange(rw http.ResponseWriter, req *http.Request)
	QueryRawStream(rw http.ResponseWriter, req *http.Request)
	ReplaceVariables(rw http.ResponseWriter, req *http.Request)
}
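The Handler interface is what lets an enterprise build wrap the community handler, as the callback-based wiring elsewhere in this change suggests. A minimal sketch of such a decorator, assuming only the interface above — the type name and intercept logic are illustrative, not the actual ee/querier implementation:

package querier

import "net/http"

// decoratedHandler is an illustrative wrapper that satisfies Handler by
// delegating to an inner Handler, intercepting only what it needs.
type decoratedHandler struct {
	next Handler
}

func NewDecoratedHandler(next Handler) Handler {
	return &decoratedHandler{next: next}
}

func (d *decoratedHandler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	// e.g. inspect or rewrite the request here before delegating
	d.next.QueryRange(rw, req)
}

func (d *decoratedHandler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	d.next.QueryRawStream(rw, req)
}

func (d *decoratedHandler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	d.next.ReplaceVariables(rw, req)
}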
@@ -1,454 +1 @@
package querier

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)

// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
	ID: "QueryRangeV5",
	Tags: []string{"query"},
	Summary: "Query range",
	Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
	Request: new(qbtypes.QueryRangeRequest),
	RequestContentType: "application/json",
	RequestExamples: queryRangeV5Examples,
	Response: new(qbtypes.QueryRangeResponse),
	ResponseContentType: "application/json",
	SuccessStatusCode: http.StatusOK,
	ErrorStatusCodes: []int{http.StatusBadRequest},
	SecuritySchemes: []handler.OpenAPISecurityScheme{
		{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
		{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
	},
}

var queryRangeV5Examples = []handler.OpenAPIExample{
	// [removed: ~420 example lines, identical to the RequestExamples inlined
	// in pkg/apiserver/signozapiserver/querier.go above]
}
@@ -79,8 +79,6 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/version"

	querierAPI "github.com/SigNoz/signoz/pkg/querier"
)

type status string
@@ -145,8 +143,6 @@ type APIHandler struct {

	LicensingAPI licensing.API

	QuerierAPI *querierAPI.API

	QueryParserAPI *queryparser.API

	Signoz *signoz.SigNoz
@@ -175,8 +171,6 @@ type APIHandlerOpts struct {

	LicensingAPI licensing.API

	QuerierAPI *querierAPI.API

	QueryParserAPI *queryparser.API

	Signoz *signoz.SigNoz
@@ -239,7 +233,6 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
		AlertmanagerAPI: opts.AlertmanagerAPI,
		LicensingAPI: opts.LicensingAPI,
		Signoz: opts.Signoz,
		QuerierAPI: opts.QuerierAPI,
		QueryParserAPI: opts.QueryParserAPI,
	}

@@ -396,7 +389,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
	subRouter.HandleFunc("/query_progress", am.ViewAccess(aH.GetQueryProgressUpdates)).Methods(http.MethodGet)

	// live logs
	subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
	subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.Signoz.Handlers.QuerierHandler.QueryRawStream)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -470,12 +463,6 @@ func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *middlew
	subRouter.HandleFunc("/metric/metric_metadata", am.ViewAccess(aH.getMetricMetadata)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterQueryRangeV5Routes(router *mux.Router, am *middleware.AuthZ) {
	subRouter := router.PathPrefix("/api/v5").Subrouter()
	subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QuerierAPI.QueryRange)).Methods(http.MethodPost)
	subRouter.HandleFunc("/substitute_vars", am.ViewAccess(aH.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
}

// todo(remove): Implemented at render package (github.com/SigNoz/signoz/pkg/http/render) with the new error structure
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
	writeHttpResponse(w, data)
@@ -23,7 +23,6 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/querier"
	querierAPI "github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/query-service/agentConf"
	"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
		LicensingAPI: nooplicensing.NewLicenseAPI(),
		Signoz: signoz,
		QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	}, config)
	if err != nil {
@@ -217,7 +215,6 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
	api.RegisterInfraMetricsRoutes(r, am)
	api.RegisterWebSocketPaths(r, am)
	api.RegisterQueryRangeV4Routes(r, am)
	api.RegisterQueryRangeV5Routes(r, am)
	api.RegisterMessagingQueuesRoutes(r, am)
	api.RegisterThirdPartyApiRoutes(r, am)
	api.MetricExplorerRoutes(r, am)
@@ -1,6 +1,7 @@
package signoz

import (
	"github.com/SigNoz/signoz/pkg/analytics"
	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
	"github.com/SigNoz/signoz/pkg/factory"
@@ -50,12 +51,14 @@ type Handlers struct {
	Fields fields.Handler
	AuthzHandler authz.Handler
	ZeusHandler zeus.Handler
	QuerierHandler querier.Handler
}

func NewHandlers(
	modules Modules,
	providerSettings factory.ProviderSettings,
	querier querier.Querier,
	analytics analytics.Analytics,
	querierHandler querier.Handler,
	licensing licensing.Licensing,
	global global.Global,
	flaggerService flagger.Flagger,
@@ -80,5 +83,6 @@ func NewHandlers(
		Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
		AuthzHandler: signozauthzapi.NewHandler(authz),
		ZeusHandler: zeus.NewHandler(zeusService, licensing),
		QuerierHandler: querierHandler,
	}
}
@@ -13,6 +13,7 @@ import (
	"github.com/SigNoz/signoz/pkg/factory/factorytest"
	"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/queryparser"
	"github.com/SigNoz/signoz/pkg/sharder"
	"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -42,7 +43,8 @@ func TestNewHandlers(t *testing.T) {
	dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)

	handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil, nil)
	querierHandler := querier.NewHandler(providerSettings, nil, nil)
	handlers := NewHandlers(modules, providerSettings, nil, querierHandler, nil, nil, nil, nil, nil, nil, nil)
	reflectVal := reflect.ValueOf(handlers)
	for i := 0; i < reflectVal.NumField(); i++ {
		f := reflectVal.Field(i)
@@ -4,7 +4,6 @@ import (
	"bytes"
	"context"
	"encoding/json"
	"net/http"
	"os"
	"reflect"

@@ -27,7 +26,6 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/gorilla/mux"
	"github.com/SigNoz/signoz/pkg/zeus"
	"github.com/swaggest/jsonschema-go"
	"github.com/swaggest/openapi-go"
@@ -60,15 +58,12 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
		struct{ fields.Handler }{},
		struct{ authz.Handler }{},
		struct{ zeus.Handler }{},
		struct{ querier.Handler }{},
	).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
	if err != nil {
		return nil, err
	}

	// Register routes that live outside the APIServer modules
	// so they are discovered by the OpenAPI walker.
	registerQueryRoutes(apiserver.Router())

	reflector := openapi3.NewReflector()
	reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
		if defaultDefName == "RenderSuccessResponse" {
@@ -159,10 +154,3 @@ func convertJSONNumbers(v interface{}) {
		}
	}
}

func registerQueryRoutes(router *mux.Router) {
	router.Handle("/api/v5/query_range", handler.New(
		func(http.ResponseWriter, *http.Request) {},
		querier.QueryRangeV5OpenAPIDef,
	)).Methods(http.MethodPost)
}
@@ -253,6 +253,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
			handlers.Fields,
			handlers.AuthzHandler,
			handlers.ZeusHandler,
			handlers.QuerierHandler,
		),
	)
}
@@ -90,6 +90,7 @@ func New(
	authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
	dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
	gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
	querierHandlerCallback func(factory.ProviderSettings, querier.Querier, analytics.Analytics) querier.Handler,
) (*SigNoz, error) {
	// Initialize instrumentation
	instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -391,8 +392,11 @@ func New(

	userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)

	// Initialize the querier handler via callback (allows EE to decorate with anomaly detection)
	querierHandler := querierHandlerCallback(providerSettings, querier, analytics)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)
	handlers := NewHandlers(modules, providerSettings, analytics, querierHandler, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)

	// Initialize the API server
	apiserver, err := factory.NewProviderFromNamedMap(
@@ -123,7 +123,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
	origTimeAgg := query.Aggregations[0].TimeAggregation
	origGroupBy := slices.Clone(query.GroupBy)

	if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
	if (query.Aggregations[0].SpaceAggregation.IsPercentile() ||
		query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount) &&
		query.Aggregations[0].Type != metrictypes.ExpHistogramType {
		// add le in the group by if doesn't exist
		leExists := false
@@ -154,7 +155,11 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
		}

		// make the time aggregation rate and space aggregation sum
		query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
		if query.Aggregations[0].SpaceAggregation.IsPercentile() {
			query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
		} else {
			query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
		}
		query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
	}

@@ -524,7 +529,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
		return "", nil, errors.Newf(
			errors.TypeInvalidInput,
			errors.CodeInvalidInput,
			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
		)
	}
	sb := sqlbuilder.NewSelectBuilder()
@@ -577,6 +582,29 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
		sb.From("__spatial_aggregation_cte")
		sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
		sb.GroupBy("ts")
		if query.Having != nil && query.Having.Expression != "" {
			rewriter := querybuilder.NewHavingExpressionRewriter()
			rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
			sb.Having(rewrittenExpr)
		}
	} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
		sb.Select("ts")

		for _, g := range query.GroupBy {
			sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}

		aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].ComparisonSpaceAggregationParam)
		if err != nil {
			return nil, err
		}
		sb.SelectMore(aggQuery)

		sb.From("__spatial_aggregation_cte")

		sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
		sb.GroupBy("ts")

		if query.Having != nil && query.Having.Expression != "" {
			rewriter := querybuilder.NewHavingExpressionRewriter()
			rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
@@ -1,6 +1,7 @@
package telemetrymetrics

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
@@ -308,3 +309,20 @@ func AggregationColumnForSamplesTable(
	}
	return aggregationColumn, nil
}

func AggregationQueryForHistogramCount(param *metrictypes.ComparisonSpaceAggregationParam) (string, error) {
	if param == nil {
		return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
	}
	histogramCountThreshold := param.Threshold

	switch param.Operater {
	case "<=":
		return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
	case ">":
		return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
	default:
		return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
	}

}
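The generated SQL above linearly interpolates a cumulative histogram at the threshold: argMaxIf/maxIf pick the cumulative count and boundary of the largest bucket (le) not exceeding the threshold, argMinIf/minIf pick the next boundary above it, and the in-between count is apportioned by where the threshold falls; the `>` case simply subtracts that result from the overall total. A plain-Go sketch of the same arithmetic, assuming sorted boundaries and a threshold strictly inside the bucket range:

package main

import "fmt"

// histogramCountLE interpolates the cumulative count of observations <= threshold.
// les are bucket upper bounds sorted ascending; counts[i] is the cumulative
// count for les[i]. Assumes les[0] <= threshold < les[len(les)-1].
func histogramCountLE(les, counts []float64, threshold float64) float64 {
	for i := 1; i < len(les); i++ {
		if les[i] > threshold {
			// fraction of the bracketing bucket that lies below the threshold
			frac := (threshold - les[i-1]) / (les[i] - les[i-1])
			return counts[i-1] + (counts[i]-counts[i-1])*frac
		}
	}
	return counts[len(counts)-1]
}

func main() {
	les := []float64{0.1, 0.5, 1.0}
	counts := []float64{40, 80, 100}
	// The ">" operator in the SQL is total minus this value.
	fmt.Println(histogramCountLE(les, counts, 0.75)) // 90: halfway between 80 and 100
}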
@@ -60,7 +60,7 @@ type IngestionKeysParams struct {
}

type SearchIngestionKeysParams struct {
	Name    string `query:"name"`
	Name    string `query:"name" required:"true"`
	Page    int    `query:"page"`
	PerPage int    `query:"per_page"`
}
@@ -71,14 +71,14 @@ type GettableIngestionKeys struct {
}

type PostableIngestionKey struct {
	Name      string    `json:"name"`
	Name      string    `json:"name" required:"true"`
	Tags      []string  `json:"tags"`
	ExpiresAt time.Time `json:"expires_at"`
}

type GettableCreatedIngestionKey struct {
	ID    string `json:"id"`
	Value string `json:"value"`
	ID    string `json:"id" required:"true"`
	Value string `json:"value" required:"true"`
}

type PostableIngestionKeyLimit struct {
@@ -88,10 +88,10 @@ type PostableIngestionKeyLimit struct {
}

type GettableCreatedIngestionKeyLimit struct {
	ID string `json:"id"`
	ID string `json:"id" required:"true"`
}

type UpdatableIngestionKeyLimit struct {
	Config LimitConfig `json:"config"`
	Config LimitConfig `json:"config" required:"true"`
	Tags   []string    `json:"tags"`
}
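The required:"true" struct tags added here and in the host/profile types further down are presumably what the OpenAPI schema generator reads to mark these properties as required in the published spec; that is inferred from the tag name rather than shown directly in this diff.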
@@ -2,6 +2,7 @@ package metrictypes

import (
	"database/sql/driver"
	"fmt"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
@@ -189,17 +190,18 @@ type SpaceAggregation struct {
}

var (
	SpaceAggregationUnspecified  = SpaceAggregation{valuer.NewString("")}
	SpaceAggregationSum          = SpaceAggregation{valuer.NewString("sum")}
	SpaceAggregationAvg          = SpaceAggregation{valuer.NewString("avg")}
	SpaceAggregationMin          = SpaceAggregation{valuer.NewString("min")}
	SpaceAggregationMax          = SpaceAggregation{valuer.NewString("max")}
	SpaceAggregationCount        = SpaceAggregation{valuer.NewString("count")}
	SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
	SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
	SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
	SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
	SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
	SpaceAggregationUnspecified    = SpaceAggregation{valuer.NewString("")}
	SpaceAggregationSum            = SpaceAggregation{valuer.NewString("sum")}
	SpaceAggregationAvg            = SpaceAggregation{valuer.NewString("avg")}
	SpaceAggregationMin            = SpaceAggregation{valuer.NewString("min")}
	SpaceAggregationMax            = SpaceAggregation{valuer.NewString("max")}
	SpaceAggregationCount          = SpaceAggregation{valuer.NewString("count")}
	SpaceAggregationPercentile50   = SpaceAggregation{valuer.NewString("p50")}
	SpaceAggregationPercentile75   = SpaceAggregation{valuer.NewString("p75")}
	SpaceAggregationPercentile90   = SpaceAggregation{valuer.NewString("p90")}
	SpaceAggregationPercentile95   = SpaceAggregation{valuer.NewString("p95")}
	SpaceAggregationPercentile99   = SpaceAggregation{valuer.NewString("p99")}
	SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
)

func (SpaceAggregation) Enum() []any {
@@ -214,6 +216,7 @@ func (SpaceAggregation) Enum() []any {
		SpaceAggregationPercentile90,
		SpaceAggregationPercentile95,
		SpaceAggregationPercentile99,
		SpaceAggregationHistogramCount,
	}
}
@@ -256,3 +259,12 @@ type MetricTableHints struct {
type MetricValueFilter struct {
	Value float64
}

type ComparisonSpaceAggregationParam struct {
	Operator  string  `json:"operator"`
	Threshold float64 `json:"threshold"`
}

func (param ComparisonSpaceAggregationParam) StringValue() string {
	return fmt.Sprintf("{\"operator\": \"%s\", \"threshold\": \"%f\"}", param.Operator, param.Threshold)
}
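With the JSON key aligned to the field name, StringValue renders, for example, {"operator": "<=", "threshold": "1000.000000"} (Go's %f verb prints six decimal places).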
@@ -446,6 +446,8 @@ type MetricAggregation struct {
	TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
	// space aggregation to apply to the query
	SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
	// optional comparison param; required when the space aggregation is histogram_count
	ComparisonSpaceAggregationParam *metrictypes.ComparisonSpaceAggregationParam `json:"comparisonSpaceAggregationParam"`
	// table hints to use for the query
	TableHints *metrictypes.MetricTableHints `json:"-"`
	// value filter to apply to the query
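A self-contained sketch of the aggregation fragment a histogram_count query carries on the wire; the keys follow the json tags in this hunk, and the values mirror the integration tests further down ("rate" time aggregation, threshold 1000):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Aggregation fragment as sent by a client; only the fields visible in
	// the hunk above are included here.
	agg := map[string]any{
		"timeAggregation":  "rate",
		"spaceAggregation": "histogram_count",
		"comparisonSpaceAggregationParam": map[string]any{
			"operator":  "<=",
			"threshold": 1000,
		},
	}
	out, _ := json.MarshalIndent(agg, "", "  ")
	fmt.Println(string(out))
}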
@@ -11,28 +11,28 @@ type PostableHost struct {
}

type PostableProfile struct {
	UsesOtel                     bool     `json:"uses_otel"`
	HasExistingObservabilityTool bool     `json:"has_existing_observability_tool"`
	ExistingObservabilityTool    string   `json:"existing_observability_tool"`
	ReasonsForInterestInSigNoz   []string `json:"reasons_for_interest_in_signoz"`
	LogsScalePerDayInGB          int64    `json:"logs_scale_per_day_in_gb"`
	NumberOfServices             int64    `json:"number_of_services"`
	NumberOfHosts                int64    `json:"number_of_hosts"`
	WhereDidYouDiscoverSigNoz    string   `json:"where_did_you_discover_signoz"`
	TimelineForMigratingToSigNoz string   `json:"timeline_for_migrating_to_signoz"`
	UsesOtel                     bool     `json:"uses_otel" required:"true"`
	HasExistingObservabilityTool bool     `json:"has_existing_observability_tool" required:"true"`
	ExistingObservabilityTool    string   `json:"existing_observability_tool" required:"true"`
	ReasonsForInterestInSigNoz   []string `json:"reasons_for_interest_in_signoz" required:"true"`
	LogsScalePerDayInGB          int64    `json:"logs_scale_per_day_in_gb" required:"true"`
	NumberOfServices             int64    `json:"number_of_services" required:"true"`
	NumberOfHosts                int64    `json:"number_of_hosts" required:"true"`
	WhereDidYouDiscoverSigNoz    string   `json:"where_did_you_discover_signoz" required:"true"`
	TimelineForMigratingToSigNoz string   `json:"timeline_for_migrating_to_signoz" required:"true"`
}

type GettableHost struct {
	Name  string `json:"name"`
	State string `json:"state"`
	Tier  string `json:"tier"`
	Hosts []Host `json:"hosts"`
	Name  string `json:"name" required:"true"`
	State string `json:"state" required:"true"`
	Tier  string `json:"tier" required:"true"`
	Hosts []Host `json:"hosts" required:"true"`
}

type Host struct {
	Name      string `json:"name"`
	IsDefault bool   `json:"is_default"`
	URL       string `json:"url"`
	Name      string `json:"name" required:"true"`
	IsDefault bool   `json:"is_default" required:"true"`
	URL       string `json:"url" required:"true"`
}

func NewGettableHost(data []byte) *GettableHost {
@@ -52,6 +52,7 @@ def build_builder_query(
    time_aggregation: str,
    space_aggregation: str,
    *,
    comparisonSpaceAggregationParam: Optional[Dict] = None,
    temporality: Optional[str] = None,
    step_interval: int = DEFAULT_STEP_INTERVAL,
    group_by: Optional[List[str]] = None,
@@ -74,7 +75,8 @@ def build_builder_query(
    }
    if temporality:
        spec["aggregations"][0]["temporality"] = temporality

    if comparisonSpaceAggregationParam:
        spec["aggregations"][0]["comparisonSpaceAggregationParam"] = comparisonSpaceAggregationParam
    if group_by:
        spec["groupBy"] = [
            {
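A caller passes comparisonSpaceAggregationParam as a plain dict, e.g. {"operator": "<=", "threshold": 1000}; the fixture forwards it verbatim into aggregations[0], matching the comparisonSpaceAggregationParam json tag on MetricAggregation above.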
259	tests/integration/src/querier/08_metrics_histogram.py	Normal file
@@ -0,0 +1,259 @@
"""
Look at the histogram_data_1h.jsonl file for the relevant data.
"""

from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List

import pytest

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    build_builder_query,
    get_series_values,
    make_query_request,
)
from fixtures.utils import get_testdata_file_path

FILE = get_testdata_file_path("histogram_data_1h.jsonl")
@pytest.mark.parametrize(
    "threshold, operator, first_value, last_value",
    [
        (1000, "<=", 11, 69),
        (100, "<=", 1.1, 6.9),
        (7500, "<=", 16.75, 74.75),
        (8000, "<=", 17, 75),
        (80000, "<=", 17, 75),  # the +Inf bucket has no finite upper bound, so every threshold beyond the largest finite bucket reports the same value
        (1000, ">", 7, 7),
        (100, ">", 16.9, 69.1),
        (7500, ">", 1.25, 1.25),
        (8000, ">", 1, 1),
        (80000, ">", 1, 1),  # same saturation beyond the largest finite bucket
    ],
)
def test_histogram_count_for_one_endpoint(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    threshold: float,
    operator: str,
    first_value: float,
    last_value: float,
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = "test_bucket"

    metrics = Metrics.load_from_file(
        FILE,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
        "rate",
        "histogram_count",
        comparisonSpaceAggregationParam={
            "threshold": threshold,
            "operator": operator,
        },
        filter_expression='endpoint = "/health"',
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
    assert len(result_values) == 59
    assert result_values[0]["value"] == first_value
    assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
    "threshold, operator, first_value, last_value",
    [
        (1000, "<=", 22, 138),
        (100, "<=", 2.2, 13.8),
        (7500, "<=", 33.5, 149.5),
        (8000, "<=", 34, 150),
        (80000, "<=", 34, 150),  # the +Inf bucket has no finite upper bound, so every threshold beyond the largest finite bucket reports the same value
        (1000, ">", 14, 14),
        (100, ">", 33.8, 138.2),
        (7500, ">", 2.5, 2.5),
        (8000, ">", 2, 2),
        (80000, ">", 2, 2),  # same saturation beyond the largest finite bucket
    ],
)
def test_histogram_count_for_one_service(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    threshold: float,
    operator: str,
    first_value: float,
    last_value: float,
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = "test_bucket"

    metrics = Metrics.load_from_file(
        FILE,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
        "rate",
        "histogram_count",
        comparisonSpaceAggregationParam={
            "threshold": threshold,
            "operator": operator,
        },
        filter_expression='service = "api"',
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
    assert len(result_values) == 59
    assert result_values[0]["value"] == first_value
    assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
    "threshold, operator, first_value, last_value",
    [
        (1000, "<=", 11, 69),
        (100, "<=", 1.1, 6.9),
        (7500, "<=", 16.75, 74.75),
        (8000, "<=", 17, 75),
        (80000, "<=", 17, 75),  # the +Inf bucket has no finite upper bound, so every threshold beyond the largest finite bucket reports the same value
        (1000, ">", 7, 7),
        (100, ">", 16.9, 69.1),
        (7500, ">", 1.25, 1.25),
        (8000, ">", 1, 1),
        (80000, ">", 1, 1),  # same saturation beyond the largest finite bucket
    ],
)
def test_histogram_count_for_delta_service(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    threshold: float,
    operator: str,
    first_value: float,
    last_value: float,
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = "test_bucket"

    metrics = Metrics.load_from_file(
        FILE,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
        "rate",
        "histogram_count",
        comparisonSpaceAggregationParam={
            "threshold": threshold,
            "operator": operator,
        },
        filter_expression='service = "web"',
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
    assert len(result_values) == 60  # with delta temporality the 10:01 point is also reported
    assert result_values[1]["value"] == first_value  # first_value refers to the 10:02 point, keeping parity with the cumulative cases
    assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
    "threshold, operator, first_value, last_value",
    [
        (1000, "<=", 33, 207),
        (100, "<=", 3.3, 20.7),
        (7500, "<=", 50.25, 224.25),
        (8000, "<=", 51, 225),
        (80000, "<=", 51, 225),
        (1000, ">", 21, 21),
        (100, ">", 50.7, 207.3),
        (7500, ">", 3.75, 3.75),
        (8000, ">", 3, 3),
        (80000, ">", 3, 3),
    ],
)
def test_histogram_count_for_all_services(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    threshold: float,
    operator: str,
    first_value: float,
    last_value: float,
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = "test_bucket"

    metrics = Metrics.load_from_file(
        FILE,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query = build_builder_query(
        "A",
        metric_name,
        "rate",
        "histogram_count",
        comparisonSpaceAggregationParam={
            "threshold": threshold,
            "operator": operator,
        },
        # no service filter; this also exercises mixed-temporality handling
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
    assert len(result_values) == 60
    assert result_values[1]["value"] == first_value  # first_value refers to the 10:02 point, keeping parity with the cumulative cases
    assert result_values[-1]["value"] == last_value
1440	tests/integration/testdata/histogram_data_1h.jsonl	vendored	Normal file
File diff suppressed because it is too large