Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-20 15:52:41 +00:00)

Compare commits: 4 commits, fts-body...chore/cmd-
| Author | SHA1 | Date |
|---|---|---|
| | 23f9e1e0e9 | |
| | 92b07d15ea | |
| | a0dad1602e | |
| | 5cf5b70aca | |
@@ -85,6 +85,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
		func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
			return noopgateway.NewProviderFactory()
		},
+		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
+			return querier.NewHandler(ps, q, a)
+		},
	)
	if err != nil {
		logger.ErrorContext(ctx, "failed to create signoz", "error", err)
@@ -9,6 +9,7 @@ import (
	"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
	"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
	"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
+	eequerier "github.com/SigNoz/signoz/ee/querier"
	"github.com/SigNoz/signoz/ee/authz/openfgaschema"
	"github.com/SigNoz/signoz/ee/gateway/httpgateway"
	enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"

@@ -124,6 +125,10 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
		func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
			return httpgateway.NewProviderFactory(licensing)
		},
+		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
+			communityHandler := querier.NewHandler(ps, q, a)
+			return eequerier.NewHandler(ps, q, communityHandler)
+		},
	)

	if err != nil {
@@ -307,11 +307,16 @@ components:
          type: string
        value:
          type: string
      required:
        - id
        - value
      type: object
    GatewaytypesGettableCreatedIngestionKeyLimit:
      properties:
        id:
          type: string
      required:
        - id
      type: object
    GatewaytypesGettableIngestionKeys:
      properties:
@@ -432,6 +437,8 @@ components:
            type: string
          nullable: true
          type: array
+      required:
+        - name
      type: object
    GatewaytypesPostableIngestionKeyLimit:
      properties:
@@ -454,6 +461,8 @@ components:
            type: string
          nullable: true
          type: array
+      required:
+        - config
      type: object
    MetricsexplorertypesListMetric:
      properties:
@@ -1933,6 +1942,11 @@ components:
          type: string
        tier:
          type: string
+      required:
+        - name
+        - state
+        - tier
+        - hosts
      type: object
    ZeustypesHost:
      properties:
@@ -1942,6 +1956,10 @@ components:
          type: string
        url:
          type: string
+      required:
+        - name
+        - is_default
+        - url
      type: object
    ZeustypesPostableHost:
      properties:
@@ -1976,6 +1994,16 @@ components:
          type: boolean
        where_did_you_discover_signoz:
          type: string
+      required:
+        - uses_otel
+        - has_existing_observability_tool
+        - existing_observability_tool
+        - reasons_for_interest_in_signoz
+        - logs_scale_per_day_in_gb
+        - number_of_services
+        - number_of_hosts
+        - where_did_you_discover_signoz
+        - timeline_for_migrating_to_signoz
      type: object
  securitySchemes:
    api_key:
@@ -3195,12 +3223,29 @@ paths:
            schema:
              $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
+      security:
+        - api_key:
+            - VIEWER
+        - tokenizer:
+            - VIEWER
      summary: Promote and index paths
      tags:
        - logs
@@ -3225,12 +3270,29 @@ paths:
            schema:
              $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
+      security:
+        - api_key:
+            - EDITOR
+        - tokenizer:
+            - EDITOR
      summary: Promote and index paths
      tags:
        - logs
@@ -4777,6 +4839,7 @@ paths:
      parameters:
        - in: query
          name: name
+          required: true
          schema:
            type: string
        - in: query
@@ -6157,4 +6220,58 @@ paths:
            - VIEWER
      summary: Query range
      tags:
-        - query
+        - querier
  /api/v5/substitute_vars:
    post:
      deprecated: false
      description: Replace variables in a query
      operationId: ReplaceVariables
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
                  status:
                    type: string
                type: object
          description: OK
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
        - api_key:
            - VIEWER
        - tokenizer:
            - VIEWER
      summary: Replace variables
      tags:
        - querier
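For orientation, here is a minimal Go sketch of calling the new `/api/v5/substitute_vars` endpoint as the spec above describes it. The base URL and key are placeholders, and the `SIGNOZ-API-KEY` header name is an assumption rather than something this spec excerpt confirms:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder body; a real call sends a full Querybuildertypesv5QueryRangeRequest.
	body := []byte(`{"schemaVersion":"v1","start":1640995200000,"end":1640998800000,"requestType":"time_series","compositeQuery":{"queries":[]}}`)

	req, err := http.NewRequest(http.MethodPost, "https://signoz.example.com/api/v5/substitute_vars", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<your-api-key>") // header name is an assumption, not confirmed by this diff

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// On success the server echoes the request with variables substituted:
	// {"status": "...", "data": { ...QueryRangeRequest... }}
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}
```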
@@ -190,43 +190,44 @@ type OpenAPIExample struct {
}
```

For reference, see `pkg/apiserver/signozapiserver/querier.go`, which now defines examples inline for the `/api/v5/query_range` endpoint (previously a standalone definition in `pkg/querier/openapi.go`):

```go
// Before: standalone definition in pkg/querier/openapi.go
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
	ID:              "QueryRangeV5",
	Tags:            []string{"query"},
	Summary:         "Query range",
	Description:     "Execute a composite query over a time range.",
	Request:         new(qbtypes.QueryRangeRequest),
	RequestExamples: queryRangeV5Examples, // []handler.OpenAPIExample
	// ...
}

var queryRangeV5Examples = []handler.OpenAPIExample{
	{
		Name:    "traces_time_series",
		Summary: "Time series: count spans grouped by service",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							// ...

// After: defined inline at route registration
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
	ID:                 "QueryRangeV5",
	Tags:               []string{"querier"},
	Summary:            "Query range",
	Description:        "Execute a composite query over a time range.",
	Request:            new(qbtypes.QueryRangeRequest),
	RequestContentType: "application/json",
	RequestExamples: []handler.OpenAPIExample{
		{
			Name:    "traces_time_series",
			Summary: "Time series: count spans grouped by service",
			Value: map[string]any{
				"schemaVersion": "v1",
				"start":         1640995200000,
				"end":           1640998800000,
				"requestType":   "time_series",
				"compositeQuery": map[string]any{
					"queries": []any{
						map[string]any{
							"type": "builder_query",
							"spec": map[string]any{
								"name":   "A",
								"signal": "traces",
								// ...
							},
						},
					},
				},
			},
		},
		// ... more examples
	},
	// ...
})).Methods(http.MethodPost).GetError(); err != nil {
	return err
}
```

@@ -338,4 +339,4 @@ func (Step) JSONSchema() (jsonschema.Schema, error) {
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil`, which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference (previously `pkg/querier/openapi.go`).
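To make that checklist concrete, here is a minimal sketch of the three schema rules applied to a single type. Everything in it is hypothetical: the package, type, and field names are invented for illustration, and the exact `Enum()` signature is an assumption since the guideline above does not spell it out.

```go
// Package schemaexample is a hypothetical illustration of the guidelines
// above; none of these names come from the diff.
package schemaexample

// Signal has a fixed set of acceptable values, so it implements Enum().
type Signal string

// Enum returns the acceptable values so the generated JSON schema can emit
// an enum constraint (assumed signature, for illustration only).
func (Signal) Enum() []any {
	return []any{"traces", "metrics", "logs"}
}

// PostableWidget is a hypothetical request type.
type PostableWidget struct {
	// required:"true" is about key presence in the JSON, not the zero value.
	Name string `json:"name" required:"true"`

	// A nil slice serializes to null, so mark it nullable; if it should
	// always be an array, initialize it and drop the tag instead.
	Tags []string `json:"tags" nullable:"true"`

	// Fixed set of values, constrained via Enum() above.
	Signal Signal `json:"signal" required:"true"`
}
```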
ee/querier/handler.go (new file, 178 lines)
@@ -0,0 +1,178 @@
package querier

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"runtime/debug"

	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type handler struct {
	set       factory.ProviderSettings
	querier   querier.Querier
	community querier.Handler
}

func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
	return &handler{
		set:       set,
		querier:   querier,
		community: community,
	}
}

func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	bodyBytes, err := io.ReadAll(req.Body)
	if err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
		return
	}
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
		return
	}

	defer func() {
		if r := recover(); r != nil {
			stackTrace := string(debug.Stack())

			queryJSON, _ := json.Marshal(queryRangeRequest)

			h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
				"stacktrace", stackTrace,
			)

			render.Error(rw, errors.NewInternalf(
				errors.CodeInternal,
				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
			))
		}
	}()

	if err := queryRangeRequest.Validate(); err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
		anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
		if err != nil {
			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
			return
		}

		results := []any{}
		for _, item := range anomalies.Results {
			results = append(results, item)
		}

		// Build step intervals from the anomaly query
		stepIntervals := make(map[string]uint64)
		if anomalyQuery.StepInterval.Duration > 0 {
			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
		}

		finalResp := &qbtypes.QueryRangeResponse{
			Type: queryRangeRequest.RequestType,
			Data: qbtypes.QueryData{
				Results: results,
			},
			Meta: qbtypes.ExecStats{
				StepIntervals: stepIntervals,
			},
		}

		render.Success(rw, http.StatusOK, finalResp)
		return
	}

	// regular query range request, delegate to community handler
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
	h.community.QueryRange(rw, req)
}

func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	h.community.QueryRawStream(rw, req)
}

func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	h.community.ReplaceVariables(rw, req)
}

func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
	for _, fn := range anomalyQuery.Functions {
		if fn.Name == qbtypes.FunctionNameAnomaly {
			for _, arg := range fn.Args {
				if arg.Name == "seasonality" {
					if seasonalityStr, ok := arg.Value.(string); ok {
						switch seasonalityStr {
						case "weekly":
							return anomalyV2.SeasonalityWeekly
						case "hourly":
							return anomalyV2.SeasonalityHourly
						}
					}
				}
			}
		}
	}
	return anomalyV2.SeasonalityDaily // default
}

func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
	switch seasonality {
	case anomalyV2.SeasonalityWeekly:
		return anomalyV2.NewWeeklyProvider(
			anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
		)
	case anomalyV2.SeasonalityHourly:
		return anomalyV2.NewHourlyProvider(
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
		)
	default:
		return anomalyV2.NewDailyProvider(
			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
			anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
		)
	}
}

func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
	seasonality := extractSeasonality(anomalyQuery)
	provider := h.createAnomalyProvider(seasonality)

	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}
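One pattern in `QueryRange` above deserves a note: `req.Body` is a one-shot reader, so the handler snapshots the bytes and restores the body with `io.NopCloser`, once before decoding locally and again before delegating to the community handler. A standalone sketch of the same pattern:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	// Build a request whose body, like any http.Request body, can only be read once.
	req, err := http.NewRequest(http.MethodPost, "http://example.invalid/api/v5/query_range", strings.NewReader(`{"a":1}`))
	if err != nil {
		panic(err)
	}

	// The first read drains the body...
	body, _ := io.ReadAll(req.Body)

	// ...so restore it before handing the request to another handler.
	req.Body = io.NopCloser(bytes.NewBuffer(body))

	again, _ := io.ReadAll(req.Body)
	fmt.Println(string(body) == string(again)) // true
}
```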
@@ -10,9 +10,7 @@ import (
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/global"
-	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/http/middleware"
-	querierAPI "github.com/SigNoz/signoz/pkg/querier"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations"

@@ -57,7 +55,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
		AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
		LicensingAPI:    httplicensing.NewLicensingAPI(signoz.Licensing),
		Signoz:          signoz,
-		QuerierAPI:      querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI:  queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	}, config)

@@ -106,14 +103,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	// v4
	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

-	// v5
-	router.Handle("/api/v5/query_range", handler.New(
-		am.ViewAccess(ah.queryRangeV5),
-		querierAPI.QueryRangeV5OpenAPIDef,
-	)).Methods(http.MethodPost)
-
-	router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
-
	// Gateway
	router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
@@ -2,16 +2,11 @@ package api

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"runtime/debug"

	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
	"github.com/SigNoz/signoz/ee/query-service/anomaly"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -20,8 +15,6 @@ import (
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -144,140 +137,3 @@
	}
}

func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
	for _, fn := range anomalyQuery.Functions {
		if fn.Name == qbtypes.FunctionNameAnomaly {
			for _, arg := range fn.Args {
				if arg.Name == "seasonality" {
					if seasonalityStr, ok := arg.Value.(string); ok {
						switch seasonalityStr {
						case "weekly":
							return anomalyV2.SeasonalityWeekly
						case "hourly":
							return anomalyV2.SeasonalityHourly
						}
					}
				}
			}
		}
	}
	return anomalyV2.SeasonalityDaily // default
}

func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
	switch seasonality {
	case anomalyV2.SeasonalityWeekly:
		return anomalyV2.NewWeeklyProvider(
			anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	case anomalyV2.SeasonalityHourly:
		return anomalyV2.NewHourlyProvider(
			anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	default:
		return anomalyV2.NewDailyProvider(
			anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
			anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
		)
	}
}

func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
	seasonality := extractSeasonality(anomalyQuery)
	provider := createAnomalyProvider(aH, seasonality)

	return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}

func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
	bodyBytes, err := io.ReadAll(req.Body)
	if err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
		return
	}
	req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))

	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(rw, err)
		return
	}

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
		return
	}

	defer func() {
		if r := recover(); r != nil {
			stackTrace := string(debug.Stack())

			queryJSON, _ := json.Marshal(queryRangeRequest)

			aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
				"error", r,
				"user", claims.UserID,
				"payload", string(queryJSON),
				"stacktrace", stackTrace,
			)

			render.Error(rw, errors.NewInternalf(
				errors.CodeInternal,
				"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
			))
		}
	}()

	if err := queryRangeRequest.Validate(); err != nil {
		render.Error(rw, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(rw, err)
		return
	}

	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
		anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
		if err != nil {
			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
			return
		}

		results := []any{}
		for _, item := range anomalies.Results {
			results = append(results, item)
		}

		// Build step intervals from the anomaly query
		stepIntervals := make(map[string]uint64)
		if anomalyQuery.StepInterval.Duration > 0 {
			stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
		}

		finalResp := &qbtypes.QueryRangeResponse{
			Type: queryRangeRequest.RequestType,
			Data: qbtypes.QueryData{
				Results: results,
			},
			Meta: qbtypes.ExecStats{
				StepIntervals: stepIntervals,
			},
		}

		render.Success(rw, http.StatusOK, finalResp)
		return
	} else {
		// regular query range request, let the querier handle it
		req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
		aH.QuerierAPI.QueryRange(rw, req)
	}
}
@@ -240,7 +240,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.RegisterQueryRangeV3Routes(r, am)
	apiHandler.RegisterInfraMetricsRoutes(r, am)
	apiHandler.RegisterQueryRangeV4Routes(r, am)
-	apiHandler.RegisterQueryRangeV5Routes(r, am)
	apiHandler.RegisterWebSocketPaths(r, am)
	apiHandler.RegisterMessagingQueuesRoutes(r, am)
	apiHandler.RegisterThirdPartyApiRoutes(r, am)
@@ -1,7 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const getDeploymentsData = (): Promise<
	AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);

@@ -1,16 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
	PayloadProps,
	UpdateCustomDomainProps,
} from 'types/api/customDomain/types';

const updateSubDomainAPI = async (
	props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
	axios.put(`/deployments/me/host`, {
		...props.data,
	});

export default updateSubDomainAPI;
@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
 * @summary Search ingestion keys for workspace
 */
export const searchIngestionKeys = (
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
	signal?: AbortSignal,
) => {
	return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
	TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
	TError = RenderErrorResponseDTO
>(
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
	options?: {
		query?: UseQueryOptions<
			Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
	TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
	TError = RenderErrorResponseDTO
>(
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
	options?: {
		query?: UseQueryOptions<
			Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
 */
export const invalidateSearchIngestionKeys = async (
	queryClient: QueryClient,
-	params?: SearchIngestionKeysParams,
+	params: SearchIngestionKeysParams,
	options?: InvalidateOptions,
): Promise<QueryClient> => {
	await queryClient.invalidateQueries(
@@ -16,6 +16,7 @@ import type {
	Querybuildertypesv5QueryRangeRequestDTO,
	QueryRangeV5200,
	RenderErrorResponseDTO,
+	ReplaceVariables200,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;
@@ -105,3 +106,86 @@ export const useQueryRangeV5 = <

	return useMutation(mutationOptions);
};
/**
 * Replace variables in a query
 * @summary Replace variables
 */
export const replaceVariables = (
	querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
	signal?: AbortSignal,
) => {
	return GeneratedAPIInstance<ReplaceVariables200>({
		url: `/api/v5/substitute_vars`,
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		data: querybuildertypesv5QueryRangeRequestDTO,
		signal,
	});
};

export const getReplaceVariablesMutationOptions = <
	TError = RenderErrorResponseDTO,
	TContext = unknown
>(options?: {
	mutation?: UseMutationOptions<
		Awaited<ReturnType<typeof replaceVariables>>,
		TError,
		{ data: Querybuildertypesv5QueryRangeRequestDTO },
		TContext
	>;
}): UseMutationOptions<
	Awaited<ReturnType<typeof replaceVariables>>,
	TError,
	{ data: Querybuildertypesv5QueryRangeRequestDTO },
	TContext
> => {
	const mutationKey = ['replaceVariables'];
	const { mutation: mutationOptions } = options
		? options.mutation &&
		  'mutationKey' in options.mutation &&
		  options.mutation.mutationKey
			? options
			: { ...options, mutation: { ...options.mutation, mutationKey } }
		: { mutation: { mutationKey } };

	const mutationFn: MutationFunction<
		Awaited<ReturnType<typeof replaceVariables>>,
		{ data: Querybuildertypesv5QueryRangeRequestDTO }
	> = (props) => {
		const { data } = props ?? {};

		return replaceVariables(data);
	};

	return { mutationFn, ...mutationOptions };
};

export type ReplaceVariablesMutationResult = NonNullable<
	Awaited<ReturnType<typeof replaceVariables>>
>;
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;

/**
 * @summary Replace variables
 */
export const useReplaceVariables = <
	TError = RenderErrorResponseDTO,
	TContext = unknown
>(options?: {
	mutation?: UseMutationOptions<
		Awaited<ReturnType<typeof replaceVariables>>,
		TError,
		{ data: Querybuildertypesv5QueryRangeRequestDTO },
		TContext
	>;
}): UseMutationResult<
	Awaited<ReturnType<typeof replaceVariables>>,
	TError,
	{ data: Querybuildertypesv5QueryRangeRequestDTO },
	TContext
> => {
	const mutationOptions = getReplaceVariablesMutationOptions(options);

	return useMutation(mutationOptions);
};
@@ -453,18 +453,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
	/**
	 * @type string
	 */
-	id?: string;
+	id: string;
	/**
	 * @type string
	 */
-	value?: string;
+	value: string;
}

export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
	/**
	 * @type string
	 */
-	id?: string;
+	id: string;
}

export interface GatewaytypesGettableIngestionKeysDTO {
@@ -616,7 +616,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
	/**
	 * @type string
	 */
-	name?: string;
+	name: string;
	/**
	 * @type array
	 * @nullable true
@@ -638,7 +638,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
}

export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
-	config?: GatewaytypesLimitConfigDTO;
+	config: GatewaytypesLimitConfigDTO;
	/**
	 * @type array
	 * @nullable true
@@ -2419,34 +2419,34 @@ export interface ZeustypesGettableHostDTO {
	 * @type array
	 * @nullable true
	 */
-	hosts?: ZeustypesHostDTO[] | null;
+	hosts: ZeustypesHostDTO[] | null;
	/**
	 * @type string
	 */
-	name?: string;
+	name: string;
	/**
	 * @type string
	 */
-	state?: string;
+	state: string;
	/**
	 * @type string
	 */
-	tier?: string;
+	tier: string;
}

export interface ZeustypesHostDTO {
	/**
	 * @type boolean
	 */
-	is_default?: boolean;
+	is_default: boolean;
	/**
	 * @type string
	 */
-	name?: string;
+	name: string;
	/**
	 * @type string
	 */
-	url?: string;
+	url: string;
}

export interface ZeustypesPostableHostDTO {
@@ -2460,43 +2460,43 @@ export interface ZeustypesPostableProfileDTO {
	/**
	 * @type string
	 */
-	existing_observability_tool?: string;
+	existing_observability_tool: string;
	/**
	 * @type boolean
	 */
-	has_existing_observability_tool?: boolean;
+	has_existing_observability_tool: boolean;
	/**
	 * @type integer
	 * @format int64
	 */
-	logs_scale_per_day_in_gb?: number;
+	logs_scale_per_day_in_gb: number;
	/**
	 * @type integer
	 * @format int64
	 */
-	number_of_hosts?: number;
+	number_of_hosts: number;
	/**
	 * @type integer
	 * @format int64
	 */
-	number_of_services?: number;
+	number_of_services: number;
	/**
	 * @type array
	 * @nullable true
	 */
-	reasons_for_interest_in_signoz?: string[] | null;
+	reasons_for_interest_in_signoz: string[] | null;
	/**
	 * @type string
	 */
-	timeline_for_migrating_to_signoz?: string;
+	timeline_for_migrating_to_signoz: string;
	/**
	 * @type boolean
	 */
-	uses_otel?: boolean;
+	uses_otel: boolean;
	/**
	 * @type string
	 */
-	where_did_you_discover_signoz?: string;
+	where_did_you_discover_signoz: string;
}

export type ChangePasswordPathParameters = {
@@ -3039,7 +3039,7 @@ export type SearchIngestionKeysParams = {
	 * @type string
	 * @description undefined
	 */
-	name?: string;
+	name: string;
	/**
	 * @type integer
	 * @description undefined
@@ -3229,3 +3229,11 @@ export type QueryRangeV5200 = {
	 */
	status?: string;
};

export type ReplaceVariables200 = {
	data?: Querybuildertypesv5QueryRangeRequestDTO;
	/**
	 * @type string
	 */
	status?: string;
};
@@ -1,20 +0,0 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';

const updateProfile = async (
	props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
	const response = await GatewayApiV2Instance.put('/profiles/me', {
		...props,
	});

	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};

export default updateProfile;
@@ -1,7 +1,6 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
-import { useEffect, useState } from 'react';
-import { useMutation } from 'react-query';
+import { useEffect, useMemo, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Color } from '@signozhq/design-tokens';
import {
@@ -15,14 +14,16 @@
	Tag,
	Typography,
} from 'antd';
-import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
+import {
+	RenderErrorResponseDTO,
+	ZeustypesHostDTO,
+} from 'api/generated/services/sigNoz.schemas';
+import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
import { AxiosError } from 'axios';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
-import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useNotifications } from 'hooks/useNotifications';
import { InfoIcon, Link2, Pencil } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
-import { HostsProps } from 'types/api/customDomain/types';

import './CustomDomainSettings.styles.scss';
@@ -35,7 +36,7 @@ export default function CustomDomainSettings(): JSX.Element {
	const { notifications } = useNotifications();
	const [isEditModalOpen, setIsEditModalOpen] = useState(false);
	const [isPollingEnabled, setIsPollingEnabled] = useState(false);
-	const [hosts, setHosts] = useState<HostsProps[] | null>(null);
+	const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);

	const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
		null,
@@ -57,36 +58,37 @@ export default function CustomDomainSettings(): JSX.Element {
	};

	const {
-		data: deploymentsData,
-		isLoading: isLoadingDeploymentsData,
-		isFetching: isFetchingDeploymentsData,
-		refetch: refetchDeploymentsData,
-	} = useGetDeploymentsData(true);
+		data: hostsData,
+		isLoading: isLoadingHosts,
+		isFetching: isFetchingHosts,
+		refetch: refetchHosts,
+	} = useGetHosts();

	const {
		mutate: updateSubDomain,
		isLoading: isLoadingUpdateCustomDomain,
-	} = useMutation(updateSubDomainAPI, {
-		onSuccess: () => {
-			setIsPollingEnabled(true);
-			refetchDeploymentsData();
-			setIsEditModalOpen(false);
-		},
-		onError: (error: AxiosError) => {
-			setUpdateDomainError(error);
-			setIsPollingEnabled(false);
-		},
-	});
+	} = usePutHost<AxiosError<RenderErrorResponseDTO>>();

+	const stripProtocol = (url: string): string => {
+		return url?.split('://')[1] ?? url;
+	};

+	const dnsSuffix = useMemo(() => {
+		const defaultHost = hosts?.find((h) => h.is_default);
+		return defaultHost?.url && defaultHost?.name
+			? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
+			: '';
+	}, [hosts]);

	useEffect(() => {
-		if (isFetchingDeploymentsData) {
+		if (isFetchingHosts) {
			return;
		}

-		if (deploymentsData?.data?.status === 'success') {
-			setHosts(deploymentsData.data.data.hosts);
+		if (hostsData?.data?.status === 'success') {
+			setHosts(hostsData?.data?.data?.hosts ?? null);

-			const activeCustomDomain = deploymentsData.data.data.hosts.find(
+			const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
				(host) => !host.is_default,
			);
@@ -97,32 +99,36 @@ export default function CustomDomainSettings(): JSX.Element {
			}
		}

-		if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
+		if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
			setTimeout(() => {
-				refetchDeploymentsData();
+				refetchHosts();
			}, 3000);
		}

-		if (deploymentsData?.data?.data.state === 'HEALTHY') {
+		if (hostsData?.data?.data?.state === 'HEALTHY') {
			setIsPollingEnabled(false);
		}
-	}, [
-		deploymentsData,
-		refetchDeploymentsData,
-		isPollingEnabled,
-		isFetchingDeploymentsData,
-	]);
+	}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);

	const onUpdateCustomDomainSettings = (): void => {
		editForm
			.validateFields()
			.then((values) => {
				if (values.subdomain) {
-					updateSubDomain({
-						data: {
-							name: values.subdomain,
-						},
-					});
+					updateSubDomain(
+						{ data: { name: values.subdomain } },
+						{
+							onSuccess: () => {
+								setIsPollingEnabled(true);
+								refetchHosts();
+								setIsEditModalOpen(false);
+							},
+							onError: (error: AxiosError<RenderErrorResponseDTO>) => {
+								setUpdateDomainError(error as AxiosError);
+								setIsPollingEnabled(false);
+							},
+						},
+					);

					setCustomDomainDetails({
						subdomain: values.subdomain,
@@ -134,10 +140,8 @@ export default function CustomDomainSettings(): JSX.Element {
			});
	};

-	const onCopyUrlHandler = (host: string): void => {
-		const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
-
-		setCopyUrl(url);
+	const onCopyUrlHandler = (url: string): void => {
+		setCopyUrl(stripProtocol(url));
		notifications.success({
			message: 'Copied to clipboard',
		});
@@ -157,7 +161,7 @@ export default function CustomDomainSettings(): JSX.Element {
			</div>

			<div className="custom-domain-settings-content">
-				{!isLoadingDeploymentsData && (
+				{!isLoadingHosts && (
					<Card className="custom-domain-settings-card">
						<div className="custom-domain-settings-content-header">
							Team {org?.[0]?.displayName} Information
@@ -169,10 +173,9 @@ export default function CustomDomainSettings(): JSX.Element {
								<div
									className="custom-domain-url"
									key={host.name}
-									onClick={(): void => onCopyUrlHandler(host.name)}
+									onClick={(): void => onCopyUrlHandler(host.url || '')}
								>
-									<Link2 size={12} /> {host.name}.
-									{deploymentsData?.data.data.cluster.region.dns}
+									<Link2 size={12} /> {stripProtocol(host.url || '')}
									{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
								</div>
							))}
@@ -181,11 +184,7 @@ export default function CustomDomainSettings(): JSX.Element {
							<div className="custom-domain-url-edit-btn">
								<Button
									className="periscope-btn"
-									disabled={
-										isLoadingDeploymentsData ||
-										isFetchingDeploymentsData ||
-										isPollingEnabled
-									}
+									disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
									type="default"
									icon={<Pencil size={10} />}
									onClick={(): void => setIsEditModalOpen(true)}
@@ -198,7 +197,7 @@ export default function CustomDomainSettings(): JSX.Element {
						{isPollingEnabled && (
							<Alert
								className="custom-domain-update-status"
-								message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
+								message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
								type="info"
								icon={<InfoIcon size={12} />}
							/>
@@ -206,7 +205,7 @@ export default function CustomDomainSettings(): JSX.Element {
					</Card>
				)}

-				{isLoadingDeploymentsData && (
+				{isLoadingHosts && (
					<Card className="custom-domain-settings-card">
						<Skeleton
							className="custom-domain-settings-skeleton"
@@ -255,7 +254,7 @@ export default function CustomDomainSettings(): JSX.Element {
							addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
							placeholder="Enter Domain"
							onChange={(): void => setUpdateDomainError(null)}
-							addonAfter={deploymentsData?.data.data.cluster.region.dns}
+							addonAfter={dnsSuffix}
							autoFocus
						/>
					</Form.Item>
@@ -267,7 +266,8 @@ export default function CustomDomainSettings(): JSX.Element {
						{updateDomainError.status === 409 ? (
							<Alert
								message={
-									(updateDomainError?.response?.data as { error?: string })?.error ||
+									(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
+										?.message ||
									'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
								}
								type="warning"
@@ -275,7 +275,10 @@ export default function CustomDomainSettings(): JSX.Element {
							/>
						) : (
							<Typography.Text type="danger">
-								{(updateDomainError.response?.data as { error: string })?.error}
+								{
+									(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
+										?.message
+								}
							</Typography.Text>
						)}
					</div>
@@ -0,0 +1,128 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';

import CustomDomainSettings from '../CustomDomainSettings';

const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';

const mockHostsResponse: GetHosts200 = {
	status: 'success',
	data: {
		name: 'accepted-starfish',
		state: 'HEALTHY',
		tier: 'PREMIUM',
		hosts: [
			{
				name: 'accepted-starfish',
				is_default: true,
				url: 'https://accepted-starfish.test.cloud',
			},
			{
				name: 'custom-host',
				is_default: false,
				url: 'https://custom-host.test.cloud',
			},
		],
	},
};

describe('CustomDomainSettings', () => {
	afterEach(() => server.resetHandlers());

	it('renders host URLs with protocol stripped and marks the default host', async () => {
		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(mockHostsResponse)),
			),
		);

		render(<CustomDomainSettings />);

		await screen.findByText(/accepted-starfish\.test\.cloud/i);
		await screen.findByText(/custom-host\.test\.cloud/i);
		expect(screen.getByText('Default')).toBeInTheDocument();
	});

	it('opens edit modal with DNS suffix derived from the default host', async () => {
		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(mockHostsResponse)),
			),
		);

		const user = userEvent.setup({ pointerEventsCheck: 0 });
		render(<CustomDomainSettings />);

		await screen.findByText(/accepted-starfish\.test\.cloud/i);

		await user.click(
			screen.getByRole('button', { name: /customize team['’]s url/i }),
		);

		expect(
			screen.getByRole('dialog', { name: /customize your team['’]s url/i }),
		).toBeInTheDocument();
		// DNS suffix is the part of the default host URL after the name prefix
		expect(screen.getByText('test.cloud')).toBeInTheDocument();
	});

	it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
		let capturedBody: Record<string, unknown> = {};

		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(mockHostsResponse)),
			),
			rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
				capturedBody = await req.json<Record<string, unknown>>();
				return res(ctx.status(200), ctx.json({}));
			}),
		);

		const user = userEvent.setup({ pointerEventsCheck: 0 });
		render(<CustomDomainSettings />);

		await screen.findByText(/accepted-starfish\.test\.cloud/i);
		await user.click(
			screen.getByRole('button', { name: /customize team['’]s url/i }),
		);

		const input = screen.getByPlaceholderText(/enter domain/i);
		await user.clear(input);
		await user.type(input, 'myteam');
		await user.click(screen.getByRole('button', { name: /apply changes/i }));

		await waitFor(() => {
			expect(capturedBody).toEqual({ name: 'myteam' });
		});
	});

	it('shows contact support option when domain update returns 409', async () => {
		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(mockHostsResponse)),
			),
			rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(
					ctx.status(409),
					ctx.json({ error: { message: 'Already updated today' } }),
				),
			),
		);

		const user = userEvent.setup({ pointerEventsCheck: 0 });
		render(<CustomDomainSettings />);

		await screen.findByText(/accepted-starfish\.test\.cloud/i);
		await user.click(
			screen.getByRole('button', { name: /customize team['’]s url/i }),
		);
		await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
		await user.click(screen.getByRole('button', { name: /apply changes/i }));

		expect(
			await screen.findByRole('button', { name: /contact support/i }),
		).toBeInTheDocument();
	});
});
@@ -1,7 +1,6 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
-import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
	HistogramTooltipProps,
	TooltipRenderArgs,
@@ -22,21 +21,11 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
			if (customRenderTooltip) {
				return customRenderTooltip(props);
			}
-			const content = buildTooltipContent({
-				data: props.uPlotInstance.data,
-				series: props.uPlotInstance.series,
-				dataIndexes: props.dataIndexes,
-				activeSeriesIndex: props.seriesIndex,
-				uPlotInstance: props.uPlotInstance,
-				yAxisUnit: rest.yAxisUnit ?? '',
-				decimalPrecision: rest.decimalPrecision,
-			});
			const tooltipProps: HistogramTooltipProps = {
				...props,
				timezone: rest.timezone,
				yAxisUnit: rest.yAxisUnit,
				decimalPrecision: rest.decimalPrecision,
-				content,
			};
			return <HistogramTooltip {...tooltipProps} />;
		},

@@ -1,7 +1,6 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
-import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
	TimeSeriesTooltipProps,
	TooltipRenderArgs,
@@ -17,21 +16,11 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
			if (customRenderTooltip) {
				return customRenderTooltip(props);
			}
-			const content = buildTooltipContent({
-				data: props.uPlotInstance.data,
-				series: props.uPlotInstance.series,
-				dataIndexes: props.dataIndexes,
-				activeSeriesIndex: props.seriesIndex,
-				uPlotInstance: props.uPlotInstance,
-				yAxisUnit: rest.yAxisUnit ?? '',
-				decimalPrecision: rest.decimalPrecision,
-			});
			const tooltipProps: TimeSeriesTooltipProps = {
				...props,
				timezone: rest.timezone,
				yAxisUnit: rest.yAxisUnit,
				decimalPrecision: rest.decimalPrecision,
-				content,
			};
			return <TimeSeriesTooltip {...tooltipProps} />;
		},
@@ -2,10 +2,10 @@
import { useEffect, useState } from 'react';
import { Button, Skeleton, Tag, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
+import { useGetHosts } from 'api/generated/services/zeus';
import ROUTES from 'constants/routes';
-import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import history from 'lib/history';
-import { Globe, Link2 } from 'lucide-react';
+import { Link2 } from 'lucide-react';
import Card from 'periscope/components/Card/Card';
import { useAppContext } from 'providers/App/App';
import { LicensePlatform } from 'types/api/licensesV3/getActive';
@@ -26,36 +26,21 @@ function DataSourceInfo({
	const isEnabled =
		activeLicense && activeLicense.platform === LicensePlatform.CLOUD;

-	const {
-		data: deploymentsData,
-		isError: isErrorDeploymentsData,
-	} = useGetDeploymentsData(isEnabled || false);
+	const { data: hostsData, isError } = useGetHosts({
+		query: { enabled: isEnabled || false },
+	});

-	const [region, setRegion] = useState<string>('');
	const [url, setUrl] = useState<string>('');

	useEffect(() => {
-		if (deploymentsData) {
-			switch (deploymentsData?.data.data.cluster.region.name) {
-				case 'in':
-					setRegion('India');
-					break;
-				case 'us':
-					setRegion('United States');
-					break;
-				case 'eu':
-					setRegion('Europe');
-					break;
-				default:
-					setRegion(deploymentsData?.data.data.cluster.region.name);
-					break;
-			}
-
-			setUrl(
-				`${deploymentsData?.data.data.name}.${deploymentsData?.data.data.cluster.region.dns}`,
-			);
-		}
-	}, [deploymentsData]);
+		if (hostsData) {
+			const defaultHost = hostsData?.data?.data?.hosts?.find((h) => h.is_default);
+			if (defaultHost?.url) {
+				const url = defaultHost?.url?.split('://')[1] ?? '';
+				setUrl(url);
+			}
+		}
+	}, [hostsData]);

	const renderNotSendingData = (): JSX.Element => (
		<>
@@ -123,14 +108,8 @@
					</Button>
				</div>

-				{!isErrorDeploymentsData && deploymentsData && (
+				{!isError && hostsData && (
					<div className="workspace-details">
-						<div className="workspace-region">
-							<Globe size={10} />
-
-							<Typography>{region}</Typography>
-						</div>
-
						<div className="workspace-url">
							<Link2 size={12} />
@@ -156,17 +135,11 @@
					Hello there, Welcome to your SigNoz workspace
				</Typography>

-				{!isErrorDeploymentsData && deploymentsData && (
+				{!isError && hostsData && (
					<Card className="welcome-card">
						<Card.Content>
							<div className="workspace-ready-container">
								<div className="workspace-details">
-									<div className="workspace-region">
-										<Globe size={10} />
-
-										<Typography>{region}</Typography>
-									</div>
-
									<div className="workspace-url">
										<Link2 size={12} />
@@ -0,0 +1,69 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen } from 'tests/test-utils';

import DataSourceInfo from '../DataSourceInfo';

const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';

const mockHostsResponse: GetHosts200 = {
	status: 'success',
	data: {
		name: 'accepted-starfish',
		state: 'HEALTHY',
		tier: 'PREMIUM',
		hosts: [
			{
				name: 'accepted-starfish',
				is_default: true,
				url: 'https://accepted-starfish.test.cloud',
			},
			{
				name: 'custom-host',
				is_default: false,
				url: 'https://custom-host.test.cloud',
			},
		],
	},
};

describe('DataSourceInfo', () => {
	afterEach(() => server.resetHandlers());

	it('renders the default workspace URL with protocol stripped', async () => {
		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(mockHostsResponse)),
			),
		);

		render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);

		await screen.findByText(/accepted-starfish\.test\.cloud/i);
	});

	it('does not render workspace URL when GET /zeus/hosts fails', async () => {
		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(500), ctx.json({})),
			),
		);

		render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);

		await screen.findByText(/Your workspace is ready/i);
		expect(screen.queryByText(/signoz\.cloud/i)).not.toBeInTheDocument();
	});

	it('renders workspace URL in the data-received view when telemetry is flowing', async () => {
		server.use(
			rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(mockHostsResponse)),
			),
		);

		render(<DataSourceInfo dataSentToSigNoz={true} isLoading={false} />);

		await screen.findByText(/accepted-starfish\.test\.cloud/i);
	});
});
@@ -36,24 +36,6 @@ jest.mock('react-router-dom', () => {
	};
});

-// Mock deployments data hook to avoid unrelated network calls in this page
-jest.mock(
-	'hooks/CustomDomain/useGetDeploymentsData',
-	(): Record<string, unknown> => ({
-		useGetDeploymentsData: (): {
-			data: undefined;
-			isLoading: boolean;
-			isFetching: boolean;
-			isError: boolean;
-		} => ({
-			data: undefined,
-			isLoading: false,
-			isFetching: false,
-			isError: false,
-		}),
-	}),
-);
-
const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
const TEST_WORKSPACE_ID = 'w1';
@@ -26,7 +26,7 @@ jest.mock('lib/history', () => ({
// API Endpoints
const ORG_PREFERENCES_ENDPOINT = '*/api/v1/org/preferences/list';
const UPDATE_ORG_PREFERENCE_ENDPOINT = '*/api/v1/org/preferences/name/update';
const UPDATE_PROFILE_ENDPOINT = '*/api/gateway/v2/profiles/me';
const UPDATE_PROFILE_ENDPOINT = '*/api/v2/zeus/profiles';
const EDIT_ORG_ENDPOINT = '*/api/v2/orgs/me';
const INVITE_USERS_ENDPOINT = '*/api/v1/invite/bulk/create';
@@ -277,6 +277,46 @@ describe('OnboardingQuestionaire Component', () => {
    ).toBeInTheDocument();
  });

  it('fires PUT to /zeus/profiles and advances to step 4 on success', async () => {
    const user = userEvent.setup({ pointerEventsCheck: 0 });
    let profilePutCalled = false;

    server.use(
      rest.put(UPDATE_PROFILE_ENDPOINT, (_, res, ctx) => {
        profilePutCalled = true;
        return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
      }),
    );

    render(<OnboardingQuestionaire />);

    // Navigate to step 3
    await user.click(screen.getByLabelText(/datadog/i));
    await user.click(screen.getByRole('radio', { name: /yes/i }));
    await user.click(screen.getByLabelText(/just exploring/i));
    await user.click(screen.getByRole('button', { name: /next/i }));

    await user.type(
      await screen.findByPlaceholderText(/e\.g\., googling/i),
      'Found via Google',
    );
    await user.click(screen.getByLabelText(/lowering observability costs/i));
    await user.click(screen.getByRole('button', { name: /next/i }));

    // Click "I'll do this later" on step 3 — triggers PUT /zeus/profiles
    await user.click(
      await screen.findByRole('button', { name: /i'll do this later/i }),
    );

    await waitFor(() => {
      expect(profilePutCalled).toBe(true);
      // Step 3 content is gone — successfully advanced to step 4
      expect(
        screen.queryByText(/what does your scale approximately look like/i),
      ).not.toBeInTheDocument();
    });
  });

  it('shows do later button', async () => {
    const user = userEvent.setup({ pointerEventsCheck: 0 });
    render(<OnboardingQuestionaire />);
@@ -1,8 +1,10 @@
import { useEffect, useState } from 'react';
import { useMutation, useQuery } from 'react-query';
import { toast } from '@signozhq/sonner';
import { NotificationInstance } from 'antd/es/notification/interface';
import logEvent from 'api/common/logEvent';
import updateProfileAPI from 'api/onboarding/updateProfile';
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { usePutProfile } from 'api/generated/services/zeus';
import listOrgPreferences from 'api/v1/org/preferences/list';
import updateOrgPreferenceAPI from 'api/v1/org/preferences/name/update';
import { AxiosError } from 'axios';
@@ -121,20 +123,9 @@ function OnboardingQuestionaire(): JSX.Element {
    optimiseSignozDetails.hostsPerDay === 0 &&
    optimiseSignozDetails.services === 0;

  const { mutate: updateProfile, isLoading: isUpdatingProfile } = useMutation(
    updateProfileAPI,
    {
      onSuccess: () => {
        setCurrentStep(4);
      },
      onError: (error) => {
        showErrorNotification(notifications, error as AxiosError);

        // Allow user to proceed even if API fails
        setCurrentStep(4);
      },
    },
  );
  const { mutate: updateProfile, isLoading: isUpdatingProfile } = usePutProfile<
    AxiosError<RenderErrorResponseDTO>
  >();

  const { mutate: updateOrgPreference } = useMutation(updateOrgPreferenceAPI, {
    onSuccess: () => {
@@ -153,29 +144,44 @@ function OnboardingQuestionaire(): JSX.Element {
      nextPageID: 4,
    });

    updateProfile({
      uses_otel: orgDetails?.usesOtel as boolean,
      has_existing_observability_tool: orgDetails?.usesObservability as boolean,
      existing_observability_tool:
        orgDetails?.observabilityTool === 'Others'
          ? (orgDetails?.otherTool as string)
          : (orgDetails?.observabilityTool as string),
      where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
      timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
      reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
        'Others',
      )
        ? ([
            ...(signozDetails?.interestInSignoz?.filter(
              (item) => item !== 'Others',
            ) || []),
            signozDetails?.otherInterestInSignoz,
          ] as string[])
        : (signozDetails?.interestInSignoz as string[]),
      logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
      number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
      number_of_services: optimiseSignozDetails?.services as number,
    });
    updateProfile(
      {
        data: {
          uses_otel: orgDetails?.usesOtel as boolean,
          has_existing_observability_tool: orgDetails?.usesObservability as boolean,
          existing_observability_tool:
            orgDetails?.observabilityTool === 'Others'
              ? (orgDetails?.otherTool as string)
              : (orgDetails?.observabilityTool as string),
          where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
          timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
          reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
            'Others',
          )
            ? ([
                ...(signozDetails?.interestInSignoz?.filter(
                  (item) => item !== 'Others',
                ) || []),
                signozDetails?.otherInterestInSignoz,
              ] as string[])
            : (signozDetails?.interestInSignoz as string[]),
          logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
          number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
          number_of_services: optimiseSignozDetails?.services as number,
        },
      },
      {
        onSuccess: () => {
          setCurrentStep(4);
        },
        onError: (error: any) => {
          toast.error(error?.message || SOMETHING_WENT_WRONG);

          // Allow user to proceed even if API fails
          setCurrentStep(4);
        },
      },
    );
  };

  const handleOnboardingComplete = (): void => {
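A note on the migration above: the hand-rolled updateProfileAPI + useMutation pair is replaced by the generated usePutProfile hook, so the payload now travels under a data key and the success/error callbacks move from the hook options to the second argument of mutate. A minimal sketch of the call shape, assuming the standard react-query mutate(variables, options) signature rather than the exact generated types:

// sketch only; payload trimmed, types simplified
const { mutate: updateProfile } = usePutProfile<AxiosError<RenderErrorResponseDTO>>();

updateProfile(
  { data: { uses_otel: true } }, // variables nested under `data`
  {
    onSuccess: () => setCurrentStep(4),
    onError: () => setCurrentStep(4), // proceed even if the API fails
  },
);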
@@ -1,13 +0,0 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getDeploymentsData } from 'api/customDomain/getDeploymentsData';
import { AxiosError, AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const useGetDeploymentsData = (
  isEnabled: boolean,
): UseQueryResult<AxiosResponse<DeploymentsDataProps>, AxiosError> =>
  useQuery<AxiosResponse<DeploymentsDataProps>, AxiosError>({
    queryKey: ['getDeploymentsData'],
    queryFn: () => getDeploymentsData(),
    enabled: isEnabled,
  });
@@ -1,8 +1,31 @@
import { HistogramTooltipProps } from '../types';
import { useMemo } from 'react';

import { HistogramTooltipProps, TooltipContentItem } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';

export default function HistogramTooltip(
  props: HistogramTooltipProps,
): JSX.Element {
  return <Tooltip {...props} showTooltipHeader={false} />;
  const content = useMemo(
    (): TooltipContentItem[] =>
      buildTooltipContent({
        data: props.uPlotInstance.data,
        series: props.uPlotInstance.series,
        dataIndexes: props.dataIndexes,
        activeSeriesIndex: props.seriesIndex,
        uPlotInstance: props.uPlotInstance,
        yAxisUnit: props.yAxisUnit ?? '',
        decimalPrecision: props.decimalPrecision,
      }),
    [
      props.uPlotInstance,
      props.seriesIndex,
      props.dataIndexes,
      props.yAxisUnit,
      props.decimalPrecision,
    ],
  );

  return <Tooltip {...props} content={content} showTooltipHeader={false} />;
}
@@ -1,8 +1,31 @@
import { TimeSeriesTooltipProps } from '../types';
import { useMemo } from 'react';

import { TimeSeriesTooltipProps, TooltipContentItem } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';

export default function TimeSeriesTooltip(
  props: TimeSeriesTooltipProps,
): JSX.Element {
  return <Tooltip {...props} />;
  const content = useMemo(
    (): TooltipContentItem[] =>
      buildTooltipContent({
        data: props.uPlotInstance.data,
        series: props.uPlotInstance.series,
        dataIndexes: props.dataIndexes,
        activeSeriesIndex: props.seriesIndex,
        uPlotInstance: props.uPlotInstance,
        yAxisUnit: props.yAxisUnit ?? '',
        decimalPrecision: props.decimalPrecision,
      }),
    [
      props.uPlotInstance,
      props.seriesIndex,
      props.dataIndexes,
      props.yAxisUnit,
      props.decimalPrecision,
    ],
  );

  return <Tooltip {...props} content={content} />;
}
@@ -28,18 +28,23 @@
    font-weight: 500;
  }

  .uplot-tooltip-list {
    &::-webkit-scrollbar {
      width: 0.3rem;
    }
  .uplot-tooltip-list-container {
    overflow-y: auto;
    max-height: 330px;

    &::-webkit-scrollbar-track {
      background: transparent;
    }
    .uplot-tooltip-list {
      &::-webkit-scrollbar {
        width: 0.3rem;
      }

    &::-webkit-scrollbar-thumb {
      background: var(--bg-slate-100);
      border-radius: 0.5rem;
      &::-webkit-scrollbar-track {
        background: transparent;
      }

      &::-webkit-scrollbar-thumb {
        background: var(--bg-slate-100);
        border-radius: 0.5rem;
      }
    }
  }
}
@@ -1,4 +1,4 @@
import { useMemo } from 'react';
import { useMemo, useState } from 'react';
import { Virtuoso } from 'react-virtuoso';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
@@ -11,6 +11,7 @@ import './Tooltip.styles.scss';

const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const TOOLTIP_LIST_PADDING = 10;

export default function Tooltip({
  uPlotInstance,
@@ -19,7 +20,7 @@ export default function Tooltip({
  showTooltipHeader = true,
}: TooltipProps): JSX.Element {
  const isDarkMode = useIsDarkMode();

  const [listHeight, setListHeight] = useState(0);
  const tooltipContent = content ?? [];

  const headerTitle = useMemo(() => {
@@ -41,42 +42,52 @@ export default function Tooltip({
    showTooltipHeader,
  ]);

  const virtuosoStyle = useMemo(() => {
    return {
      height:
        listHeight > 0
          ? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
          : Math.min(
              tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
              TOOLTIP_LIST_MAX_HEIGHT,
            ),
      width: '100%',
    };
  }, [listHeight, tooltipContent.length]);

  return (
    <div
      className={cx(
        'uplot-tooltip-container',
        isDarkMode ? 'darkMode' : 'lightMode',
      )}
      data-testid="uplot-tooltip-container"
    >
      {showTooltipHeader && (
        <div className="uplot-tooltip-header">
        <div className="uplot-tooltip-header" data-testid="uplot-tooltip-header">
          <span>{headerTitle}</span>
        </div>
      )}
      <div
        style={{
          height: Math.min(
            tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
            TOOLTIP_LIST_MAX_HEIGHT,
          ),
          minHeight: 0,
        }}
      >
      <div className="uplot-tooltip-list-container">
        {tooltipContent.length > 0 ? (
          <Virtuoso
            className="uplot-tooltip-list"
            data-testid="uplot-tooltip-list"
            data={tooltipContent}
            defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
            style={virtuosoStyle}
            totalListHeightChanged={setListHeight}
            itemContent={(_, item): JSX.Element => (
              <div className="uplot-tooltip-item">
              <div className="uplot-tooltip-item" data-testid="uplot-tooltip-item">
                <div
                  className="uplot-tooltip-item-marker"
                  style={{ borderColor: item.color }}
                  data-is-legend-marker={true}
                  data-testid="uplot-tooltip-item-marker"
                />
                <div
                  className="uplot-tooltip-item-content"
                  style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
                  data-testid="uplot-tooltip-item-content"
                >
                  {item.label}: {item.tooltipValue}
                </div>
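Worth spelling out the sizing logic introduced above: the Virtuoso list prefers the total height it reports through totalListHeightChanged (plus the 10px padding constant), and falls back to an estimate of items * 38px while no measurement has arrived, with both paths capped at 330px. The arithmetic below uses the exact values exercised by the tests later in this diff:

// values taken from the tests that follow
Math.min(200 + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT); // measured path: 210px
Math.min(2 * TOOLTIP_ITEM_HEIGHT, TOOLTIP_LIST_MAX_HEIGHT); // fallback path, 2 items: 76px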
@@ -0,0 +1,208 @@
import React from 'react';
import { VirtuosoMockContext } from 'react-virtuoso';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { render, RenderResult, screen } from 'tests/test-utils';
import uPlot from 'uplot';

import { TooltipContentItem } from '../../types';
import Tooltip from '../Tooltip';

type MockVirtuosoProps = {
  data: TooltipContentItem[];
  itemContent: (index: number, item: TooltipContentItem) => React.ReactNode;
  className?: string;
  style?: React.CSSProperties;
  totalListHeightChanged?: (height: number) => void;
  'data-testid'?: string;
};

let mockTotalListHeight = 200;

jest.mock('react-virtuoso', () => {
  const actual = jest.requireActual('react-virtuoso');

  return {
    ...actual,
    Virtuoso: ({
      data,
      itemContent,
      className,
      style,
      totalListHeightChanged,
      'data-testid': dataTestId,
    }: MockVirtuosoProps): JSX.Element => {
      if (totalListHeightChanged) {
        // Simulate Virtuoso reporting total list height
        totalListHeightChanged(mockTotalListHeight);
      }

      return (
        <div className={className} style={style} data-testid={dataTestId}>
          {data.map((item, index) => (
            <div key={item.label ?? index.toString()}>{itemContent(index, item)}</div>
          ))}
        </div>
      );
    },
  };
});

jest.mock('hooks/useDarkMode', () => ({
  useIsDarkMode: jest.fn(),
}));

const mockUseIsDarkMode = useIsDarkMode as jest.MockedFunction<
  typeof useIsDarkMode
>;

type TooltipTestProps = React.ComponentProps<typeof Tooltip>;

function createTooltipContent(
  overrides: Partial<TooltipContentItem> = {},
): TooltipContentItem {
  return {
    label: 'Series A',
    value: 10,
    tooltipValue: '10',
    color: '#ff0000',
    isActive: true,
    ...overrides,
  };
}

function createUPlotInstance(cursorIdx: number | null): uPlot {
  return ({
    data: [[1], []],
    cursor: { idx: cursorIdx },
    // The rest of the uPlot fields are not used by Tooltip
  } as unknown) as uPlot;
}

function renderTooltip(props: Partial<TooltipTestProps> = {}): RenderResult {
  const defaultProps: TooltipTestProps = {
    uPlotInstance: createUPlotInstance(null),
    timezone: 'UTC',
    content: [],
    showTooltipHeader: true,
    // TooltipRenderArgs (not used directly in component but required by type)
    dataIndexes: [],
    seriesIndex: null,
    isPinned: false,
    dismiss: jest.fn(),
    viaSync: false,
  } as TooltipTestProps;

  return render(
    <VirtuosoMockContext.Provider value={{ viewportHeight: 300, itemHeight: 38 }}>
      <Tooltip {...defaultProps} {...props} />
    </VirtuosoMockContext.Provider>,
  );
}

describe('Tooltip', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    mockUseIsDarkMode.mockReturnValue(false);
    mockTotalListHeight = 200;
  });

  it('renders header title when showTooltipHeader is true and cursor index is present', () => {
    const uPlotInstance = createUPlotInstance(0);

    renderTooltip({ uPlotInstance });

    const expectedTitle = dayjs(1 * 1000)
      .tz('UTC')
      .format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);

    expect(screen.getByText(expectedTitle)).toBeInTheDocument();
  });

  it('does not render header when showTooltipHeader is false', () => {
    const uPlotInstance = createUPlotInstance(0);

    renderTooltip({ uPlotInstance, showTooltipHeader: false });

    const unexpectedTitle = dayjs(1 * 1000)
      .tz('UTC')
      .format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);

    expect(screen.queryByText(unexpectedTitle)).not.toBeInTheDocument();
  });

  it('renders lightMode class when dark mode is disabled', () => {
    const uPlotInstance = createUPlotInstance(null);
    mockUseIsDarkMode.mockReturnValue(false);

    renderTooltip({ uPlotInstance });

    const container = screen.getByTestId('uplot-tooltip-container');

    expect(container).toHaveClass('lightMode');
    expect(container).not.toHaveClass('darkMode');
  });

  it('renders darkMode class when dark mode is enabled', () => {
    const uPlotInstance = createUPlotInstance(null);
    mockUseIsDarkMode.mockReturnValue(true);

    renderTooltip({ uPlotInstance });

    const container = screen.getByTestId('uplot-tooltip-container');

    expect(container).toHaveClass('darkMode');
    expect(container).not.toHaveClass('lightMode');
  });

  it('renders tooltip items when content is provided', () => {
    const uPlotInstance = createUPlotInstance(null);
    const content = [createTooltipContent()];

    renderTooltip({ uPlotInstance, content });

    const list = screen.queryByTestId('uplot-tooltip-list');

    expect(list).not.toBeNull();

    const marker = screen.getByTestId('uplot-tooltip-item-marker');
    const itemContent = screen.getByTestId('uplot-tooltip-item-content');

    expect(marker).toHaveStyle({ borderColor: '#ff0000' });
    expect(itemContent).toHaveStyle({ color: '#ff0000', fontWeight: '700' });
    expect(itemContent).toHaveTextContent('Series A: 10');
  });

  it('does not render tooltip list when content is empty', () => {
    const uPlotInstance = createUPlotInstance(null);

    renderTooltip({ uPlotInstance, content: [] });

    const list = screen.queryByTestId('uplot-tooltip-list');

    expect(list).toBeNull();
  });

  it('sets tooltip list height based on content length, height returned by Virtuoso', () => {
    const uPlotInstance = createUPlotInstance(null);
    const content = [createTooltipContent(), createTooltipContent()];

    renderTooltip({ uPlotInstance, content });

    const list = screen.getByTestId('uplot-tooltip-list');
    expect(list).toHaveStyle({ height: '210px' });
  });

  it('sets tooltip list height based on content length when Virtuoso reports 0 height', () => {
    const uPlotInstance = createUPlotInstance(null);
    const content = [createTooltipContent(), createTooltipContent()];
    mockTotalListHeight = 0;

    renderTooltip({ uPlotInstance, content });

    const list = screen.getByTestId('uplot-tooltip-list');
    // Falls back to content length: 2 items * 38px = 76px
    expect(list).toHaveStyle({ height: '76px' });
  });
});
@@ -0,0 +1,277 @@
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import uPlot, { AlignedData, Series } from 'uplot';

import { TooltipContentItem } from '../../types';
import {
  buildTooltipContent,
  FALLBACK_SERIES_COLOR,
  getTooltipBaseValue,
  resolveSeriesColor,
} from '../utils';

jest.mock('components/Graph/yAxisConfig', () => ({
  getToolTipValue: jest.fn(),
}));

const mockGetToolTipValue = getToolTipValue as jest.MockedFunction<
  typeof getToolTipValue
>;

function createUPlotInstance(): uPlot {
  return ({
    data: [],
    cursor: { idx: 0 },
  } as unknown) as uPlot;
}

describe('Tooltip utils', () => {
  describe('resolveSeriesColor', () => {
    it('returns string stroke when provided', () => {
      const u = createUPlotInstance();
      const stroke: Series.Stroke = '#ff0000';

      const color = resolveSeriesColor(stroke, u, 1);

      expect(color).toBe('#ff0000');
    });

    it('returns result of stroke function when provided', () => {
      const u = createUPlotInstance();
      const strokeFn: Series.Stroke = (uInstance, seriesIdx): string =>
        `color-${seriesIdx}-${uInstance.cursor.idx}`;

      const color = resolveSeriesColor(strokeFn, u, 2);

      expect(color).toBe('color-2-0');
    });

    it('returns fallback color when stroke is not provided', () => {
      const u = createUPlotInstance();

      const color = resolveSeriesColor(undefined, u, 1);

      expect(color).toBe(FALLBACK_SERIES_COLOR);
    });
  });

  describe('getTooltipBaseValue', () => {
    it('returns value from aligned data for non-stacked charts', () => {
      const data: AlignedData = [
        [0, 1],
        [10, 20],
      ];

      const result = getTooltipBaseValue({
        data,
        index: 1,
        dataIndex: 1,
        isStackedBarChart: false,
      });

      expect(result).toBe(20);
    });

    it('returns null when value is missing', () => {
      const data: AlignedData = [
        [0, 1],
        [10, null],
      ];

      const result = getTooltipBaseValue({
        data,
        index: 1,
        dataIndex: 1,
      });

      expect(result).toBeNull();
    });

    it('subtracts next visible stacked value for stacked bar charts', () => {
      // data[1] and data[2] contain stacked values at dataIndex 1
      const data: AlignedData = [
        [0, 1],
        [30, 60], // series 1 stacked
        [10, 20], // series 2 stacked
      ];

      const series: Series[] = [
        { label: 'x', show: true } as Series,
        { label: 'A', show: true } as Series,
        { label: 'B', show: true } as Series,
      ];

      const result = getTooltipBaseValue({
        data,
        index: 1,
        dataIndex: 1,
        isStackedBarChart: true,
        series,
      });

      // 60 (stacked at series 1) - 20 (next visible stacked) = 40
      expect(result).toBe(40);
    });

    it('skips hidden series when computing base value for stacked charts', () => {
      const data: AlignedData = [
        [0, 1],
        [30, 60], // series 1 stacked
        [10, 20], // series 2 stacked but hidden
        [5, 10], // series 3 stacked and visible
      ];

      const series: Series[] = [
        { label: 'x', show: true } as Series,
        { label: 'A', show: true } as Series,
        { label: 'B', show: false } as Series,
        { label: 'C', show: true } as Series,
      ];

      const result = getTooltipBaseValue({
        data,
        index: 1,
        dataIndex: 1,
        isStackedBarChart: true,
        series,
      });

      // 60 (stacked at series 1) - 10 (next *visible* stacked, series 3) = 50
      expect(result).toBe(50);
    });

    it('does not subtract when there is no next visible series', () => {
      const data: AlignedData = [
        [0, 1],
        [10, 20], // series 1
        [5, (null as unknown) as number], // series 2 missing
      ];

      const series: Series[] = [
        { label: 'x', show: true } as Series,
        { label: 'A', show: true } as Series,
        { label: 'B', show: false } as Series,
      ];

      const result = getTooltipBaseValue({
        data,
        index: 1,
        dataIndex: 1,
        isStackedBarChart: true,
        series,
      });

      expect(result).toBe(20);
    });
  });

  describe('buildTooltipContent', () => {
    const yAxisUnit = 'ms';
    const decimalPrecision: PrecisionOption = 2;

    beforeEach(() => {
      mockGetToolTipValue.mockReset();
      mockGetToolTipValue.mockImplementation(
        (value: string | number): string => `formatted-${value}`,
      );
    });

    function createSeriesConfig(): Series[] {
      return [
        { label: 'x', show: true } as Series,
        { label: 'A', show: true, stroke: '#ff0000' } as Series,
        {
          label: 'B',
          show: true,
          stroke: (_u: uPlot, idx: number): string => `color-${idx}`,
        } as Series,
        { label: 'C', show: false, stroke: '#00ff00' } as Series,
      ];
    }

    it('builds tooltip content with active series first', () => {
      const data: AlignedData = [[0], [10], [20], [30]];
      const series = createSeriesConfig();
      const dataIndexes = [null, 0, 0, 0];
      const u = createUPlotInstance();

      const result = buildTooltipContent({
        data,
        series,
        dataIndexes,
        activeSeriesIndex: 2,
        uPlotInstance: u,
        yAxisUnit,
        decimalPrecision,
      });

      expect(result).toHaveLength(2);
      // Active (series index 2) should come first
      expect(result[0]).toMatchObject<Partial<TooltipContentItem>>({
        label: 'B',
        value: 20,
        tooltipValue: 'formatted-20',
        color: 'color-2',
        isActive: true,
      });
      expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
        label: 'A',
        value: 10,
        tooltipValue: 'formatted-10',
        color: '#ff0000',
        isActive: false,
      });
    });

    it('skips series with null data index or non-finite values', () => {
      const data: AlignedData = [[0], [42], [Infinity]];
      const series: Series[] = [
        { label: 'x', show: true } as Series,
        { label: 'A', show: true, stroke: '#ff0000' } as Series,
        { label: 'B', show: true, stroke: '#00ff00' } as Series,
      ];
      const dataIndexes = [null, 0, null];
      const u = createUPlotInstance();

      const result = buildTooltipContent({
        data,
        series,
        dataIndexes,
        activeSeriesIndex: 1,
        uPlotInstance: u,
        yAxisUnit,
        decimalPrecision,
      });

      // Only the finite, non-null value from series A should be included
      expect(result).toHaveLength(1);
      expect(result[0].label).toBe('A');
    });

    it('uses stacked base values when building content for stacked bar charts', () => {
      const data: AlignedData = [[0], [60], [30]];
      const series: Series[] = [
        { label: 'x', show: true } as Series,
        { label: 'A', show: true, stroke: '#ff0000' } as Series,
        { label: 'B', show: true, stroke: '#00ff00' } as Series,
      ];
      const dataIndexes = [null, 0, 0];
      const u = createUPlotInstance();

      const result = buildTooltipContent({
        data,
        series,
        dataIndexes,
        activeSeriesIndex: 1,
        uPlotInstance: u,
        yAxisUnit,
        decimalPrecision,
        isStackedBarChart: true,
      });

      // baseValue for series 1 at index 0 should be 60 - 30 (next visible) = 30
      expect(result[0].value).toBe(30);
      expect(result[1].value).toBe(30);
    });
  });
});
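The stacked-chart cases above rest on how stacked bar data is prepared: each series stores cumulative totals, so the raw per-series value is recovered by subtracting the stacked value of the next visible series. A small worked example using the numbers from these tests (illustrative only, not part of the diff):

// stacked totals at one x-position: series A holds 60, series B holds 20
const stackedA = 60;
const stackedB = 20;
const rawA = stackedA - stackedB; // 40, the value the tooltip shows for A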
@@ -4,7 +4,7 @@ import uPlot, { AlignedData, Series } from 'uplot';

import { TooltipContentItem } from '../types';

const FALLBACK_SERIES_COLOR = '#000000';
export const FALLBACK_SERIES_COLOR = '#000000';

export function resolveSeriesColor(
  stroke: Series.Stroke | undefined,
@@ -1,53 +0,0 @@
export interface HostsProps {
  name: string;
  is_default: boolean;
}

export interface RegionProps {
  id: string;
  name: string;
  category: string;
  dns: string;
  created_at: string;
  updated_at: string;
}

export interface ClusterProps {
  id: string;
  name: string;
  cloud_account_id: string;
  cloud_region: string;
  address: string;
  region: RegionProps;
}

export interface DeploymentData {
  id: string;
  name: string;
  email: string;
  state: string;
  tier: string;
  user: string;
  password: string;
  created_at: string;
  updated_at: string;
  cluster_id: string;
  hosts: HostsProps[];
  cluster: ClusterProps;
}

export interface DeploymentsDataProps {
  status: string;
  data: DeploymentData;
}

export type PayloadProps = {
  status: string;
  data: string;
};

export interface UpdateCustomDomainProps {
  data: {
    name: string;
  };
}
@@ -1,11 +0,0 @@
export interface UpdateProfileProps {
  reasons_for_interest_in_signoz: string[];
  uses_otel: boolean;
  has_existing_observability_tool: boolean;
  existing_observability_tool: string;
  logs_scale_per_day_in_gb: number;
  number_of_services: number;
  number_of_hosts: number;
  where_did_you_discover_signoz: string;
  timeline_for_migrating_to_signoz: string;
}
frontend/src/utils/navigateWithCtrlMetaKey.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import React from 'react';

export function isMetaOrCtrlKey(
  event: React.MouseEvent | React.KeyboardEvent | MouseEvent | KeyboardEvent,
): boolean {
  return event.metaKey || event.ctrlKey;
}

export function navigateWithCtrlMetaKey(
  event: React.MouseEvent | MouseEvent,
  url: string,
  navigateFn: (url: string) => void,
): void {
  if (isMetaOrCtrlKey(event)) {
    window.open(url, '_blank');
    return;
  }
  navigateFn(url);
}
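A minimal usage sketch of the new helper, assuming a react-router-style history.push as the navigate function (the click handler and route below are hypothetical, not part of this diff):

import { navigateWithCtrlMetaKey } from 'utils/navigateWithCtrlMetaKey';
import history from 'lib/history';

function onServiceClick(event: React.MouseEvent): void {
  // Cmd/Ctrl+click opens the URL in a new tab; a plain click navigates in place
  navigateWithCtrlMetaKey(event, '/services/my-service', history.push);
}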
go.mod (287 lines changed)
@@ -3,22 +3,23 @@ module github.com/SigNoz/signoz
go 1.24.0

require (
    dario.cat/mergo v1.0.2
    dario.cat/mergo v1.0.1
    github.com/AfterShip/clickhouse-sql-parser v0.4.16
    github.com/ClickHouse/clickhouse-go/v2 v2.40.1
    github.com/DATA-DOG/go-sqlmock v1.5.2
    github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
    github.com/SigNoz/signoz-otel-collector v0.142.1-rc.1
    github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
    github.com/antlr4-go/antlr/v4 v4.13.1
    github.com/antonmedv/expr v1.15.3
    github.com/bytedance/sonic v1.14.1
    github.com/cespare/xxhash/v2 v2.3.0
    github.com/coreos/go-oidc/v3 v3.16.0
    github.com/coreos/go-oidc/v3 v3.14.1
    github.com/dgraph-io/ristretto/v2 v2.3.0
    github.com/dustin/go-humanize v1.0.1
    github.com/gin-gonic/gin v1.11.0
    github.com/go-co-op/gocron v1.30.1
    github.com/go-openapi/runtime v0.28.0
    github.com/go-openapi/strfmt v0.24.0
    github.com/go-openapi/strfmt v0.23.0
    github.com/go-redis/redismock/v9 v9.2.0
    github.com/go-viper/mapstructure/v2 v2.4.0
    github.com/gojek/heimdall/v7 v7.0.3
@@ -29,22 +30,22 @@ require (
    github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
    github.com/huandu/go-sqlbuilder v1.35.0
    github.com/jackc/pgx/v5 v5.7.6
    github.com/json-iterator/go v1.1.13-0.20220915233716-71ac16282d12
    github.com/json-iterator/go v1.1.12
    github.com/knadh/koanf v1.5.0
    github.com/knadh/koanf/v2 v2.3.0
    github.com/mailru/easyjson v0.9.0
    github.com/open-telemetry/opamp-go v0.22.0
    github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.142.0
    github.com/knadh/koanf/v2 v2.2.0
    github.com/mailru/easyjson v0.7.7
    github.com/open-telemetry/opamp-go v0.19.0
    github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
    github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
    github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
    github.com/opentracing/opentracing-go v1.2.0
    github.com/pkg/errors v0.9.1
    github.com/prometheus/alertmanager v0.28.1
    github.com/prometheus/client_golang v1.23.2
    github.com/prometheus/common v0.67.4
    github.com/prometheus/prometheus v0.308.0
    github.com/prometheus/common v0.66.1
    github.com/prometheus/prometheus v0.304.1
    github.com/redis/go-redis/extra/redisotel/v9 v9.15.1
    github.com/redis/go-redis/v9 v9.17.2
    github.com/redis/go-redis/v9 v9.15.1
    github.com/rs/cors v1.11.1
    github.com/russellhaering/gosaml2 v0.9.0
    github.com/russellhaering/goxmldsig v1.2.0
@@ -53,7 +54,7 @@ require (
    github.com/sethvargo/go-password v0.2.0
    github.com/smartystreets/goconvey v1.8.1
    github.com/soheilhy/cmux v0.1.5
    github.com/spf13/cobra v1.10.2
    github.com/spf13/cobra v1.10.1
    github.com/srikanthccv/ClickHouse-go-mock v0.13.0
    github.com/stretchr/testify v1.11.1
    github.com/swaggest/jsonschema-go v0.3.78
@@ -63,59 +64,43 @@ require (
    github.com/uptrace/bun/dialect/pgdialect v1.2.9
    github.com/uptrace/bun/dialect/sqlitedialect v1.2.9
    github.com/uptrace/bun/extra/bunotel v1.2.9
    go.opentelemetry.io/collector/confmap v1.48.0
    go.opentelemetry.io/collector/otelcol v0.142.0
    go.opentelemetry.io/collector/pdata v1.48.0
    go.opentelemetry.io/collector/confmap v1.34.0
    go.opentelemetry.io/collector/otelcol v0.128.0
    go.opentelemetry.io/collector/pdata v1.34.0
    go.opentelemetry.io/contrib/config v0.10.0
    go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0
    go.opentelemetry.io/otel v1.39.0
    go.opentelemetry.io/otel/metric v1.39.0
    go.opentelemetry.io/otel/sdk v1.39.0
    go.opentelemetry.io/otel/trace v1.39.0
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
    go.opentelemetry.io/otel v1.38.0
    go.opentelemetry.io/otel/metric v1.38.0
    go.opentelemetry.io/otel/sdk v1.38.0
    go.opentelemetry.io/otel/trace v1.38.0
    go.uber.org/multierr v1.11.0
    go.uber.org/zap v1.27.1
    go.uber.org/zap v1.27.0
    golang.org/x/crypto v0.46.0
    golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
    golang.org/x/net v0.48.0
    golang.org/x/oauth2 v0.33.0
    golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
    golang.org/x/net v0.47.0
    golang.org/x/oauth2 v0.30.0
    golang.org/x/sync v0.19.0
    golang.org/x/text v0.32.0
    google.golang.org/protobuf v1.36.10
    google.golang.org/protobuf v1.36.9
    gopkg.in/yaml.v2 v2.4.0
    gopkg.in/yaml.v3 v3.0.1
    k8s.io/apimachinery v0.35.0-alpha.0
    k8s.io/apimachinery v0.34.0
    modernc.org/sqlite v1.39.1
)

require (
    github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
    github.com/aws/aws-sdk-go-v2/config v1.32.1 // indirect
    github.com/aws/aws-sdk-go-v2/credentials v1.19.1 // indirect
    github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
    github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
    github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
    github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
    github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
    github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
    github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 // indirect
    github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 // indirect
    github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 // indirect
    github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 // indirect
    github.com/aws/smithy-go v1.23.2 // indirect
    github.com/bytedance/gopkg v0.1.3 // indirect
    github.com/bytedance/sonic v1.14.1 // indirect
    github.com/bytedance/sonic/loader v0.3.0 // indirect
    github.com/cloudwego/base64x v0.1.6 // indirect
    github.com/gabriel-vasile/mimetype v1.4.8 // indirect
    github.com/go-playground/locales v0.14.1 // indirect
    github.com/go-playground/universal-translator v0.18.1 // indirect
    github.com/go-playground/validator/v10 v10.27.0 // indirect
    github.com/goccy/go-yaml v1.19.0 // indirect
    github.com/goccy/go-yaml v1.18.0 // indirect
    github.com/leodido/go-urn v1.4.0 // indirect
    github.com/mattn/go-isatty v0.0.20 // indirect
    github.com/ncruces/go-strftime v0.1.9 // indirect
    github.com/prometheus/client_golang/exp v0.0.0-20250914183048-a974e0d45e0a // indirect
    github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
    github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
    github.com/swaggest/refl v1.4.0 // indirect
@@ -123,27 +108,24 @@ require (
    github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
    github.com/ugorji/go/codec v1.3.0 // indirect
    github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
    go.opentelemetry.io/collector/client v1.48.0 // indirect
    go.opentelemetry.io/collector/config/configoptional v1.48.0 // indirect
    go.opentelemetry.io/collector/config/configretry v1.48.0 // indirect
    go.opentelemetry.io/collector/exporter/exporterhelper v0.142.0 // indirect
    go.opentelemetry.io/collector/pdata/xpdata v0.142.0 // indirect
    go.yaml.in/yaml/v2 v2.4.3 // indirect
    go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
    go.yaml.in/yaml/v2 v2.4.2 // indirect
    golang.org/x/arch v0.20.0 // indirect
    golang.org/x/tools/godoc v0.1.0-deprecated // indirect
    modernc.org/libc v1.66.10 // indirect
    modernc.org/mathutil v1.7.1 // indirect
    modernc.org/memory v1.11.0 // indirect
)

require (
    cel.dev/expr v0.25.1 // indirect
    cloud.google.com/go/auth v0.17.0 // indirect
    cel.dev/expr v0.24.0 // indirect
    cloud.google.com/go/auth v0.16.1 // indirect
    cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
    cloud.google.com/go/compute/metadata v0.9.0 // indirect
    github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 // indirect
    github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 // indirect
    github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect
    github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect
    cloud.google.com/go/compute/metadata v0.8.2 // indirect
    github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
    github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
    github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
    github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
    github.com/ClickHouse/ch-go v0.67.0 // indirect
    github.com/Masterminds/squirrel v1.5.4 // indirect
    github.com/Yiling-J/theine-go v0.6.2 // indirect
@@ -151,35 +133,35 @@ require (
    github.com/andybalholm/brotli v1.2.0 // indirect
    github.com/armon/go-metrics v0.4.1 // indirect
    github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
    github.com/aws/aws-sdk-go v1.55.8 // indirect
    github.com/aws/aws-sdk-go v1.55.7 // indirect
    github.com/beevik/etree v1.1.0 // indirect
    github.com/beorn7/perks v1.0.1 // indirect
    github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
    github.com/cenkalti/backoff/v4 v4.3.0 // indirect
    github.com/cenkalti/backoff/v5 v5.0.3 // indirect
    github.com/coder/quartz v0.1.2 // indirect
    github.com/coreos/go-systemd/v22 v22.6.0 // indirect
    github.com/coreos/go-systemd/v22 v22.5.0 // indirect
    github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
    github.com/dennwc/varint v1.0.0 // indirect
    github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
    github.com/docker/go-units v0.5.0 // indirect
    github.com/ebitengine/purego v0.9.1 // indirect
    github.com/ebitengine/purego v0.8.4 // indirect
    github.com/edsrzf/mmap-go v1.2.0 // indirect
    github.com/elastic/lunes v0.2.0 // indirect
    github.com/elastic/lunes v0.1.0 // indirect
    github.com/emirpasic/gods v1.18.1 // indirect
    github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
    github.com/expr-lang/expr v1.17.7
    github.com/expr-lang/expr v1.17.5
    github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
    github.com/fsnotify/fsnotify v1.9.0 // indirect
    github.com/go-faster/city v1.0.1 // indirect
    github.com/go-faster/errors v0.7.1 // indirect
    github.com/go-jose/go-jose/v4 v4.1.3 // indirect
    github.com/go-jose/go-jose/v4 v4.1.1 // indirect
    github.com/go-logr/logr v1.4.3 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
    github.com/go-ole/go-ole v1.3.0 // indirect
    github.com/go-openapi/analysis v0.23.0 // indirect
    github.com/go-openapi/errors v0.22.3 // indirect
    github.com/go-openapi/errors v0.22.0 // indirect
    github.com/go-openapi/jsonpointer v0.21.0 // indirect
    github.com/go-openapi/jsonreference v0.21.0 // indirect
    github.com/go-openapi/loads v0.22.0 // indirect
@@ -196,19 +178,19 @@ require (
    github.com/google/btree v1.1.3 // indirect
    github.com/google/cel-go v0.26.1 // indirect
    github.com/google/s2a-go v0.1.9 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
    github.com/googleapis/gax-go/v2 v2.15.0 // indirect
    github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
    github.com/googleapis/gax-go/v2 v2.14.2 // indirect
    github.com/gopherjs/gopherjs v1.17.2 // indirect
    github.com/grafana/regexp v0.0.0-20250905093917-f7b3be9d1853 // indirect
    github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
    github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
    github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect
    github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
    github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
    github.com/hashicorp/errwrap v1.1.0 // indirect
    github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
    github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
    github.com/hashicorp/go-multierror v1.1.1 // indirect
    github.com/hashicorp/go-sockaddr v1.0.7 // indirect
    github.com/hashicorp/go-version v1.8.0 // indirect
    github.com/hashicorp/go-version v1.7.0 // indirect
    github.com/hashicorp/golang-lru v1.0.2 // indirect
    github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
    github.com/hashicorp/memberlist v0.5.1 // indirect
@@ -224,21 +206,21 @@ require (
    github.com/josharian/intern v1.0.0 // indirect
    github.com/jpillora/backoff v1.0.0 // indirect
    github.com/jtolds/gls v4.20.0+incompatible // indirect
    github.com/klauspost/compress v1.18.2 // indirect
    github.com/klauspost/compress v1.18.0 // indirect
    github.com/klauspost/cpuid/v2 v2.3.0 // indirect
    github.com/kylelemons/godebug v1.1.0 // indirect
    github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
    github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
    github.com/leodido/go-syslog/v4 v4.3.0 // indirect
    github.com/leodido/go-syslog/v4 v4.2.0 // indirect
    github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
    github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
    github.com/magefile/mage v1.15.0 // indirect
    github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
    github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
    github.com/mdlayher/socket v0.5.1 // indirect
    github.com/mdlayher/socket v0.4.1 // indirect
    github.com/mdlayher/vsock v1.2.1 // indirect
    github.com/mfridman/interpolate v0.0.2 // indirect
    github.com/miekg/dns v1.1.68 // indirect
    github.com/miekg/dns v1.1.65 // indirect
    github.com/mitchellh/copystructure v1.2.0 // indirect
    github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
    github.com/mitchellh/reflectwalk v1.0.2 // indirect
@@ -247,14 +229,14 @@ require (
    github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
    github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
    github.com/natefinch/wrap v0.2.0 // indirect
    github.com/oklog/run v1.2.0 // indirect
    github.com/oklog/run v1.1.0 // indirect
    github.com/oklog/ulid v1.3.1 // indirect
    github.com/oklog/ulid/v2 v2.1.1
    github.com/open-feature/go-sdk v1.17.0
    github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.142.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.142.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.142.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.142.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
    github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
    github.com/openfga/openfga v1.10.1
    github.com/paulmach/orb v0.11.1 // indirect
    github.com/pelletier/go-toml/v2 v2.2.4 // indirect
@@ -264,10 +246,10 @@ require (
    github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
    github.com/pressly/goose/v3 v3.25.0 // indirect
    github.com/prometheus/client_model v0.6.2 // indirect
    github.com/prometheus/exporter-toolkit v0.15.0 // indirect
    github.com/prometheus/otlptranslator v1.0.0 // indirect
    github.com/prometheus/procfs v0.19.2 // indirect
    github.com/prometheus/sigv4 v0.3.0 // indirect
    github.com/prometheus/exporter-toolkit v0.14.0 // indirect
    github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
    github.com/prometheus/procfs v0.16.1 // indirect
    github.com/prometheus/sigv4 v0.1.2 // indirect
    github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
    github.com/robfig/cron/v3 v3.0.1 // indirect
    github.com/sagikazarmark/locafero v0.9.0 // indirect
@@ -275,7 +257,7 @@ require (
    github.com/segmentio/asm v1.2.0 // indirect
    github.com/segmentio/backo-go v1.0.1 // indirect
    github.com/sethvargo/go-retry v0.3.0 // indirect
    github.com/shirou/gopsutil/v4 v4.25.11 // indirect
    github.com/shirou/gopsutil/v4 v4.25.5 // indirect
    github.com/shopspring/decimal v1.4.0 // indirect
    github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
    github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
@@ -290,9 +272,9 @@ require (
    github.com/subosito/gotenv v1.6.0 // indirect
    github.com/swaggest/openapi-go v0.2.60
    github.com/tidwall/match v1.1.1 // indirect
    github.com/tidwall/pretty v1.2.1 // indirect
    github.com/tklauser/go-sysconf v0.3.16 // indirect
    github.com/tklauser/numcpus v0.11.0 // indirect
    github.com/tidwall/pretty v1.2.0 // indirect
    github.com/tklauser/go-sysconf v0.3.15 // indirect
    github.com/tklauser/numcpus v0.10.0 // indirect
    github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
    github.com/trivago/tgo v1.0.7 // indirect
    github.com/valyala/fastjson v1.6.4 // indirect
@@ -301,82 +283,83 @@ require (
    github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
    github.com/yusufpapurcu/wmi v1.2.4 // indirect
    github.com/zeebo/xxh3 v1.0.2 // indirect
    go.mongodb.org/mongo-driver v1.17.4 // indirect
    go.opentelemetry.io/auto/sdk v1.2.1 // indirect
    go.opentelemetry.io/collector/component v1.48.0 // indirect
    go.opentelemetry.io/collector/component/componentstatus v0.142.0 // indirect
    go.opentelemetry.io/collector/component/componenttest v0.142.0 // indirect
    go.opentelemetry.io/collector/config/configtelemetry v0.142.0 // indirect
    go.opentelemetry.io/collector/confmap/provider/envprovider v1.48.0 // indirect
    go.opentelemetry.io/collector/confmap/provider/fileprovider v1.48.0 // indirect
    go.opentelemetry.io/collector/confmap/xconfmap v0.142.0 // indirect
    go.opentelemetry.io/collector/connector v0.142.0 // indirect
    go.opentelemetry.io/collector/connector/connectortest v0.142.0 // indirect
    go.opentelemetry.io/collector/connector/xconnector v0.142.0 // indirect
    go.opentelemetry.io/collector/consumer v1.48.0 // indirect
    go.opentelemetry.io/collector/consumer/consumererror v0.142.0 // indirect
    go.opentelemetry.io/collector/consumer/consumertest v0.142.0 // indirect
    go.opentelemetry.io/collector/consumer/xconsumer v0.142.0 // indirect
    go.opentelemetry.io/collector/exporter v1.48.0 // indirect
    go.opentelemetry.io/collector/exporter/exportertest v0.142.0 // indirect
    go.opentelemetry.io/collector/exporter/xexporter v0.142.0 // indirect
    go.opentelemetry.io/collector/extension v1.48.0 // indirect
    go.opentelemetry.io/collector/extension/extensioncapabilities v0.142.0 // indirect
    go.opentelemetry.io/collector/extension/extensiontest v0.142.0 // indirect
    go.opentelemetry.io/collector/extension/xextension v0.142.0 // indirect
    go.opentelemetry.io/collector/featuregate v1.48.0 // indirect
    go.opentelemetry.io/collector/internal/fanoutconsumer v0.142.0 // indirect
    go.opentelemetry.io/collector/internal/telemetry v0.142.0 // indirect
    go.opentelemetry.io/collector/pdata/pprofile v0.142.0 // indirect
    go.opentelemetry.io/collector/pdata/testdata v0.142.0 // indirect
    go.opentelemetry.io/collector/pipeline v1.48.0 // indirect
    go.opentelemetry.io/collector/pipeline/xpipeline v0.142.0 // indirect
    go.opentelemetry.io/collector/processor v1.48.0 // indirect
    go.opentelemetry.io/collector/processor/processorhelper v0.142.0 // indirect
    go.opentelemetry.io/collector/processor/processortest v0.142.0 // indirect
    go.opentelemetry.io/collector/processor/xprocessor v0.142.0 // indirect
    go.opentelemetry.io/collector/receiver v1.48.0 // indirect
    go.opentelemetry.io/collector/receiver/receiverhelper v0.142.0 // indirect
    go.opentelemetry.io/collector/receiver/receivertest v0.142.0 // indirect
    go.opentelemetry.io/collector/receiver/xreceiver v0.142.0 // indirect
    go.opentelemetry.io/collector/semconv v0.128.1-0.20250610090210-188191247685
    go.opentelemetry.io/collector/service v0.142.0 // indirect
    go.opentelemetry.io/collector/service/hostcapabilities v0.142.0 // indirect
    go.opentelemetry.io/contrib/bridges/otelzap v0.13.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect
    go.opentelemetry.io/contrib/otelconf v0.18.0 // indirect
    go.opentelemetry.io/contrib/propagators/b3 v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/prometheus v0.60.0
    go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
    go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.39.0 // indirect
    go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.39.0 // indirect
    go.opentelemetry.io/otel/log v0.15.0 // indirect
    go.opentelemetry.io/otel/sdk/log v0.14.0 // indirect
    go.opentelemetry.io/otel/sdk/metric v1.39.0
    go.opentelemetry.io/proto/otlp v1.9.0 // indirect
    go.mongodb.org/mongo-driver v1.17.1 // indirect
    go.opentelemetry.io/auto/sdk v1.1.0 // indirect
    go.opentelemetry.io/collector/component v1.34.0 // indirect
    go.opentelemetry.io/collector/component/componentstatus v0.128.0 // indirect
    go.opentelemetry.io/collector/component/componenttest v0.128.0 // indirect
    go.opentelemetry.io/collector/config/configtelemetry v0.128.0 // indirect
    go.opentelemetry.io/collector/confmap/provider/envprovider v1.34.0 // indirect
    go.opentelemetry.io/collector/confmap/provider/fileprovider v1.34.0 // indirect
    go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect
    go.opentelemetry.io/collector/connector v0.128.0 // indirect
    go.opentelemetry.io/collector/connector/connectortest v0.128.0 // indirect
    go.opentelemetry.io/collector/connector/xconnector v0.128.0 // indirect
    go.opentelemetry.io/collector/consumer v1.34.0 // indirect
    go.opentelemetry.io/collector/consumer/consumererror v0.128.0 // indirect
    go.opentelemetry.io/collector/consumer/consumertest v0.128.0 // indirect
    go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 // indirect
    go.opentelemetry.io/collector/exporter v0.128.0 // indirect
    go.opentelemetry.io/collector/exporter/exportertest v0.128.0 // indirect
    go.opentelemetry.io/collector/exporter/xexporter v0.128.0 // indirect
    go.opentelemetry.io/collector/extension v1.34.0 // indirect
    go.opentelemetry.io/collector/extension/extensioncapabilities v0.128.0 // indirect
    go.opentelemetry.io/collector/extension/extensiontest v0.128.0 // indirect
    go.opentelemetry.io/collector/extension/xextension v0.128.0 // indirect
    go.opentelemetry.io/collector/featuregate v1.34.0 // indirect
    go.opentelemetry.io/collector/internal/fanoutconsumer v0.128.0 // indirect
    go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect
    go.opentelemetry.io/collector/pdata/pprofile v0.128.0 // indirect
    go.opentelemetry.io/collector/pdata/testdata v0.128.0 // indirect
    go.opentelemetry.io/collector/pipeline v0.128.0 // indirect
    go.opentelemetry.io/collector/pipeline/xpipeline v0.128.0 // indirect
    go.opentelemetry.io/collector/processor v1.34.0 // indirect
    go.opentelemetry.io/collector/processor/processorhelper v0.128.0 // indirect
    go.opentelemetry.io/collector/processor/processortest v0.128.0 // indirect
    go.opentelemetry.io/collector/processor/xprocessor v0.128.0 // indirect
    go.opentelemetry.io/collector/receiver v1.34.0 // indirect
    go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect
    go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect
    go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect
    go.opentelemetry.io/collector/semconv v0.128.0
    go.opentelemetry.io/collector/service v0.128.0 // indirect
    go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect
    go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect
    go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 // indirect
    go.opentelemetry.io/contrib/otelconf v0.16.0 // indirect
    go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.36.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 // indirect
    go.opentelemetry.io/otel/exporters/prometheus v0.58.0
    go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect
    go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect
    go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
    go.opentelemetry.io/otel/log v0.12.2 // indirect
    go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect
    go.opentelemetry.io/otel/sdk/metric v1.38.0
    go.opentelemetry.io/proto/otlp v1.8.0 // indirect
    go.uber.org/atomic v1.11.0 // indirect
    go.uber.org/mock v0.6.0 // indirect
    go.yaml.in/yaml/v3 v3.0.4 // indirect
    golang.org/x/mod v0.30.0 // indirect
    golang.org/x/sys v0.39.0 // indirect
    golang.org/x/time v0.14.0 // indirect
    golang.org/x/time v0.11.0 // indirect
    golang.org/x/tools v0.39.0 // indirect
    gonum.org/v1/gonum v0.16.0 // indirect
    google.golang.org/api v0.257.0
    google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect
    google.golang.org/grpc v1.77.0 // indirect
    google.golang.org/api v0.236.0
    google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
    google.golang.org/grpc v1.75.1 // indirect
    gopkg.in/telebot.v3 v3.3.8 // indirect
    k8s.io/client-go v0.34.2 // indirect
    k8s.io/client-go v0.34.0 // indirect
    k8s.io/klog/v2 v2.130.1 // indirect
    k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect
|
||||
sigs.k8s.io/yaml v1.6.0 // indirect
|
||||
)
|
||||
|
||||
replace github.com/expr-lang/expr => github.com/SigNoz/expr v1.17.7-beta
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/gorilla/mux"
)
@@ -20,6 +21,7 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
		ResponseContentType: "",
		SuccessStatusCode: http.StatusCreated,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleEditor),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}
@@ -35,6 +37,7 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
		ResponseContentType: "",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}

@@ -20,6 +20,7 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/modules/session"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/SigNoz/signoz/pkg/zeus"
@@ -46,6 +47,7 @@ type provider struct {
	fieldsHandler fields.Handler
	authzHandler authz.Handler
	zeusHandler zeus.Handler
	querierHandler querier.Handler
}

func NewFactory(
@@ -66,6 +68,7 @@ func NewFactory(
	fieldsHandler fields.Handler,
	authzHandler authz.Handler,
	zeusHandler zeus.Handler,
	querierHandler querier.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
	return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
		return newProvider(
@@ -89,6 +92,7 @@ func NewFactory(
			fieldsHandler,
			authzHandler,
			zeusHandler,
			querierHandler,
		)
	})
}
@@ -114,6 +118,7 @@ func newProvider(
	fieldsHandler fields.Handler,
	authzHandler authz.Handler,
	zeusHandler zeus.Handler,
	querierHandler querier.Handler,
) (apiserver.APIServer, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
	router := mux.NewRouter().UseEncodedPath()
@@ -137,6 +142,7 @@ func newProvider(
		fieldsHandler: fieldsHandler,
		authzHandler: authzHandler,
		zeusHandler: zeusHandler,
		querierHandler: querierHandler,
	}

	provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -209,6 +215,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
		return err
	}

	if err := provider.addQuerierRoutes(router); err != nil {
		return err
	}

	return nil
}

471	pkg/apiserver/signozapiserver/querier.go	Normal file
@@ -0,0 +1,471 @@
package signozapiserver

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/gorilla/mux"
)

func (provider *provider) addQuerierRoutes(router *mux.Router) error {
	if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
		ID: "QueryRangeV5",
		Tags: []string{"querier"},
		Summary: "Query range",
		Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
		Request: new(qbtypes.QueryRangeRequest),
		RequestContentType: "application/json",
		RequestExamples: []handler.OpenAPIExample{
			{
				Name: "traces_time_series",
				Summary: "Time series: count spans grouped by service",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "span_count"},
									},
									"stepInterval": "60s",
									"filter": map[string]any{"expression": "service.name = 'frontend'"},
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
									"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
									"limit": 10,
								},
							},
						},
					},
				},
			},
			{
				Name: "logs_time_series",
				Summary: "Time series: count error logs grouped by service",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "logs",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "log_count"},
									},
									"stepInterval": "60s",
									"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
									"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
									"limit": 10,
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_gauge_time_series",
				Summary: "Time series: latest gauge value averaged across series",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
									},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_rate_time_series",
				Summary: "Time series: rate of cumulative counter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
									},
									"stepInterval": 120,
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_histogram_time_series",
				Summary: "Time series: p99 latency from histogram",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
									},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
						},
					},
				},
			},
			{
				Name: "logs_raw",
				Summary: "Raw: fetch raw log records",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "raw",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "logs",
									"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
									"selectFields": []any{
										map[string]any{"name": "body", "fieldContext": "log"},
										map[string]any{"name": "service.name", "fieldContext": "resource"},
									},
									"order": []any{
										map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
										map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
									},
									"limit": 50,
									"offset": 0,
								},
							},
						},
					},
				},
			},
			{
				Name: "traces_raw",
				Summary: "Raw: fetch raw span records",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "raw",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
									"selectFields": []any{
										map[string]any{"name": "name", "fieldContext": "span"},
										map[string]any{"name": "duration_nano", "fieldContext": "span"},
									},
									"order": []any{
										map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
									},
									"limit": 100,
								},
							},
						},
					},
				},
			},
			{
				Name: "traces_scalar",
				Summary: "Scalar: total span count",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "span_count"},
									},
									"filter": map[string]any{"expression": "service.name = 'frontend'"},
								},
							},
						},
					},
				},
			},
			{
				Name: "logs_scalar",
				Summary: "Scalar: total error log count",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "logs",
									"aggregations": []any{
										map[string]any{"expression": "count()", "alias": "error_count"},
									},
									"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
								},
							},
						},
					},
				},
			},
			{
				Name: "metrics_scalar",
				Summary: "Scalar: single reduced metric value",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "metrics",
									"aggregations": []any{
										map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
									},
									"stepInterval": "60s",
								},
							},
						},
					},
				},
			},
			{
				Name: "formula",
				Summary: "Formula: error rate from two trace queries",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "A",
									"signal": "traces",
									"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
							map[string]any{
								"type": "builder_query",
								"spec": map[string]any{
									"name": "B",
									"signal": "traces",
									"aggregations": []any{map[string]any{"expression": "count()"}},
									"stepInterval": "60s",
									"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
								},
							},
							map[string]any{
								"type": "builder_formula",
								"spec": map[string]any{
									"name": "error_rate",
									"expression": "A / B * 100",
								},
							},
						},
					},
				},
			},
			{
				Name: "promql",
				Summary: "PromQL: request rate with UTF-8 metric name",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "promql",
								"spec": map[string]any{
									"name": "request_rate",
									"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
									"step": 60,
								},
							},
						},
					},
				},
			},
			{
				Name: "clickhouse_sql_traces_time_series",
				Summary: "ClickHouse SQL: traces time series with resource filter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "time_series",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "clickhouse_sql",
								"spec": map[string]any{
									"name": "span_rate",
									"query": "WITH __resource_filter AS (" +
										" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
										" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
										" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
										" FROM signoz_traces.distributed_signoz_index_v3" +
										" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
										" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
										" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
										" GROUP BY ts ORDER BY ts",
								},
							},
						},
					},
				},
			},
			{
				Name: "clickhouse_sql_logs_raw",
				Summary: "ClickHouse SQL: raw logs with resource filter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "raw",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "clickhouse_sql",
								"spec": map[string]any{
									"name": "recent_errors",
									"query": "WITH __resource_filter AS (" +
										" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
										" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
										" ) SELECT timestamp, body" +
										" FROM signoz_logs.distributed_logs_v2" +
										" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
										" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
										" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
										" AND severity_text = 'ERROR'" +
										" ORDER BY timestamp DESC LIMIT 100",
								},
							},
						},
					},
				},
			},
			{
				Name: "clickhouse_sql_traces_scalar",
				Summary: "ClickHouse SQL: scalar aggregate with resource filter",
				Value: map[string]any{
					"schemaVersion": "v1",
					"start": 1640995200000,
					"end": 1640998800000,
					"requestType": "scalar",
					"compositeQuery": map[string]any{
						"queries": []any{
							map[string]any{
								"type": "clickhouse_sql",
								"spec": map[string]any{
									"name": "total_spans",
									"query": "WITH __resource_filter AS (" +
										" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
										" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
										" ) SELECT count() AS value" +
										" FROM signoz_traces.distributed_signoz_index_v3" +
										" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
										" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
										" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
								},
							},
						},
					},
				},
			},
		},
		Response: new(qbtypes.QueryRangeResponse),
		ResponseContentType: "application/json",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	if err := router.Handle("/api/v5/substitute_vars", handler.New(provider.authZ.ViewAccess(provider.querierHandler.ReplaceVariables), handler.OpenAPIDef{
		ID: "ReplaceVariables",
		Tags: []string{"querier"},
		Summary: "Replace variables",
		Description: "Replace variables in a query",
		Request: new(qbtypes.QueryRangeRequest),
		RequestContentType: "application/json",
		Response: new(qbtypes.QueryRangeRequest),
		ResponseContentType: "application/json",
		SuccessStatusCode: http.StatusOK,
		ErrorStatusCodes: []int{http.StatusBadRequest},
		SecuritySchemes: newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	return nil
}
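Note: for orientation, a minimal client-side sketch of calling the route registered above. The path, the viewer-role requirement, and the payload (taken from the `traces_scalar` request example) come from this diff; the base URL and the `SIGNOZ-API-KEY` header are assumptions about a typical deployment, not part of the change.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Payload copied from the "traces_scalar" example registered above.
	payload := []byte(`{
		"schemaVersion": "v1",
		"start": 1640995200000,
		"end": 1640998800000,
		"requestType": "scalar",
		"compositeQuery": {"queries": [{
			"type": "builder_query",
			"spec": {
				"name": "A",
				"signal": "traces",
				"aggregations": [{"expression": "count()", "alias": "span_count"}],
				"filter": {"expression": "service.name = 'frontend'"}
			}
		}]}
	}`)

	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // assumed auth header; any viewer-scoped key should do

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```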
@@ -76,7 +76,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype

	// add the paths that are not promoted but have indexes
	for path, indexes := range aggr {
		path := strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
		path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
		path = telemetrytypes.BodyJSONStringSearchPrefix + path
		response = append(response, promotetypes.PromotePath{
			Path: path,
@@ -156,7 +156,7 @@ func (m *module) PromoteAndIndexPaths(
		}
	}
	if len(it.Indexes) > 0 {
		parentColumn := telemetrylogs.LogsV2BodyV2Column
		parentColumn := telemetrylogs.LogsV2BodyJSONColumn
		// if the path is already promoted or is being promoted, add it to the promoted column
		if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
			parentColumn = telemetrylogs.LogsV2BodyPromotedColumn

@@ -87,24 +87,6 @@ func (client *client) Read(ctx context.Context, query *prompb.Query, sortSeries
	return remote.FromQueryResult(sortSeries, res), nil
}

func (c *client) ReadMultiple(ctx context.Context, queries []*prompb.Query, sortSeries bool) (storage.SeriesSet, error) {
	if len(queries) == 0 {
		return storage.EmptySeriesSet(), nil
	}
	if len(queries) == 1 {
		return c.Read(ctx, queries[0], sortSeries)
	}
	sets := make([]storage.SeriesSet, 0, len(queries))
	for _, q := range queries {
		ss, err := c.Read(ctx, q, sortSeries)
		if err != nil {
			return nil, err
		}
		sets = append(sets, ss)
	}
	return storage.NewMergeSeriesSet(sets, 0, storage.ChainedSeriesMerge), nil
}

func (client *client) queryToClickhouseQuery(_ context.Context, query *prompb.Query, metricName string, subQuery bool) (string, []any, error) {
	var clickHouseQuery string
	var conditions []string

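Note: the `ReadMultiple` helper removed above fanned each remote-read query out to `Read` and merged the per-query results with `storage.NewMergeSeriesSet` and `ChainedSeriesMerge`. As a rough sketch of what a consumer of such a merged set looks like, assuming the current prometheus/storage iterator API (where `Iterator` takes a reusable `chunkenc.Iterator`):

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/tsdb/chunkenc"
)

// drainSeriesSet is an illustrative consumer of a merged storage.SeriesSet,
// e.g. the set ReadMultiple used to return. Not part of this change.
func drainSeriesSet(ss storage.SeriesSet) error {
	var it chunkenc.Iterator
	for ss.Next() {
		series := ss.At()
		fmt.Println("series:", series.Labels().String())
		it = series.Iterator(it) // reuse the iterator across series
		for it.Next() == chunkenc.ValFloat {
			ts, v := it.At()
			fmt.Printf("  %d -> %f\n", ts, v)
		}
		if err := it.Err(); err != nil {
			return err
		}
	}
	return ss.Err()
}
```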
@@ -2,7 +2,6 @@ package prometheus

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

@@ -16,24 +15,30 @@ func RemoveExtraLabels(res *promql.Result, labelsToRemove ...string) error {
		toRemove[l] = struct{}{}
	}

	dropLabels := func(metric labels.Labels) labels.Labels {
		b := labels.NewBuilder(metric)
		for name := range toRemove {
			b.Del(name)
		}
		return b.Labels()
	}

	switch res.Value.(type) {
	case promql.Vector:
		value := res.Value.(promql.Vector)
		for i := range value {
			(value)[i].Metric = dropLabels((value)[i].Metric)
			series := &(value)[i]
			dst := series.Metric[:0]
			for _, lbl := range series.Metric {
				if _, drop := toRemove[lbl.Name]; !drop {
					dst = append(dst, lbl)
				}
			}
			series.Metric = dst
		}
	case promql.Matrix:
		value := res.Value.(promql.Matrix)
		for i := range value {
			(value)[i].Metric = dropLabels((value)[i].Metric)
			series := &(value)[i]
			dst := series.Metric[:0]
			for _, lbl := range series.Metric {
				if _, drop := toRemove[lbl.Name]; !drop {
					dst = append(dst, lbl)
				}
			}
			series.Metric = dst
		}
	case promql.Scalar:
		return nil

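Note: the replacement loops above use Go's in-place filtering idiom — reslicing to length zero keeps the backing array, so labels are compacted without the per-series allocations of the removed `labels.NewBuilder` closure. The idiom in isolation, with plain strings standing in for labels:

```go
package main

import "fmt"

func main() {
	lbls := []string{"__name__", "le", "service.name", "__temporality__"}
	drop := map[string]struct{}{"le": {}, "__temporality__": {}}

	// dst shares lbls's backing array; appends overwrite it from index 0.
	dst := lbls[:0]
	for _, l := range lbls {
		if _, ok := drop[l]; !ok {
			dst = append(dst, l)
		}
	}
	fmt.Println(dst) // [__name__ service.name]
}
```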
@@ -21,18 +21,17 @@ import (
	"github.com/SigNoz/signoz/pkg/variables"
)

type API struct {
type handler struct {
	set factory.ProviderSettings
	analytics analytics.Analytics
	querier Querier
}

func NewAPI(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) *API {
	return &API{set: set, querier: querier, analytics: analytics}
func NewHandler(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) Handler {
	return &handler{set: set, querier: querier, analytics: analytics}
}

func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {

func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	claims, err := authtypes.ClaimsFromContext(ctx)
@@ -53,7 +52,7 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {

	queryJSON, _ := json.Marshal(queryRangeRequest)

	a.set.Logger.ErrorContext(ctx, "panic in QueryRange",
	handler.set.Logger.ErrorContext(ctx, "panic in QueryRange",
		"error", r,
		"user", claims.UserID,
		"payload", string(queryJSON),
@@ -79,17 +78,17 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
		return
	}

	queryRangeResponse, err := a.querier.QueryRange(ctx, orgID, &queryRangeRequest)
	queryRangeResponse, err := handler.querier.QueryRange(ctx, orgID, &queryRangeRequest)
	if err != nil {
		render.Error(rw, err)
		return
	}

	a.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
	handler.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)

	render.Success(rw, http.StatusOK, queryRangeResponse)
}
func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	// get the param from url and add it to body
@@ -153,7 +152,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {

	queryJSON, _ := json.Marshal(queryRangeRequest)

	a.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
	handler.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
		"error", r,
		"user", claims.UserID,
		"payload", string(queryJSON),
@@ -193,7 +192,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
	flusher.Flush()

	client := &qbtypes.RawStream{Name: req.RemoteAddr, Logs: make(chan *qbtypes.RawRow, 1000), Done: make(chan *bool), Error: make(chan error)}
	go a.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
	go handler.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)

	for {
		select {
@@ -220,7 +219,7 @@ func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {

// TODO(srikanthccv): everything done here can be done on frontend as well
// For the time being I am adding a helper function
func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {

	var queryRangeRequest qbtypes.QueryRangeRequest
	if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
@@ -272,7 +271,7 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	render.Success(rw, http.StatusOK, queryRangeRequest)
}

func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
func (handler *handler) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		return
@@ -305,8 +304,9 @@ func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEv
	}

	if !event.HasData {
		a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
		handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
		return
	}
	a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)

	handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}

@@ -10,9 +10,11 @@ import (

	"github.com/ClickHouse/clickhouse-go/v2"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/bytedance/sonic"
)

type builderQuery[T any] struct {
@@ -248,6 +250,40 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
		return nil, err
	}

	// merge body_json and promoted into body
	if q.spec.Signal == telemetrytypes.SignalLogs {
		switch typedPayload := payload.(type) {
		case *qbtypes.RawData:
			for _, rr := range typedPayload.Rows {
				seeder := func() error {
					body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
					if !ok {
						return nil
					}
					promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
					if !ok {
						return nil
					}
					seed(promoted, body)
					str, err := sonic.MarshalString(body)
					if err != nil {
						return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
					}
					rr.Data["body"] = str
					return nil
				}
				err := seeder()
				if err != nil {
					return nil, err
				}

				delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
				delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
			}
			payload = typedPayload
		}
	}

	return &qbtypes.Result{
		Type: q.kind,
		Value: payload,
@@ -375,3 +411,18 @@ func decodeCursor(cur string) (int64, error) {
	}
	return strconv.ParseInt(string(b), 10, 64)
}

func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

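Note: a standalone sketch of seed's merge semantics (the function body is copied verbatim from the hunk above): keys present only in promoted are copied into body, nested maps existing on both sides are merged recursively, and on scalar conflicts the body value wins.

```go
package main

import "fmt"

// seed copied from the change above: merge promoted into body, body wins on conflicts.
func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

func main() {
	body := map[string]any{"level": "error", "ctx": map[string]any{"a": 1}}
	promoted := map[string]any{"level": "info", "host": "web-1", "ctx": map[string]any{"b": 2}}

	seed(promoted, body)
	fmt.Println(body)
	// map[ctx:map[a:1 b:2] host:web-1 level:error]
	// "level" keeps the body value; "host" is copied over; "ctx" is merged recursively.
}
```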
@@ -14,6 +14,7 @@ import (
	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/bytedance/sonic"
)

var (
@@ -393,11 +394,17 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {

	// de-reference the typed pointer to any
	val := reflect.ValueOf(cellPtr).Elem().Interface()
	// Post-process JSON columns: normalize into String value

	// Post-process JSON columns: normalize into structured values
	if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
		switch x := val.(type) {
		case []byte:
			val = string(x)
			if len(x) > 0 {
				var v any
				if err := sonic.Unmarshal(x, &v); err == nil {
					val = v
				}
			}
		default:
			// already a structured type (map[string]any, []any, etc.)
		}

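Note: the new branch above decodes JSON column bytes instead of stringifying them. A minimal illustration of the sonic call involved (the sample payload here is made up):

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

func main() {
	raw := []byte(`{"user":{"id":42},"tags":["a","b"]}`)

	// Decode into `any`: objects become map[string]any and arrays []any —
	// the structured shape the new readAsRaw branch passes through.
	var v any
	if err := sonic.Unmarshal(raw, &v); err != nil {
		// On malformed bytes the code above keeps the raw value instead.
		fmt.Println("fallback:", string(raw))
		return
	}
	fmt.Printf("%T: %v\n", v, v) // map[string]interface {}: map[tags:[a b] user:map[id:42]]
}
```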
@@ -2,6 +2,7 @@ package querier

import (
	"context"
	"net/http"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
@@ -20,3 +21,9 @@ type BucketCache interface {
	// store fresh buckets for future hits
	Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result)
}

type Handler interface {
	QueryRange(rw http.ResponseWriter, req *http.Request)
	QueryRawStream(rw http.ResponseWriter, req *http.Request)
	ReplaceVariables(rw http.ResponseWriter, req *http.Request)
}

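Note: with the methods now behind the Handler interface, route wiring can be exercised against a stub. A sketch of a no-op test double under the same assumptions (http.HandlerFunc-shaped methods, as declared above); illustrative only:

```go
package querier_test

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/querier"
)

type stubHandler struct{}

func (stubHandler) QueryRange(rw http.ResponseWriter, _ *http.Request)       { rw.WriteHeader(http.StatusOK) }
func (stubHandler) QueryRawStream(rw http.ResponseWriter, _ *http.Request)   { rw.WriteHeader(http.StatusOK) }
func (stubHandler) ReplaceVariables(rw http.ResponseWriter, _ *http.Request) { rw.WriteHeader(http.StatusOK) }

// compile-time check that the stub satisfies the interface
var _ querier.Handler = stubHandler{}
```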
@@ -1,454 +1 @@
package querier

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)

// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
	ID: "QueryRangeV5",
	Tags: []string{"query"},
	Summary: "Query range",
	Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
	Request: new(qbtypes.QueryRangeRequest),
	RequestContentType: "application/json",
	RequestExamples: queryRangeV5Examples,
	Response: new(qbtypes.QueryRangeResponse),
	ResponseContentType: "application/json",
	SuccessStatusCode: http.StatusOK,
	ErrorStatusCodes: []int{http.StatusBadRequest},
	SecuritySchemes: []handler.OpenAPISecurityScheme{
		{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
		{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
	},
}

var queryRangeV5Examples = []handler.OpenAPIExample{
	// 15 entries, byte-for-byte identical to the RequestExamples list in
	// pkg/apiserver/signozapiserver/querier.go above; the duplicate is elided here.
}

@@ -16,7 +16,6 @@ import (
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
)
@@ -241,13 +240,13 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
	var series []*qbv5.TimeSeries
	for _, v := range matrix {
		var s qbv5.TimeSeries
		lbls := make([]*qbv5.Label, 0, v.Metric.Len())
		v.Metric.Range(func(l labels.Label) {
		lbls := make([]*qbv5.Label, 0, len(v.Metric))
		for name, value := range v.Metric.Copy().Map() {
			lbls = append(lbls, &qbv5.Label{
				Key: telemetrytypes.TelemetryFieldKey{Name: l.Name},
				Value: l.Value,
				Key: telemetrytypes.TelemetryFieldKey{Name: name},
				Value: value,
			})
		})
		}

		s.Labels = lbls

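Note: the hunk above swaps the labels.Labels.Range callback for ranging over Copy().Map(). Both shapes side by side, assuming the default prometheus labels representation (where Labels is a slice, so len works):

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	ls := labels.FromStrings("service.name", "frontend", "env", "prod")

	// Old shape: a callback invoked per label, in label order.
	ls.Range(func(l labels.Label) {
		fmt.Println("range:", l.Name, l.Value)
	})

	// New shape: materialize a map and iterate it (map order is not guaranteed).
	for name, value := range ls.Copy().Map() {
		fmt.Println("map:", name, value)
	}
}
```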
@@ -79,8 +79,6 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/version"

	querierAPI "github.com/SigNoz/signoz/pkg/querier"
)

type status string
@@ -145,8 +143,6 @@ type APIHandler struct {

	LicensingAPI licensing.API

	QuerierAPI *querierAPI.API

	QueryParserAPI *queryparser.API

	Signoz *signoz.SigNoz
@@ -175,8 +171,6 @@ type APIHandlerOpts struct {

	LicensingAPI licensing.API

	QuerierAPI *querierAPI.API

	QueryParserAPI *queryparser.API

	Signoz *signoz.SigNoz
@@ -239,7 +233,6 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
		AlertmanagerAPI: opts.AlertmanagerAPI,
		LicensingAPI: opts.LicensingAPI,
		Signoz: opts.Signoz,
		QuerierAPI: opts.QuerierAPI,
		QueryParserAPI: opts.QueryParserAPI,
	}

@@ -396,7 +389,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
	subRouter.HandleFunc("/query_progress", am.ViewAccess(aH.GetQueryProgressUpdates)).Methods(http.MethodGet)

	// live logs
	subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
	subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.Signoz.Handlers.QuerierHandler.QueryRawStream)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -470,12 +463,6 @@ func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *middlew
	subRouter.HandleFunc("/metric/metric_metadata", am.ViewAccess(aH.getMetricMetadata)).Methods(http.MethodGet)
}

func (aH *APIHandler) RegisterQueryRangeV5Routes(router *mux.Router, am *middleware.AuthZ) {
	subRouter := router.PathPrefix("/api/v5").Subrouter()
	subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QuerierAPI.QueryRange)).Methods(http.MethodPost)
	subRouter.HandleFunc("/substitute_vars", am.ViewAccess(aH.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
}

// todo(remove): Implemented at render package (github.com/SigNoz/signoz/pkg/http/render) with the new error structure
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
	writeHttpResponse(w, data)

@@ -23,7 +23,6 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/querier"
	querierAPI "github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/query-service/agentConf"
	"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
@@ -133,7 +132,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
		LicensingAPI: nooplicensing.NewLicenseAPI(),
		Signoz: signoz,
		QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	}, config)
	if err != nil {
@@ -217,7 +215,6 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
	api.RegisterInfraMetricsRoutes(r, am)
	api.RegisterWebSocketPaths(r, am)
	api.RegisterQueryRangeV4Routes(r, am)
	api.RegisterQueryRangeV5Routes(r, am)
	api.RegisterMessagingQueuesRoutes(r, am)
	api.RegisterThirdPartyApiRoutes(r, am)
	api.MetricExplorerRoutes(r, am)

@@ -19,7 +19,6 @@ import (
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

@@ -462,12 +461,12 @@ func toCommonSeries(series promql.Series) v3.Series {
		Points: make([]v3.Point, 0),
	}

	series.Metric.Range(func(l labels.Label) {
		commonSeries.Labels[l.Name] = l.Value
	for _, lbl := range series.Metric {
		commonSeries.Labels[lbl.Name] = lbl.Value
		commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
			l.Name: l.Value,
			lbl.Name: lbl.Value,
		})
	})
	}

	for _, f := range series.Floats {
		commonSeries.Points = append(commonSeries.Points, v3.Point{

@@ -204,10 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
	// While we expect users not to send mixed data types, it inevitably happens,
	// so we handle the data type collisions here
	switch key.FieldDataType {
	case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString, telemetrytypes.FieldDataTypeJSON:
		if key.FieldDataType == telemetrytypes.FieldDataTypeJSON {
			tblFieldName = fmt.Sprintf("toString(%s)", tblFieldName)
		}
	case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
		switch v := value.(type) {
		case float64:
			// try to convert the string value to a number
@@ -222,6 +219,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
			// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
			value = fmt.Sprintf("%t", v)
		}

	case telemetrytypes.FieldDataTypeInt64,
		telemetrytypes.FieldDataTypeArrayInt64,
		telemetrytypes.FieldDataTypeNumber,

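A minimal sketch of the collision-handling idea above, assuming ClickHouse as the target dialect; collisionSafeField is hypothetical and only mirrors the shape of DataTypeCollisionHandledFieldName, not its real signature:

package main

import "fmt"

// collisionSafeField adapts a field expression or value when the stored type
// and the queried value type disagree.
func collisionSafeField(fieldExpr string, value any) (string, any) {
	switch v := value.(type) {
	case float64:
		// a string column compared against a number: cast, yielding NULL on failure
		return fmt.Sprintf("toFloat64OrNull(%s)", fieldExpr), v
	case bool:
		// there is no toBoolOrNull in ClickHouse, so compare the stringified bool
		return fieldExpr, fmt.Sprintf("%t", v)
	default:
		return fieldExpr, v
	}
}

func main() {
	expr, val := collisionSafeField("attributes_string['status']", 200.0)
	fmt.Println(expr, val) // toFloat64OrNull(attributes_string['status']) 200
}
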
@@ -313,31 +313,37 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
		return ""
	}
	child := ctx.GetChild(0)
	var searchText string
	if keyCtx, ok := child.(*grammar.KeyContext); ok {
		// create a full text search condition on the body field
		searchText = keyCtx.GetText()

		keyText := keyCtx.GetText()
		cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder, v.startNs, v.endNs)
		if err != nil {
			v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
			return ""
		}
		return cond
	} else if valCtx, ok := child.(*grammar.ValueContext); ok {
		var text string
		if valCtx.QUOTED_TEXT() != nil {
			searchText = trimQuotes(valCtx.QUOTED_TEXT().GetText())
			text = trimQuotes(valCtx.QUOTED_TEXT().GetText())
		} else if valCtx.NUMBER() != nil {
			searchText = valCtx.NUMBER().GetText()
			text = valCtx.NUMBER().GetText()
		} else if valCtx.BOOL() != nil {
			searchText = valCtx.BOOL().GetText()
			text = valCtx.BOOL().GetText()
		} else if valCtx.KEY() != nil {
			searchText = valCtx.KEY().GetText()
			text = valCtx.KEY().GetText()
		} else {
			v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
			return ""
		}
		cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
		if err != nil {
			v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
			return ""
		}
		return cond
	}
	cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(searchText), v.builder, v.startNs, v.endNs)
	if err != nil {
		v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
		return ""
	}
	v.warnings = append(v.warnings, v.fullTextColumn.Warnings...)
	return cond
}

	return "" // Should not happen with valid input

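To make the refactor above easier to follow, a self-contained sketch of the dispatch it implements: bare keys, quoted text, numbers, and bools all collapse into a single regexp condition on the full-text column. All names here (formatFullTextSearch, primaryToCondition, the match-on-body output) are illustrative assumptions, not the real API:

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// formatFullTextSearch is a hypothetical stand-in for FormatFullTextSearch:
// escape the text and match it case-insensitively anywhere in the body.
func formatFullTextSearch(text string) string {
	return "(?i)" + regexp.QuoteMeta(text)
}

// primaryToCondition mirrors the shape of VisitPrimary: every token kind is
// stringified and turned into one regexp condition on the full-text column.
func primaryToCondition(token any) (string, error) {
	var text string
	switch v := token.(type) {
	case string:
		text = v
	case float64:
		text = strconv.FormatFloat(v, 'f', -1, 64)
	case bool:
		text = strconv.FormatBool(v)
	default:
		return "", fmt.Errorf("unsupported value type: %T", token)
	}
	return fmt.Sprintf("match(body, '%s')", formatFullTextSearch(text)), nil
}

func main() {
	cond, _ := primaryToCondition("error")
	fmt.Println(cond) // match(body, '(?i)error')
}
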
@@ -377,11 +383,9 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
	for _, key := range keys {
		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
		if err != nil {
			v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
			return ""
		}
		conds = append(conds, condition)
		v.warnings = append(v.warnings, key.Warnings...)
	}
	// if there is only one condition, return it directly, one less `()` wrapper
	if len(conds) == 1 {
@@ -454,7 +458,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
			return ""
		}
		conds = append(conds, condition)
		v.warnings = append(v.warnings, key.Warnings...)
	}
	if len(conds) == 1 {
		return conds[0]
@@ -503,7 +506,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
			return ""
		}
		conds = append(conds, condition)
		v.warnings = append(v.warnings, key.Warnings...)
	}
	if len(conds) == 1 {
		return conds[0]
@@ -590,7 +592,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
			return ""
		}
		conds = append(conds, condition)
		v.warnings = append(v.warnings, key.Warnings...)
	}
	if len(conds) == 1 {
		return conds[0]
@@ -647,6 +648,7 @@ func (v *filterExpressionVisitor) VisitValueList(ctx *grammar.ValueListContext)

// VisitFullText handles standalone quoted strings for full-text search
func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) any {

	if v.skipFullTextFilter {
		return ""
	}
@@ -668,8 +670,6 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
		v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
		return ""
	}
	v.warnings = append(v.warnings, v.fullTextColumn.Warnings...)

	return cond
}

@@ -1,6 +1,7 @@
package signoz

import (
	"github.com/SigNoz/signoz/pkg/analytics"
	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
	"github.com/SigNoz/signoz/pkg/factory"
@@ -50,12 +51,14 @@ type Handlers struct {
	Fields         fields.Handler
	AuthzHandler   authz.Handler
	ZeusHandler    zeus.Handler
	QuerierHandler querier.Handler
}

func NewHandlers(
	modules Modules,
	providerSettings factory.ProviderSettings,
	querier querier.Querier,
	analytics analytics.Analytics,
	querierHandler querier.Handler,
	licensing licensing.Licensing,
	global global.Global,
	flaggerService flagger.Flagger,
@@ -80,5 +83,6 @@ func NewHandlers(
		Fields:         implfields.NewHandler(providerSettings, telemetryMetadataStore),
		AuthzHandler:   signozauthzapi.NewHandler(authz),
		ZeusHandler:    zeus.NewHandler(zeusService, licensing),
		QuerierHandler: querierHandler,
	}
}

@@ -13,6 +13,7 @@ import (
	"github.com/SigNoz/signoz/pkg/factory/factorytest"
	"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/queryparser"
	"github.com/SigNoz/signoz/pkg/sharder"
	"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -42,7 +43,8 @@ func TestNewHandlers(t *testing.T) {
	dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)

	handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil, nil)
	querierHandler := querier.NewHandler(providerSettings, nil, nil)
	handlers := NewHandlers(modules, providerSettings, nil, querierHandler, nil, nil, nil, nil, nil, nil, nil)
	reflectVal := reflect.ValueOf(handlers)
	for i := 0; i < reflectVal.NumField(); i++ {
		f := reflectVal.Field(i)

@@ -4,7 +4,6 @@ import (
	"bytes"
	"context"
	"encoding/json"
	"net/http"
	"os"
	"reflect"

@@ -27,7 +26,6 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/gorilla/mux"
	"github.com/SigNoz/signoz/pkg/zeus"
	"github.com/swaggest/jsonschema-go"
	"github.com/swaggest/openapi-go"
@@ -60,15 +58,12 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
		struct{ fields.Handler }{},
		struct{ authz.Handler }{},
		struct{ zeus.Handler }{},
		struct{ querier.Handler }{},
	).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
	if err != nil {
		return nil, err
	}

	// Register routes that live outside the APIServer modules
	// so they are discovered by the OpenAPI walker.
	registerQueryRoutes(apiserver.Router())

	reflector := openapi3.NewReflector()
	reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
		if defaultDefName == "RenderSuccessResponse" {
@@ -159,10 +154,3 @@ func convertJSONNumbers(v interface{}) {
		}
	}
}

func registerQueryRoutes(router *mux.Router) {
	router.Handle("/api/v5/query_range", handler.New(
		func(http.ResponseWriter, *http.Request) {},
		querier.QueryRangeV5OpenAPIDef,
	)).Methods(http.MethodPost)
}

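The removed registerQueryRoutes relied on a common trick: register a no-op handler so that a walker enumerating the router still discovers the path and method, even though the real handler lives elsewhere. A minimal sketch using only gorilla/mux (registerForDiscovery is a hypothetical name):

package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"
)

// registerForDiscovery registers a path with an empty handler purely so the
// route is visible to anything that walks the router.
func registerForDiscovery(router *mux.Router) {
	router.Handle("/api/v5/query_range",
		http.HandlerFunc(func(http.ResponseWriter, *http.Request) {}),
	).Methods(http.MethodPost)
}

func main() {
	r := mux.NewRouter()
	registerForDiscovery(r)
	// Walk the router the way an OpenAPI generator would.
	_ = r.Walk(func(route *mux.Route, _ *mux.Router, _ []*mux.Route) error {
		path, _ := route.GetPathTemplate()
		methods, _ := route.GetMethods()
		fmt.Println(methods, path) // [POST] /api/v5/query_range
		return nil
	})
}
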
@@ -253,6 +253,7 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
			handlers.Fields,
			handlers.AuthzHandler,
			handlers.ZeusHandler,
			handlers.QuerierHandler,
		),
	)
}

@@ -90,6 +90,7 @@ func New(
	authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
	dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
	gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
	querierHandlerCallback func(factory.ProviderSettings, querier.Querier, analytics.Analytics) querier.Handler,
) (*SigNoz, error) {
	// Initialize instrumentation
	instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -391,8 +392,11 @@ func New(

	userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)

	// Initialize the querier handler via callback (allows EE to decorate with anomaly detection)
	querierHandler := querierHandlerCallback(providerSettings, querier, analytics)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)
	handlers := NewHandlers(modules, providerSettings, analytics, querierHandler, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)

	// Initialize the API server
	apiserver, err := factory.NewProviderFromNamedMap(

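The querierHandlerCallback parameter follows a constructor-injection pattern: each build flavor passes its own factory, and the enterprise build can wrap the community handler (as the HEAD of this compare does with eequerier.NewHandler). A stripped-down sketch with hypothetical types, shown only to illustrate the wiring:

package main

import "fmt"

// Handler is a stand-in for querier.Handler.
type Handler interface{ Name() string }

type baseHandler struct{}

func (baseHandler) Name() string { return "community" }

// eeHandler decorates the community handler with extra behavior.
type eeHandler struct{ inner Handler }

func (h eeHandler) Name() string { return "ee(" + h.inner.Name() + ")" }

// newCore accepts a constructor callback, so each build flavor decides
// which concrete handler gets wired in.
func newCore(makeHandler func() Handler) Handler { return makeHandler() }

func main() {
	community := newCore(func() Handler { return baseHandler{} })
	enterprise := newCore(func() Handler { return eeHandler{inner: baseHandler{}} })
	fmt.Println(community.Name(), enterprise.Name()) // community ee(community)
}
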
@@ -35,19 +35,13 @@ func (c *conditionBuilder) conditionFor(
		return "", err
	}

	if column.Type.GetType() == schema.ColumnTypeEnumJSON {
		// If the field data is not the JSON column itself, then we need to build a JSON condition
		if querybuilder.BodyJSONQueryEnabled && key.MustBuildJSONCondition() {
			valueType, value := InferDataType(value, operator, key)
			cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
			if err != nil {
				return "", err
			}
			return cond, nil
		} else if key.FieldDataType == telemetrytypes.FieldDataTypeJSON && !operator.IsOpValidForJSON() {
			// throw error for invalid operators on JSON columns
			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid operator")
	if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
		valueType, value := InferDataType(value, operator, key)
		cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
		if err != nil {
			return "", err
		}
		return cond, nil
	}

	if operator.IsStringSearchOperator() {
@@ -114,6 +108,7 @@ func (c *conditionBuilder) conditionFor(
		return sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
	case qbtypes.FilterOperatorNotContains:
		return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil

	case qbtypes.FilterOperatorRegexp:
		// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
		// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
@@ -181,16 +176,9 @@ func (c *conditionBuilder) conditionFor(
	var value any
	switch column.Type.GetType() {
	case schema.ColumnTypeEnumJSON:
		switch key.FieldDataType {
		case telemetrytypes.FieldDataTypeJSON:
			if operator == qbtypes.FilterOperatorExists {
				return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), false), nil
			}
			return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), true), nil
		default:
			if operator == qbtypes.FilterOperatorExists {
				return sb.IsNotNull(tblFieldName), nil
			}
		if operator == qbtypes.FilterOperatorExists {
			return sb.IsNotNull(tblFieldName), nil
		} else {
			return sb.IsNull(tblFieldName), nil
		}
	case schema.ColumnTypeEnumLowCardinality:

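A small sketch of the Exists/NotExists split above, assuming huandu/go-sqlbuilder (which the surrounding code appears to use): JSON-typed fields are probed with ClickHouse's empty(), everything else with IS [NOT] NULL. existsCondition and the isJSONField flag are illustrative only:

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

// existsCondition builds the predicate for an Exists/NotExists filter.
func existsCondition(sb *sqlbuilder.SelectBuilder, field string, isJSONField, exists bool) string {
	if isJSONField {
		// empty(col) = false  <=>  the JSON value exists
		return sb.EQ(fmt.Sprintf("empty(%s)", field), !exists)
	}
	if exists {
		return sb.IsNotNull(field)
	}
	return sb.IsNull(field)
}

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("*").From("logs")
	sb.Where(existsCondition(sb, "body_json", true, true))
	sql, args := sb.Build()
	fmt.Println(sql, args)
}
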
@@ -1,10 +1,7 @@
package telemetrylogs

import (
	"fmt"

	"github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -20,7 +17,7 @@ const (
	LogsV2TimestampColumn         = "timestamp"
	LogsV2ObservedTimestampColumn = "observed_timestamp"
	LogsV2BodyColumn              = "body"
	LogsV2BodyV2Column            = constants.BodyV2Column
	LogsV2BodyJSONColumn          = constants.BodyJSONColumn
	LogsV2BodyPromotedColumn      = constants.BodyPromotedColumn
	LogsV2TraceIDColumn           = "trace_id"
	LogsV2SpanIDColumn            = "span_id"
@@ -37,25 +34,11 @@ const (
	LogsV2ResourcesStringColumn = "resources_string"
	LogsV2ScopeStringColumn     = "scope_string"

	BodyV2ColumnPrefix       = constants.BodyV2ColumnPrefix
	BodyJSONColumnPrefix     = constants.BodyJSONColumnPrefix
	BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
	MessageSubColumn         = "message"
	ftsBodyFieldKey          = "object(body)"
	bodySearchDefaultWarning = "When you search on `body` (full text or by field), Query Builder uses body.message:string by default. Check that this matches what you want to search."
	ftsBodyFieldWarning      = "Avoid using `object(body)` in queries. It performs a full scan and can severely slow queries. Use explicit fields instead (for example, `body.response.status_code` or `response.status_code`). Only use `object(body)` for temporary schema exploration. We do not recommend its usage elsewhere."
)

var (
	// mapping for body logical field to message sub column
	// TODO(Piyush): Add description for detailed explanation of remapping of body to message
	BodyLogicalFieldJSONMapping = &telemetrytypes.TelemetryFieldKey{
		Name:          MessageSubColumn,
		Signal:        telemetrytypes.SignalLogs,
		FieldContext:  telemetrytypes.FieldContextBody,
		FieldDataType: telemetrytypes.FieldDataTypeString,
		JSONDataType:  &telemetrytypes.String,
		Warnings:      []string{bodySearchDefaultWarning},
	}
	DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
		Name:   "body",
		Signal: telemetrytypes.SignalLogs,
@@ -135,44 +118,3 @@ var (
		},
	}
)

func bodyAliasExpression() string {
	if !querybuilder.BodyJSONQueryEnabled {
		return LogsV2BodyColumn
	}

	return fmt.Sprintf("%s as body", LogsV2BodyV2Column)
}

func enrichIntrinsicFields() error {
	// the body logical field is mapped to the message field in the body context, and only with the String data type
	err := BodyLogicalFieldJSONMapping.SetJSONAccessPlan(telemetrytypes.JSONColumnMetadata{
		BaseColumn:     LogsV2BodyV2Column,
		PromotedColumn: LogsV2BodyPromotedColumn,
	}, map[string][]telemetrytypes.JSONDataType{
		MessageSubColumn: {telemetrytypes.String},
	})
	if err != nil {
		return err
	}

	if querybuilder.BodyJSONQueryEnabled {
		DefaultFullTextColumn = BodyLogicalFieldJSONMapping
		IntrinsicFields["body"] = *BodyLogicalFieldJSONMapping
		IntrinsicFields[ftsBodyFieldKey] = telemetrytypes.TelemetryFieldKey{
			Name:          "body_v2",
			Signal:        telemetrytypes.SignalLogs,
			FieldContext:  telemetrytypes.FieldContextLog,
			FieldDataType: telemetrytypes.FieldDataTypeJSON,
			Warnings:      []string{ftsBodyFieldWarning},
		}
	}
	return nil
}

func init() {
	err := enrichIntrinsicFields()
	if err != nil {
		panic(err)
	}
}

@@ -30,7 +30,7 @@ var (
	"severity_text":   {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
	"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
	"body":            {Name: "body", Type: schema.ColumnTypeString},
	LogsV2BodyV2Column: {Name: LogsV2BodyV2Column, Type: schema.JSONColumnType{
	LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
		MaxDynamicTypes: utils.ToPointer(uint(32)),
		MaxDynamicPaths: utils.ToPointer(uint(0)),
	}},
@@ -89,9 +89,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
	}
	case telemetrytypes.FieldContextBody:
		// Body context is for JSON body fields
		// Use body_v2 if feature flag is enabled
		// Use body_json if feature flag is enabled
		if querybuilder.BodyJSONQueryEnabled {
			return logsV2Columns[LogsV2BodyV2Column], nil
			return logsV2Columns[LogsV2BodyJSONColumn], nil
		}
		// Fall back to legacy body column
		return logsV2Columns["body"], nil
@@ -100,9 +100,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
	if !ok {
		// check if the key has body JSON search
		if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
			// Use body_v2 if feature flag is enabled and we have a body condition builder
			// Use body_json if feature flag is enabled and we have a body condition builder
			if querybuilder.BodyJSONQueryEnabled {
				return logsV2Columns[LogsV2BodyV2Column], nil
				return logsV2Columns[LogsV2BodyJSONColumn], nil
			}
			// Fall back to legacy body column
			return logsV2Columns["body"], nil
@@ -147,9 +147,6 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
	}

	return m.buildFieldForJSON(key)
	case telemetrytypes.FieldContextLog:
		// return the column name as-is for log context fields
		return column.Name, nil
	default:
		return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
	}

@@ -30,7 +30,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
	return &jsonConditionBuilder{key: key, valueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType]}
}

// BuildCondition builds the full WHERE condition for body_v2 JSON paths
// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	conditions := []string{}
	for _, node := range c.key.JSONPlan {
@@ -40,7 +40,6 @@ func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperato
		}
		conditions = append(conditions, condition)
	}

	return sb.Or(conditions...), nil
}

@@ -289,9 +288,9 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
	}
	return sb.NotIn(fieldExpr, values...), nil
	case qbtypes.FilterOperatorExists:
		return sb.IsNotNull(fieldExpr), nil
		return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
	case qbtypes.FilterOperatorNotExists:
		return sb.IsNull(fieldExpr), nil
		return fmt.Sprintf("%s IS NULL", fieldExpr), nil
	// between and not between
	case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
		values, ok := value.([]any)

File diff suppressed because one or more lines are too long
@@ -203,6 +203,7 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
}

func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {

	// First check if it matches with any intrinsic fields
	var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
	if _, ok := IntrinsicFields[key.Name]; ok {
@@ -211,6 +212,7 @@ func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldK
	}

	return querybuilder.AdjustKey(key, keys, nil)

}

// buildListQuery builds a query for list panel type
@@ -247,7 +249,11 @@ func (b *logQueryStatementBuilder) buildListQuery(
	sb.SelectMore(LogsV2SeverityNumberColumn)
	sb.SelectMore(LogsV2ScopeNameColumn)
	sb.SelectMore(LogsV2ScopeVersionColumn)
	sb.SelectMore(bodyAliasExpression())
	sb.SelectMore(LogsV2BodyColumn)
	if querybuilder.BodyJSONQueryEnabled {
		sb.SelectMore(LogsV2BodyJSONColumn)
		sb.SelectMore(LogsV2BodyPromotedColumn)
	}
	sb.SelectMore(LogsV2AttributesStringColumn)
	sb.SelectMore(LogsV2AttributesNumberColumn)
	sb.SelectMore(LogsV2AttributesBoolColumn)

@@ -5,7 +5,6 @@ import (
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
@@ -887,154 +886,3 @@ func TestAdjustKey(t *testing.T) {
		})
	}
}

func TestStmtBuilderBodyField(t *testing.T) {
	cases := []struct {
		name                string
		requestType         qbtypes.RequestType
		query               qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
		enableBodyJSONQuery bool
		expected            qbtypes.Statement
		expectedErr         error
	}{
		{
			name:        "body_exists",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body Exists"},
				Limit:  10,
			},
			enableBodyJSONQuery: true,
			expected: qbtypes.Statement{
				Query:    "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') IS NOT NULL) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:     []any{uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
				Warnings: []string{bodySearchDefaultWarning},
			},
			expectedErr: nil,
		},
		{
			name:        "body_exists_disabled",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body Exists"},
				Limit:  10,
			},
			enableBodyJSONQuery: false,
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
			},
			expectedErr: nil,
		},
		{
			name:        "body_empty",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body == ''"},
				Limit:  10,
			},
			enableBodyJSONQuery: true,
			expected: qbtypes.Statement{
				Query:    "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:     []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
				Warnings: []string{bodySearchDefaultWarning},
			},
			expectedErr: nil,
		},
		{
			name:        "body_empty_disabled",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body == ''"},
				Limit:  10,
			},
			enableBodyJSONQuery: false,
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
			},
			expectedErr: nil,
		},
		{
			name:        "body_contains",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
				Limit:  10,
			},
			enableBodyJSONQuery: true,
			expected: qbtypes.Statement{
				Query:    "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (LOWER(dynamicElement(body_v2.`message`, 'String')) LIKE LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:     []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
				Warnings: []string{bodySearchDefaultWarning},
			},
			expectedErr: nil,
		},
		{
			name:        "body_contains_disabled",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
				Limit:  10,
			},
			enableBodyJSONQuery: false,
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
			},
			expectedErr: nil,
		},
	}

	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	enable, disable := jsonQueryTestUtil(t)
	defer disable()
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			if c.enableBodyJSONQuery {
				enable()
			} else {
				disable()
			}
			// build the key map after enabling/disabling body JSON query
			mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
			mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()

			aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
			resourceFilterStmtBuilder := resourceFilterStmtBuilder()
			statementBuilder := NewLogQueryStatementBuilder(
				instrumentationtest.New().ToProviderSettings(),
				mockMetadataStore,
				fm,
				cb,
				resourceFilterStmtBuilder,
				aggExprRewriter,
				DefaultFullTextColumn,
				GetBodyJSONKey,
			)

			q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
			if c.expectedErr != nil {
				require.Error(t, err)
				require.Contains(t, err.Error(), c.expectedErr.Error())
			} else {
				if err != nil {
					_, _, _, _, _, add := errors.Unwrapb(err)
					t.Logf("error additionals: %v", add)
				}
				require.NoError(t, err)
				require.Equal(t, c.expected.Query, q.Query)
				require.Equal(t, c.expected.Args, q.Args)
				require.Equal(t, c.expected.Warnings, q.Warnings)
			}
		})
	}
}

@@ -27,6 +27,13 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
			FieldDataType: telemetrytypes.FieldDataTypeString,
		},
	},
	"body": {
		{
			Name:          "body",
			FieldContext:  telemetrytypes.FieldContextLog,
			FieldDataType: telemetrytypes.FieldDataTypeString,
		},
	},
	"http.status_code": {
		{
			Name: "http.status_code",
@@ -938,11 +945,6 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
			key.Signal = telemetrytypes.SignalLogs
		}
	}

	// add intrinsic fields to the map
	for fieldName, key := range IntrinsicFields {
		keysMap[fieldName] = append(keysMap[fieldName], &key)
	}
	return keysMap
}

@@ -254,7 +254,7 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
	sb.Where(sb.Equal("database", telemetrylogs.DBName))
	sb.Where(sb.Equal("table", telemetrylogs.LogsV2LocalTableName))
	sb.Where(sb.Or(
		sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix))),
		sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix))),
		sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix))),
	))

@@ -338,7 +338,7 @@ func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, li
	if promoted {
		path = telemetrylogs.BodyPromotedColumnPrefix + path
	} else {
		path = telemetrylogs.BodyV2ColumnPrefix + path
		path = telemetrylogs.BodyJSONColumnPrefix + path
	}

	from := fmt.Sprintf("%s.%s", telemetrylogs.DBName, telemetrylogs.LogsV2TableName)
@@ -529,7 +529,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
// TODO(Piyush): Remove this function
func CleanPathPrefixes(path string) string {
	path = strings.TrimPrefix(path, telemetrytypes.BodyJSONStringSearchPrefix)
	path = strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
	path = strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
	path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
	return path
}

@@ -117,7 +117,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
	expectedArgs: []any{
		telemetrylogs.DBName,
		telemetrylogs.LogsV2LocalTableName,
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
	},
},
@@ -130,7 +130,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
	expectedArgs: []any{
		telemetrylogs.DBName,
		telemetrylogs.LogsV2LocalTableName,
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("foo")),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("bar")),

@@ -100,7 +100,7 @@ func NewTelemetryMetaStore(
	jsonColumnMetadata: map[telemetrytypes.Signal]map[telemetrytypes.FieldContext]telemetrytypes.JSONColumnMetadata{
		telemetrytypes.SignalLogs: {
			telemetrytypes.FieldContextBody: telemetrytypes.JSONColumnMetadata{
				BaseColumn:     telemetrylogs.LogsV2BodyV2Column,
				BaseColumn:     telemetrylogs.LogsV2BodyJSONColumn,
				PromotedColumn: telemetrylogs.LogsV2BodyPromotedColumn,
			},
		},
@@ -333,7 +333,7 @@ func (t *telemetryMetaStore) logsTblStatementToFieldKeys(ctx context.Context) ([
}

// getLogsKeys returns the keys from the logs that match the field selection criteria
func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (telemetrytypes.TelemetryFieldKeys, bool, error) {
func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
	if len(fieldKeySelectors) == 0 {
		return nil, true, nil
	}
@@ -343,10 +343,9 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
	if err != nil {
		return nil, false, err
	}
	// setOfKeys to reuse the same key object for qualified names
	setOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
	mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
	for _, key := range matKeys {
		setOfKeys[key.QualifiedName()] = key
		mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
	}

	// queries for both attribute and resource keys tables
@@ -447,7 +446,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors

	if len(queries) == 0 {
		// No matching contexts, return empty result
		return nil, true, nil
		return []*telemetrytypes.TelemetryFieldKey{}, true, nil
	}

	// Combine queries with UNION ALL
@@ -475,7 +474,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
	}
	defer rows.Close()

	mapOfKeys := make(telemetrytypes.TelemetryFieldKeys)
	keys := []*telemetrytypes.TelemetryFieldKey{}
	rowCount := 0
	searchTexts := []string{}
	dataTypes := []telemetrytypes.FieldDataType{}
@@ -503,7 +502,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
	if err != nil {
		return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
	}
	key, ok := setOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
	key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]

	// if there is no materialised column, create a key with the field context and data type
	if !ok {
@@ -515,8 +514,8 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
		}
	}

	mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
	setOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
	keys = append(keys, key)
	mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}

if rows.Err() != nil {
@@ -542,13 +541,17 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors

	if found {
		if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
			if _, added := setOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
				mapOfKeys[field.Name] = append(mapOfKeys[field.Name], &field)
			// intrinsic field can be a logical field mapped to a different physical location
			mapOfKeys[key] = append(mapOfKeys[key], &field)
			if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
				keys = append(keys, &field)
			}
			continue
		}

		keys = append(keys, &telemetrytypes.TelemetryFieldKey{
			Name:         key,
			FieldContext: telemetrytypes.FieldContextLog,
			Signal:       telemetrytypes.SignalLogs,
		})
	}
}

@@ -557,13 +560,10 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
	if err != nil {
		t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
	}
	for _, key := range bodyJSONPaths {
		mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
	}
	keys = append(keys, bodyJSONPaths...)
	complete = complete && finished
}

return mapOfKeys, complete, nil
return keys, complete, nil
}

func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
@@ -847,20 +847,12 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
	if fieldKeySelector != nil {
		selectors = []*telemetrytypes.FieldKeySelector{fieldKeySelector}
	}
	mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)

	switch fieldKeySelector.Signal {
	case telemetrytypes.SignalTraces:
		keys, complete, err = t.getTracesKeys(ctx, selectors)
	case telemetrytypes.SignalLogs:
		mapOfLogKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
		if err != nil {
			return nil, false, err
		}
		for keyName, keys := range mapOfLogKeys {
			mapOfKeys[keyName] = append(mapOfKeys[keyName], keys...)
		}
		complete = complete && logsComplete
		keys, complete, err = t.getLogsKeys(ctx, selectors)
	case telemetrytypes.SignalMetrics:
		if fieldKeySelector.Source == telemetrytypes.SourceMeter {
			keys, complete, err = t.getMeterSourceMetricKeys(ctx, selectors)
@@ -876,14 +868,11 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
	keys = append(keys, tracesKeys...)

	// get logs keys
	mapOfLogKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
	logsKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
	if err != nil {
		return nil, false, err
	}
	for keyName, keys := range mapOfLogKeys {
		mapOfKeys[keyName] = append(mapOfKeys[keyName], keys...)
	}
	complete = complete && logsComplete
	keys = append(keys, logsKeys...)

	// get metrics keys
	metricsKeys, metricsComplete, err := t.getMetricsKeys(ctx, selectors)
@@ -898,6 +887,7 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
		return nil, false, err
	}

	mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
	for _, key := range keys {
		mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
	}
@@ -934,7 +924,7 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
	}
}

	mapOfLogKeys, logsComplete, err := t.getLogsKeys(ctx, logsSelectors)
	logsKeys, logsComplete, err := t.getLogsKeys(ctx, logsSelectors)
	if err != nil {
		return nil, false, err
	}
@@ -955,8 +945,8 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
	complete := logsComplete && tracesComplete && metricsComplete

	mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
	for keyName, keys := range mapOfLogKeys {
		mapOfKeys[keyName] = append(mapOfKeys[keyName], keys...)
	for _, key := range logsKeys {
		mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
	}
	for _, key := range tracesKeys {
		mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)

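The getLogsKeys refactor above replaces a name-keyed map with a flat slice plus a composite-key map (name;context;datatype) so duplicates across sources are detected exactly once. A self-contained sketch of that dedup idea, with a pared-down FieldKey stand-in for TelemetryFieldKey:

package main

import "fmt"

// FieldKey is a simplified stand-in; only the three parts of the composite
// key matter for deduplication.
type FieldKey struct {
	Name, Context, DataType string
}

func compositeKey(k FieldKey) string {
	return k.Name + ";" + k.Context + ";" + k.DataType
}

// dedup keeps the first occurrence of each (name, context, datatype) triple
// while preserving input order.
func dedup(in []FieldKey) []FieldKey {
	seen := make(map[string]struct{})
	out := make([]FieldKey, 0, len(in))
	for _, k := range in {
		ck := compositeKey(k)
		if _, ok := seen[ck]; ok {
			continue
		}
		seen[ck] = struct{}{}
		out = append(out, k)
	}
	return out
}

func main() {
	keys := []FieldKey{
		{"status", "attribute", "string"},
		{"status", "attribute", "string"}, // duplicate, dropped
		{"status", "resource", "string"},  // same name, different context, kept
	}
	fmt.Println(dedup(keys))
}
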
@@ -7,6 +7,6 @@ const (
	AttributesMetadataTableName      = "distributed_attributes_metadata"
	AttributesMetadataLocalTableName = "attributes_metadata"
	PathTypesTableName               = otelcollectorconst.DistributedPathTypesTable
	PromotedPathsTableName           = "distributed_json_promoted_paths"
	PromotedPathsTableName           = otelcollectorconst.DistributedPromotedPathsTable
	SkipIndexTableName               = "system.data_skipping_indices"
)

@@ -60,7 +60,7 @@ type IngestionKeysParams struct {
}

type SearchIngestionKeysParams struct {
	Name    string `query:"name"`
	Name    string `query:"name" required:"true"`
	Page    int    `query:"page"`
	PerPage int    `query:"per_page"`
}
@@ -71,14 +71,14 @@ type GettableIngestionKeys struct {
}

type PostableIngestionKey struct {
	Name      string    `json:"name"`
	Name      string    `json:"name" required:"true"`
	Tags      []string  `json:"tags"`
	ExpiresAt time.Time `json:"expires_at"`
}

type GettableCreatedIngestionKey struct {
	ID    string `json:"id"`
	Value string `json:"value"`
	ID    string `json:"id" required:"true"`
	Value string `json:"value" required:"true"`
}

type PostableIngestionKeyLimit struct {
@@ -88,10 +88,10 @@ type PostableIngestionKeyLimit struct {
}

type GettableCreatedIngestionKeyLimit struct {
	ID string `json:"id"`
	ID string `json:"id" required:"true"`
}

type UpdatableIngestionKeyLimit struct {
	Config LimitConfig `json:"config"`
	Config LimitConfig `json:"config" required:"true"`
	Tags   []string    `json:"tags"`
}

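The required:"true" tags added above are what drive the required-field lists visible in the generated OpenAPI schema earlier in this compare. A small illustration (not the real generator) of how such a tag can be read via reflection:

package main

import (
	"fmt"
	"reflect"
)

// PostableIngestionKey here is a local copy for illustration only.
type PostableIngestionKey struct {
	Name string   `json:"name" required:"true"`
	Tags []string `json:"tags"`
}

// requiredJSONFields collects the json names of all fields tagged required.
func requiredJSONFields(v any) []string {
	t := reflect.TypeOf(v)
	var required []string
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		if f.Tag.Get("required") == "true" {
			required = append(required, f.Tag.Get("json"))
		}
	}
	return required
}

func main() {
	fmt.Println(requiredJSONFields(PostableIngestionKey{})) // [name]
}
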
@@ -36,8 +36,8 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
	}

	if strings.HasPrefix(i.Path, constants.BodyV2ColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyV2ColumnPrefix, constants.BodyPromotedColumnPrefix)
	if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
	}

	if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {

@@ -197,17 +197,6 @@ func (f FilterOperator) IsArrayOperator() bool {
	}
}

func (f FilterOperator) IsOpValidForJSON() bool {
	switch f {
	case FilterOperatorExists, FilterOperatorNotExists,
		FilterOperatorContains, FilterOperatorNotContains,
		FilterOperatorRegexp, FilterOperatorNotRegexp:
		return true
	default:
		return false
	}
}

type OrderDirection struct {
	valuer.String
}

@@ -29,8 +29,6 @@ const (
	ArrayAnyIndexSuffix = "[*]"
)

type TelemetryFieldKeys map[string][]*TelemetryFieldKey

type TelemetryFieldKey struct {
	Name        string `json:"name" required:"true"`
	Description string `json:"description,omitempty"`
@@ -43,7 +41,6 @@ type TelemetryFieldKey struct {
	JSONPlan     JSONAccessPlan      `json:"-"`
	Indexes      []JSONDataTypeIndex `json:"-"`
	Materialized bool                `json:"-"` // refers to promoted in case of body.... fields
	Warnings     []string            `json:"-"` // warnings attached to using a particular field key
}

func (f *TelemetryFieldKey) KeyNameContainsArray() bool {
@@ -65,10 +62,6 @@ func (f *TelemetryFieldKey) ArrayParentPaths() []string {
	return paths
}

func (f *TelemetryFieldKey) MustBuildJSONCondition() bool {
	return f.FieldDataType != FieldDataTypeJSON && f.JSONDataType != nil
}

func (f *TelemetryFieldKey) ArrayParentSelectors() []*FieldKeySelector {
	paths := f.ArrayParentPaths()
	selectors := make([]*FieldKeySelector, 0, len(paths))
@@ -176,10 +169,6 @@ func (f *TelemetryFieldKey) Normalize() {

}

func (f *TelemetryFieldKey) QualifiedName() string {
	return f.Name + ";" + f.FieldContext.StringValue() + ";" + f.FieldDataType.StringValue()
}

// GetFieldKeyFromKeyText returns a TelemetryFieldKey from a key text.
// The key text is expected to be in the format of `fieldContext.fieldName:fieldDataType` in the search query.
// Both fieldContext and :fieldDataType are optional.

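For the key-text format documented above (`fieldContext.fieldName:fieldDataType`, both parts optional), here is an illustrative parser — not the real GetFieldKeyFromKeyText — showing one way the optional pieces can be peeled off:

package main

import (
	"fmt"
	"strings"
)

// parseKeyText splits an optional context prefix and an optional :dataType
// suffix from a key text; the recognized contexts below are assumptions.
func parseKeyText(text string) (context, name, dataType string) {
	if i := strings.LastIndex(text, ":"); i >= 0 {
		text, dataType = text[:i], text[i+1:]
	}
	if i := strings.Index(text, "."); i >= 0 {
		switch text[:i] {
		case "attribute", "resource", "scope", "body", "log":
			context, text = text[:i], text[i+1:]
		}
	}
	return context, text, dataType
}

func main() {
	c, n, d := parseKeyText("resource.service.name:string")
	fmt.Printf("%q %q %q\n", c, n, d) // "resource" "service.name" "string"
	c, n, d = parseKeyText("status_code")
	fmt.Printf("%q %q %q\n", c, n, d) // "" "status_code" ""
}
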
@@ -21,7 +21,6 @@ var (
	// int64 and number are synonyms for float64
	FieldDataTypeInt64       = FieldDataType{valuer.NewString("int64")}
	FieldDataTypeNumber      = FieldDataType{valuer.NewString("number")}
	FieldDataTypeJSON        = FieldDataType{valuer.NewString("json")}
	FieldDataTypeUnspecified = FieldDataType{valuer.NewString("")}

	FieldDataTypeArrayString = FieldDataType{valuer.NewString("[]string")}

@@ -40,7 +40,7 @@ type JSONAccessNode struct {
	// Node information
	Name       string
	IsTerminal bool
	isRoot     bool // marked true for only body_v2 and body_promoted
	isRoot     bool // marked true for only body_json and body_json_promoted

	// Precomputed type information (single source of truth)
	AvailableTypes []JSONDataType

@@ -1,19 +1,12 @@
|
||||
package telemetrytypes
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
otelconstants "github.com/SigNoz/signoz-otel-collector/constants"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
const (
|
||||
bodyV2Column = otelconstants.BodyV2Column
|
||||
bodyPromotedColumn = otelconstants.BodyPromotedColumn
|
||||
)
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions for Test Data Creation
|
||||
// ============================================================================
|
||||
@@ -116,8 +109,8 @@ func TestNode_Alias(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "Root node returns name as-is",
|
||||
node: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
expected: bodyV2Column,
|
||||
node: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
expected: "body_json",
|
||||
},
|
||||
{
|
||||
name: "Node without parent returns backticked name",
|
||||
@@ -131,9 +124,9 @@ func TestNode_Alias(t *testing.T) {
|
||||
name: "Node with root parent uses dot separator",
|
||||
node: &JSONAccessNode{
|
||||
Name: "age",
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
expected: "`" + bodyV2Column + ".age`",
|
||||
expected: "`" + "body_json" + ".age`",
|
||||
},
|
||||
{
|
||||
name: "Node with non-root parent uses array separator",
|
||||
@@ -141,10 +134,10 @@ func TestNode_Alias(t *testing.T) {
|
||||
Name: "name",
|
||||
Parent: &JSONAccessNode{
|
||||
Name: "education",
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
},
|
||||
expected: "`" + bodyV2Column + ".education[].name`",
|
||||
expected: "`" + "body_json" + ".education[].name`",
|
||||
},
|
||||
{
|
||||
name: "Nested array path with multiple levels",
|
||||
@@ -154,11 +147,11 @@ func TestNode_Alias(t *testing.T) {
|
||||
Name: "awards",
|
||||
Parent: &JSONAccessNode{
|
||||
Name: "education",
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: "`" + bodyV2Column + ".education[].awards[].type`",
|
||||
expected: "`" + "body_json" + ".education[].awards[].type`",
|
||||
},
|
||||
}
|
||||
|
||||
@@ -180,18 +173,18 @@ func TestNode_FieldPath(t *testing.T) {
|
||||
name: "Simple field path from root",
|
||||
node: &JSONAccessNode{
|
||||
Name: "user",
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
// FieldPath() always wraps the field name in backticks
|
||||
expected: bodyV2Column + ".`user`",
|
||||
expected: "body_json" + ".`user`",
|
||||
},
|
||||
{
|
||||
name: "Field path with backtick-required key",
|
||||
node: &JSONAccessNode{
|
||||
Name: "user-name", // requires backtick
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
expected: bodyV2Column + ".`user-name`",
|
||||
expected: "body_json" + ".`user-name`",
|
||||
},
|
||||
{
|
||||
name: "Nested field path",
|
||||
@@ -199,11 +192,11 @@ func TestNode_FieldPath(t *testing.T) {
|
||||
Name: "age",
|
||||
Parent: &JSONAccessNode{
|
||||
Name: "user",
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
},
|
||||
// FieldPath() always wraps the field name in backticks
|
||||
expected: "`" + bodyV2Column + ".user`.`age`",
|
||||
expected: "`" + "body_json" + ".user`.`age`",
|
||||
},
|
||||
{
|
||||
name: "Array element field path",
|
||||
@@ -211,11 +204,11 @@ func TestNode_FieldPath(t *testing.T) {
|
||||
Name: "name",
|
||||
Parent: &JSONAccessNode{
|
||||
Name: "education",
|
||||
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
|
||||
Parent: NewRootJSONAccessNode("body_json", 32, 0),
|
||||
},
|
||||
},
|
||||
// FieldPath() always wraps the field name in backticks
|
||||
expected: "`" + bodyV2Column + ".education`.`name`",
|
||||
expected: "`" + "body_json" + ".education`.`name`",
|
||||
},
|
||||
}

@@ -243,36 +236,36 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
{
name: "Simple path not promoted",
key: makeKey("user.name", String, false),
expectedYAML: fmt.Sprintf(`
expectedYAML: `
- name: user.name
column: %s
column: body_json
availableTypes:
- String
maxDynamicTypes: 16
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
{
name: "Simple path promoted",
key: makeKey("user.name", String, true),
expectedYAML: fmt.Sprintf(`
expectedYAML: `
- name: user.name
column: %s
column: body_json
availableTypes:
- String
maxDynamicTypes: 16
isTerminal: true
elemType: String
- name: user.name
column: %s
column: body_json_promoted
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
`, bodyV2Column, bodyPromotedColumn),
`,
},
{
name: "Empty path returns error",
@@ -285,8 +278,8 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
}, types)
if tt.expectErr {
require.Error(t, err)
@@ -311,9 +304,9 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
{
name: "Single array level - JSON branch only",
path: "education[].name",
expectedYAML: fmt.Sprintf(`
expectedYAML: `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -325,14 +318,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
maxDynamicTypes: 8
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
{
name: "Single array level - both JSON and Dynamic branches",
path: "education[].awards[].type",
expectedYAML: fmt.Sprintf(`
expectedYAML: `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -359,14 +352,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
maxDynamicPaths: 256
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
{
name: "Deeply nested array path",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: fmt.Sprintf(`
expectedYAML: `
- name: interests
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -406,14 +399,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
- String
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
{
name: "ArrayAnyIndex replacement [*] to []",
path: "education[*].name",
expectedYAML: fmt.Sprintf(`
expectedYAML: `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -425,7 +418,7 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
maxDynamicTypes: 8
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
}
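With the constants gone, the expected YAML pins the columns directly: body_json for every plan and body_json_promoted only for promoted keys. A minimal sketch of that observable rule follows; only JSONColumnMetadata's field names come from the diff, while PlanStep and planColumns are illustrative stand-ins for the real plan machinery:

package main

import "fmt"

// JSONColumnMetadata's field names come from the diff; PlanStep and
// planColumns are illustrative stand-ins, not the real implementation.
type JSONColumnMetadata struct {
	BaseColumn     string
	PromotedColumn string
}

type PlanStep struct {
	Name   string
	Column string
}

// planColumns mimics the rule visible in the expected YAML: every key is
// planned against the base column, and promoted keys additionally get a
// plan against the promoted column.
func planColumns(meta JSONColumnMetadata, name string, promoted bool) []PlanStep {
	plans := []PlanStep{{Name: name, Column: meta.BaseColumn}}
	if promoted {
		plans = append(plans, PlanStep{Name: name, Column: meta.PromotedColumn})
	}
	return plans
}

func main() {
	meta := JSONColumnMetadata{BaseColumn: "body_json", PromotedColumn: "body_json_promoted"}
	fmt.Println(planColumns(meta, "user.name", false)) // one plan: body_json
	fmt.Println(planColumns(meta, "user.name", true))  // two plans: body_json, then body_json_promoted
}

This mirrors the length assertions later in the file: require.Len(t, key.JSONPlan, 1) for non-promoted keys and 2 for promoted ones.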

@@ -433,8 +426,8 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
key := makeKey(tt.path, String, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
}, types)
require.NoError(t, err)
require.NotNil(t, key.JSONPlan)
@@ -452,15 +445,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
t.Run("Non-promoted plan", func(t *testing.T) {
key := makeKey(path, String, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
}, types)
require.NoError(t, err)
require.Len(t, key.JSONPlan, 1)

expectedYAML := fmt.Sprintf(`
expectedYAML := `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -487,7 +480,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
maxDynamicPaths: 256
isTerminal: true
elemType: String
`, bodyV2Column)
`
got := plansToYAML(t, key.JSONPlan)
require.YAMLEq(t, expectedYAML, got)
})
@@ -495,15 +488,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
t.Run("Promoted plan", func(t *testing.T) {
key := makeKey(path, String, true)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
}, types)
require.NoError(t, err)
require.Len(t, key.JSONPlan, 2)

expectedYAML := fmt.Sprintf(`
expectedYAML := `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -531,7 +524,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
isTerminal: true
elemType: String
- name: education
column: %s
column: body_json_promoted
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -561,7 +554,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
maxDynamicPaths: 256
isTerminal: true
elemType: String
`, bodyV2Column, bodyPromotedColumn)
`
got := plansToYAML(t, key.JSONPlan)
require.YAMLEq(t, expectedYAML, got)
})
@@ -582,11 +575,11 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
expectErr: true,
},
{
name: "Very deep nesting - validates progression doesn't go negative",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: fmt.Sprintf(`
name: "Very deep nesting - validates progression doesn't go negative",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: `
- name: interests
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -626,14 +619,14 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
- String
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
{
name: "Path with mixed scalar and array types",
path: "education[].type",
expectedYAML: fmt.Sprintf(`
name: "Path with mixed scalar and array types",
path: "education[].type",
expectedYAML: `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -646,20 +639,20 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
maxDynamicTypes: 8
isTerminal: true
elemType: String
`, bodyV2Column),
`,
},
{
name: "Exists with only array types available",
path: "education",
expectedYAML: fmt.Sprintf(`
name: "Exists with only array types available",
path: "education",
expectedYAML: `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
isTerminal: true
elemType: Array(JSON)
`, bodyV2Column),
`,
},
}

@@ -675,8 +668,8 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
}
key := makeKey(tt.path, keyType, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
}, types)
if tt.expectErr {
require.Error(t, err)
@@ -694,15 +687,15 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
path := "education[].awards[].participated[].team[].branch"
key := makeKey(path, String, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
}, types)
require.NoError(t, err)
require.Len(t, key.JSONPlan, 1)

expectedYAML := fmt.Sprintf(`
expectedYAML := `
- name: education
column: %s
column: body_json
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -787,7 +780,7 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
maxDynamicPaths: 64
isTerminal: true
elemType: String
`, bodyV2Column)
`

got := plansToYAML(t, key.JSONPlan)
require.YAMLEq(t, expectedYAML, got)
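Dropping fmt.Sprintf in favour of plain string literals is safe because the assertions go through require.YAMLEq, which compares parsed YAML rather than raw strings. A hypothetical reconstruction of the plansToYAML helper, inferred only from its call sites above (the real helper may differ):

package telemetrytypes

import (
	"testing"

	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

// plansToYAML marshals the computed plan so require.YAMLEq can compare it
// structurally, independent of key order and indentation.
func plansToYAML(t *testing.T, plans interface{}) string {
	t.Helper()
	out, err := yaml.Marshal(plans)
	require.NoError(t, err)
	return string(out)
}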

@@ -2,7 +2,6 @@ package telemetrytypestest

import (
"context"
"slices"
"strings"

schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
@@ -177,9 +176,8 @@ func matchesKey(selector *telemetrytypes.FieldKeySelector, key *telemetrytypes.T
return true
}

matchNameExceptions := []string{"body"}
// Check name (already checked in matchesName, but double-check here)
if selector.Name != "" && !matchesName(selector, key.Name) && slices.Contains(matchNameExceptions, key.Name) {
if selector.Name != "" && !matchesName(selector, key.Name) {
return false
}
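This hunk deletes the matchNameExceptions escape hatch (and with it the now-unused "slices" import): previously this double-check rejected a mismatching key only when its name was on the exception list ("body"); now any name mismatch rejects the key. A toy illustration of the control-flow change, with matchesName reduced to plain equality for the example (the real function supports richer selectors):

package main

import "fmt"

// matchesName is simplified to equality purely for illustration.
func matchesName(selectorName, keyName string) bool { return selectorName == keyName }

// rejectBefore reproduces the old condition: a mismatching key was caught
// by this double-check only when its name was on the exception list.
func rejectBefore(selectorName, keyName string) bool {
	exceptions := map[string]bool{"body": true}
	return selectorName != "" && !matchesName(selectorName, keyName) && exceptions[keyName]
}

// rejectAfter reproduces the new condition: any name mismatch rejects.
func rejectAfter(selectorName, keyName string) bool {
	return selectorName != "" && !matchesName(selectorName, keyName)
}

func main() {
	fmt.Println(rejectBefore("duration", "span_id")) // false - slipped through the old check
	fmt.Println(rejectAfter("duration", "span_id"))  // true - now filtered
	fmt.Println(rejectAfter("duration", "duration")) // false - names match
}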

@@ -11,28 +11,28 @@ type PostableHost struct {
}

type PostableProfile struct {
UsesOtel bool `json:"uses_otel"`
HasExistingObservabilityTool bool `json:"has_existing_observability_tool"`
ExistingObservabilityTool string `json:"existing_observability_tool"`
ReasonsForInterestInSigNoz []string `json:"reasons_for_interest_in_signoz"`
LogsScalePerDayInGB int64 `json:"logs_scale_per_day_in_gb"`
NumberOfServices int64 `json:"number_of_services"`
NumberOfHosts int64 `json:"number_of_hosts"`
WhereDidYouDiscoverSigNoz string `json:"where_did_you_discover_signoz"`
TimelineForMigratingToSigNoz string `json:"timeline_for_migrating_to_signoz"`
UsesOtel bool `json:"uses_otel" required:"true"`
HasExistingObservabilityTool bool `json:"has_existing_observability_tool" required:"true"`
ExistingObservabilityTool string `json:"existing_observability_tool" required:"true"`
ReasonsForInterestInSigNoz []string `json:"reasons_for_interest_in_signoz" required:"true"`
LogsScalePerDayInGB int64 `json:"logs_scale_per_day_in_gb" required:"true"`
NumberOfServices int64 `json:"number_of_services" required:"true"`
NumberOfHosts int64 `json:"number_of_hosts" required:"true"`
WhereDidYouDiscoverSigNoz string `json:"where_did_you_discover_signoz" required:"true"`
TimelineForMigratingToSigNoz string `json:"timeline_for_migrating_to_signoz" required:"true"`
}

type GettableHost struct {
Name string `json:"name"`
State string `json:"state"`
Tier string `json:"tier"`
Hosts []Host `json:"hosts"`
Name string `json:"name" required:"true"`
State string `json:"state" required:"true"`
Tier string `json:"tier" required:"true"`
Hosts []Host `json:"hosts" required:"true"`
}

type Host struct {
Name string `json:"name"`
IsDefault bool `json:"is_default"`
URL string `json:"url"`
Name string `json:"name" required:"true"`
IsDefault bool `json:"is_default" required:"true"`
URL string `json:"url" required:"true"`
}
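The new required:"true" tags are inert to encoding/json itself; they only matter to whatever reads them via reflection, such as an OpenAPI schema generator lifting them into a schema's required list. A minimal sketch of that pattern (RequiredFields and the trimmed Host copy are illustrative, not part of the change):

package main

import (
	"fmt"
	"reflect"
)

// Host mirrors the struct above; only the tags matter for this sketch.
type Host struct {
	Name      string `json:"name" required:"true"`
	IsDefault bool   `json:"is_default" required:"true"`
	URL       string `json:"url" required:"true"`
}

// RequiredFields collects the json names of fields tagged required:"true".
func RequiredFields(v interface{}) []string {
	t := reflect.TypeOf(v)
	var required []string
	for i := 0; i < t.NumField(); i++ {
		field := t.Field(i)
		if field.Tag.Get("required") == "true" {
			required = append(required, field.Tag.Get("json"))
		}
	}
	return required
}

func main() {
	fmt.Println(RequiredFields(Host{})) // [name is_default url]
}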

func NewGettableHost(data []byte) *GettableHost {