Compare commits

53 Commits

Author | SHA1 | Message | Date
Piyush Singariya | 62fa98c546 | feat: forced full text on body | 2026-02-20 15:20:16 +05:30
Piyush Singariya | 2a492cc783 | fix: change warning to a const to fix tests | 2026-02-20 14:54:16 +05:30
Piyush Singariya | 24afdad36c | fix: append warnings from fieldkeys | 2026-02-20 14:51:13 +05:30
Piyush Singariya | 5d20019207 | fix: body.message not being mapped correctly | 2026-02-20 14:26:12 +05:30
Piyush Singariya | 1963d5811d | Merge branch 'main' into merge-json-col-fields | 2026-02-20 10:08:25 +05:30
Piyush Singariya | 15cfccad74 | revert: change ReadMultiple is needed | 2026-02-20 10:08:11 +05:30
Piyush Singariya | a0399560e3 | revert: remvoing unused function | 2026-02-19 16:33:12 +05:30
Piyush Singariya | 265e337d5c | Merge branch 'main' into merge-json-col-fields | 2026-02-19 16:29:55 +05:30
Piyush Singariya | bb8c874755 | fix: go lint | 2026-02-17 17:19:24 +05:30
Piyush Singariya | 13cbe03d64 | fix: tests | 2026-02-17 16:58:00 +05:30
Piyush Singariya | 93621c29b7 | fix: go mod changes | 2026-02-17 16:29:00 +05:30
Piyush Singariya | 2c691b5a75 | fix: test fixed | 2026-02-17 16:28:54 +05:30
Piyush Singariya | cd7e1bb114 | Merge branch 'main' into merge-json-col-fields | 2026-02-17 16:22:58 +05:30
Piyush Singariya | a1d2ec8b8a | fix: remove unused function | 2026-02-17 16:18:38 +05:30
Piyush Singariya | 8bbafb52d5 | fix: go.mod required changes | 2026-02-17 16:16:17 +05:30
Piyush Singariya | 075cfab463 | feat: mapping body_v2.message:string map to body | 2026-02-17 13:26:34 +05:30
Piyush Singariya | 86bccaac0c | test: blocked on pr #10153 | 2026-02-16 15:24:31 +05:30
Piyush Singariya | de1aac63c0 | revert: more unrelated change | 2026-02-16 13:19:49 +05:30
Piyush Singariya | 14fe8745b5 | Merge branch 'main' into merge-json-col-fields | 2026-02-16 13:14:39 +05:30
Piyush Singariya | 4013c7ee03 | revert: few unrelated changes | 2026-02-16 13:13:07 +05:30
Piyush Singariya | 0d34360e0b | fix: handle datatype collision | 2026-01-30 12:17:28 +05:30
srikanthccv | d204c89dec | Merge branch 'main' into merge-json-col-fields | 2026-01-30 02:12:14 +05:30
Piyush Singariya | 8dd33c1ab7 | Merge branch 'main' into merge-json-col-fields | 2026-01-29 19:57:22 +05:30
Piyush Singariya | 8e5c3d5ae1 | chore: merge json fields | 2026-01-29 16:46:00 +05:30
Piyush Singariya | d45bb52f33 | Merge branch 'has-jsonqb' into merge-json-col-fields | 2026-01-29 13:21:13 +05:30
Piyush Singariya | e71818292d | fix: go test flakiness | 2026-01-29 10:17:53 +05:30
Piyush Singariya | 37557f7f24 | Merge branch 'main' into has-jsonqb | 2026-01-29 09:12:22 +05:30
Piyush Singariya | 27ff102660 | Merge branch 'main' into has-jsonqb | 2026-01-28 17:44:48 +05:30
Piyush Singariya | cb2aa4cffd | fix: tests | 2026-01-28 17:42:17 +05:30
Piyush Singariya | 58d1d84ec7 | test: fix | 2026-01-28 15:34:22 +05:30
Piyush Singariya | d8e116a7bc | fix: merge conflict | 2026-01-28 15:15:50 +05:30
Piyush Singariya | 6a48bdc37e | Merge branch 'main' into has-jsonqb | 2026-01-28 15:15:17 +05:30
Piyush Singariya | ffb62432f8 | chore: var renamed | 2026-01-28 14:42:51 +05:30
Piyush Singariya | 57c51f070c | fix: merge json body columns together | 2026-01-28 14:36:15 +05:30
Piyush Singariya | 36becfc7a2 | fix: removed comment | 2026-01-27 13:20:52 +05:30
Piyush Singariya | 8e71de09f3 | fix: remove unnecessary bool checking | 2026-01-27 13:16:30 +05:30
Piyush Singariya | 56de92de73 | fix: changes based on review from Srikanth | 2026-01-27 13:12:32 +05:30
Piyush Singariya | 62b10f8e77 | Merge branch 'main' into has-jsonqb | 2026-01-27 10:04:32 +05:30
Piyush Singariya | 20b53d7856 | fix: review based on tushar | 2026-01-27 10:04:15 +05:30
Piyush Singariya | 8f2c506304 | fix: json qb test fix | 2026-01-22 22:00:06 +05:30
Srikanth Chekuri | 7b5b9027dd | Merge branch 'main' into has-jsonqb | 2026-01-22 20:19:39 +05:30
Piyush Singariya | b77f97fcb7 | fix: tests | 2026-01-22 17:24:26 +05:30
Piyush Singariya | 62942a4162 | fix: tests | 2026-01-22 15:47:20 +05:30
Piyush Singariya | 349bbbbf1d | Merge branch 'main' into has-jsonqb | 2026-01-22 12:36:45 +05:30
Piyush Singariya | 1966a7a5f6 | fix: empty filteredArrays condition | 2026-01-22 12:36:29 +05:30
Piyush Singariya | a4eed9ff13 | fix: build json plans in metadata | 2026-01-22 12:33:51 +05:30
Piyush Singariya | 24d1ee33b5 | revert: gitignore change | 2026-01-22 10:39:02 +05:30
Srikanth Chekuri | 3402203021 | Merge branch 'main' into has-jsonqb | 2026-01-21 13:47:39 +05:30
Piyush Singariya | e8e4897cc8 | fix: tests GroupBy | 2026-01-20 12:10:44 +05:30
Piyush Singariya | 96fb88aaee | fix: ignored .vscode in gitignore | 2026-01-20 11:47:34 +05:30
Piyush Singariya | 5a00e6c2cd | Merge branch 'main' into has-jsonqb | 2026-01-20 11:44:32 +05:30
Piyush Singariya | e2500cff7d | fix: tests expected queries and values | 2026-01-20 11:40:56 +05:30
Piyush Singariya | 4864c3bc37 | feat: has JSON QB | 2026-01-20 11:23:50 +05:30
84 changed files with 2171 additions and 4528 deletions

View File

@@ -85,9 +85,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
return querier.NewHandler(ps, q, a)
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
eequerier "github.com/SigNoz/signoz/ee/querier"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
@@ -125,10 +124,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
communityHandler := querier.NewHandler(ps, q, a)
return eequerier.NewHandler(ps, q, communityHandler)
},
)
if err != nil {

View File

@@ -307,16 +307,11 @@ components:
type: string
value:
type: string
required:
- id
- value
type: object
GatewaytypesGettableCreatedIngestionKeyLimit:
properties:
id:
type: string
required:
- id
type: object
GatewaytypesGettableIngestionKeys:
properties:
@@ -437,8 +432,6 @@ components:
type: string
nullable: true
type: array
required:
- name
type: object
GatewaytypesPostableIngestionKeyLimit:
properties:
@@ -461,8 +454,6 @@ components:
type: string
nullable: true
type: array
required:
- config
type: object
MetricsexplorertypesListMetric:
properties:
@@ -749,14 +740,6 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesComparisonSpaceAggregationParam:
properties:
operator:
type: string
threshold:
format: double
type: number
type: object
MetrictypesSpaceAggregation:
enum:
- sum
@@ -769,7 +752,6 @@ components:
- p90
- p95
- p99
- histogram_count
type: string
MetrictypesTemporality:
enum:
@@ -1054,8 +1036,6 @@ components:
type: object
Querybuildertypesv5MetricAggregation:
properties:
comparisonSpaceAggregationParam:
$ref: '#/components/schemas/MetrictypesComparisonSpaceAggregationParam'
metricName:
type: string
reduceTo:
@@ -1953,11 +1933,6 @@ components:
type: string
tier:
type: string
required:
- name
- state
- tier
- hosts
type: object
ZeustypesHost:
properties:
@@ -1967,10 +1942,6 @@ components:
type: string
url:
type: string
required:
- name
- is_default
- url
type: object
ZeustypesPostableHost:
properties:
@@ -2005,16 +1976,6 @@ components:
type: boolean
where_did_you_discover_signoz:
type: string
required:
- uses_otel
- has_existing_observability_tool
- existing_observability_tool
- reasons_for_interest_in_signoz
- logs_scale_per_day_in_gb
- number_of_services
- number_of_hosts
- where_did_you_discover_signoz
- timeline_for_migrating_to_signoz
type: object
securitySchemes:
api_key:
@@ -3234,29 +3195,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Promote and index paths
tags:
- logs
@@ -3281,29 +3225,12 @@ paths:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- EDITOR
- tokenizer:
- EDITOR
summary: Promote and index paths
tags:
- logs
@@ -4850,7 +4777,6 @@ paths:
parameters:
- in: query
name: name
required: true
schema:
type: string
- in: query
@@ -6231,58 +6157,4 @@ paths:
- VIEWER
summary: Query range
tags:
- querier
/api/v5/substitute_vars:
post:
deprecated: false
description: Replace variables in a query
operationId: ReplaceVariables
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Replace variables
tags:
- querier
- query

View File

@@ -190,44 +190,43 @@ type OpenAPIExample struct {
}
```
For reference, see `pkg/apiserver/signozapiserver/querier.go` which defines examples inline for the `/api/v5/query_range` endpoint:
For reference, see `pkg/querier/openapi.go` which defines some examples for the `/api/v5/query_range` endpoint:
```go
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"querier"},
Summary: "Query range",
Description: "Execute a composite query over a time range.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
// ...
},
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range.",
Request: new(qbtypes.QueryRangeRequest),
RequestExamples: queryRangeV5Examples, // []handler.OpenAPIExample
// ...
}
var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
// ...
},
},
},
},
},
// ... more examples
},
// ...
})).Methods(http.MethodPost).GetError(); err != nil {
return err
// ... more examples
}
```
@@ -339,4 +338,4 @@ func (Step) JSONSchema() (jsonschema.Schema, error) {
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil` which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/querier/openapi.go` for reference.
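As a minimal sketch of the three conventions above (the `Widget` and `WidgetKind` types are hypothetical, and the exact `Enum()` signature is assumed to be whatever the schema generator expects -- none of this is a real type in the repository):
```go
package exampletypes

// Widget is a hypothetical request type illustrating the tag conventions.
type Widget struct {
	// The "name" key must be present in the request JSON, so it carries
	// required:"true" (key presence, not zero-value).
	Name string `json:"name" required:"true"`

	// A slice defaults to nil in Go, which serializes to null, so it is
	// marked nullable:"true". If it should always be an array, initialize
	// it instead and drop the tag.
	Tags []string `json:"tags" nullable:"true"`

	Kind WidgetKind `json:"kind"`
}

// WidgetKind has a fixed set of acceptable values.
type WidgetKind string

// Enum lists the acceptable values so the generated JSON schema carries a
// proper enum constraint (return type assumed for illustration).
func (WidgetKind) Enum() []any {
	return []any{"small", "large"}
}
```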

View File

@@ -1,178 +0,0 @@
package querier
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
type handler struct {
set factory.ProviderSettings
querier querier.Querier
community querier.Handler
}
func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
return &handler{
set: set,
querier: querier,
community: community,
}
}
func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
bodyBytes, err := io.ReadAll(req.Body)
if err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
return
}
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
if err != nil {
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
return
}
results := []any{}
for _, item := range anomalies.Results {
results = append(results, item)
}
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{
Type: queryRangeRequest.RequestType,
Data: qbtypes.QueryData{
Results: results,
},
Meta: qbtypes.ExecStats{
StepIntervals: stepIntervals,
},
}
render.Success(rw, http.StatusOK, finalResp)
return
}
// regular query range request, delegate to community handler
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
h.community.QueryRange(rw, req)
}
func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
h.community.QueryRawStream(rw, req)
}
func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
h.community.ReplaceVariables(rw, req)
}
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
for _, fn := range anomalyQuery.Functions {
if fn.Name == qbtypes.FunctionNameAnomaly {
for _, arg := range fn.Args {
if arg.Name == "seasonality" {
if seasonalityStr, ok := arg.Value.(string); ok {
switch seasonalityStr {
case "weekly":
return anomalyV2.SeasonalityWeekly
case "hourly":
return anomalyV2.SeasonalityHourly
}
}
}
}
}
}
return anomalyV2.SeasonalityDaily // default
}
func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
switch seasonality {
case anomalyV2.SeasonalityWeekly:
return anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
)
case anomalyV2.SeasonalityHourly:
return anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
)
default:
return anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
)
}
}
func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
seasonality := extractSeasonality(anomalyQuery)
provider := h.createAnomalyProvider(seasonality)
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}

View File

@@ -10,7 +10,9 @@ import (
"github.com/SigNoz/signoz/ee/query-service/usage"
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
@@ -55,6 +57,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}, config)
@@ -103,6 +106,14 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
// v4
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.Handle("/api/v5/query_range", handler.New(
am.ViewAccess(ah.queryRangeV5),
querierAPI.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
// Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))

View File

@@ -2,11 +2,16 @@ package api
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -15,6 +20,8 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -137,3 +144,140 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
}
}
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
for _, fn := range anomalyQuery.Functions {
if fn.Name == qbtypes.FunctionNameAnomaly {
for _, arg := range fn.Args {
if arg.Name == "seasonality" {
if seasonalityStr, ok := arg.Value.(string); ok {
switch seasonalityStr {
case "weekly":
return anomalyV2.SeasonalityWeekly
case "hourly":
return anomalyV2.SeasonalityHourly
}
}
}
}
}
}
return anomalyV2.SeasonalityDaily // default
}
func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
switch seasonality {
case anomalyV2.SeasonalityWeekly:
return anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
)
case anomalyV2.SeasonalityHourly:
return anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
)
default:
return anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
)
}
}
func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
seasonality := extractSeasonality(anomalyQuery)
provider := createAnomalyProvider(aH, seasonality)
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
}
func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
bodyBytes, err := io.ReadAll(req.Body)
if err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
return
}
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
}
orgID, err := valuer.NewUUID(claims.OrgID)
if err != nil {
render.Error(rw, err)
return
}
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
if err != nil {
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
return
}
results := []any{}
for _, item := range anomalies.Results {
results = append(results, item)
}
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{
Type: queryRangeRequest.RequestType,
Data: qbtypes.QueryData{
Results: results,
},
Meta: qbtypes.ExecStats{
StepIntervals: stepIntervals,
},
}
render.Success(rw, http.StatusOK, finalResp)
return
} else {
// regular query range request, let the querier handle it
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
aH.QuerierAPI.QueryRange(rw, req)
}
}

View File

@@ -240,6 +240,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterInfraMetricsRoutes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterQueryRangeV5Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)

View File

@@ -0,0 +1,7 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
export const getDeploymentsData = (): Promise<
AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);

View File

@@ -0,0 +1,16 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
PayloadProps,
UpdateCustomDomainProps,
} from 'types/api/customDomain/types';
const updateSubDomainAPI = async (
props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
axios.put(`/deployments/me/host`, {
...props.data,
});
export default updateSubDomainAPI;

View File

@@ -678,7 +678,7 @@ export const useUpdateIngestionKeyLimit = <
* @summary Search ingestion keys for workspace
*/
export const searchIngestionKeys = (
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<SearchIngestionKeys200>({
@@ -699,7 +699,7 @@ export const getSearchIngestionKeysQueryOptions = <
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -737,7 +737,7 @@ export function useSearchIngestionKeys<
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
TError = RenderErrorResponseDTO
>(
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof searchIngestionKeys>>,
@@ -762,7 +762,7 @@ export function useSearchIngestionKeys<
*/
export const invalidateSearchIngestionKeys = async (
queryClient: QueryClient,
params: SearchIngestionKeysParams,
params?: SearchIngestionKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(

View File

@@ -16,7 +16,6 @@ import type {
Querybuildertypesv5QueryRangeRequestDTO,
QueryRangeV5200,
RenderErrorResponseDTO,
ReplaceVariables200,
} from '../sigNoz.schemas';
type AwaitedInput<T> = PromiseLike<T> | T;
@@ -106,86 +105,3 @@ export const useQueryRangeV5 = <
return useMutation(mutationOptions);
};
/**
* Replace variables in a query
* @summary Replace variables
*/
export const replaceVariables = (
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ReplaceVariables200>({
url: `/api/v5/substitute_vars`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
signal,
});
};
export const getReplaceVariablesMutationOptions = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationKey = ['replaceVariables'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof replaceVariables>>,
{ data: Querybuildertypesv5QueryRangeRequestDTO }
> = (props) => {
const { data } = props ?? {};
return replaceVariables(data);
};
return { mutationFn, ...mutationOptions };
};
export type ReplaceVariablesMutationResult = NonNullable<
Awaited<ReturnType<typeof replaceVariables>>
>;
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
/**
* @summary Replace variables
*/
export const useReplaceVariables = <
TError = RenderErrorResponseDTO,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof replaceVariables>>,
TError,
{ data: Querybuildertypesv5QueryRangeRequestDTO },
TContext
> => {
const mutationOptions = getReplaceVariablesMutationOptions(options);
return useMutation(mutationOptions);
};

View File

@@ -453,18 +453,18 @@ export interface GatewaytypesGettableCreatedIngestionKeyDTO {
/**
* @type string
*/
id: string;
id?: string;
/**
* @type string
*/
value: string;
value?: string;
}
export interface GatewaytypesGettableCreatedIngestionKeyLimitDTO {
/**
* @type string
*/
id: string;
id?: string;
}
export interface GatewaytypesGettableIngestionKeysDTO {
@@ -616,7 +616,7 @@ export interface GatewaytypesPostableIngestionKeyDTO {
/**
* @type string
*/
name: string;
name?: string;
/**
* @type array
* @nullable true
@@ -638,7 +638,7 @@ export interface GatewaytypesPostableIngestionKeyLimitDTO {
}
export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
config: GatewaytypesLimitConfigDTO;
config?: GatewaytypesLimitConfigDTO;
/**
* @type array
* @nullable true
@@ -927,18 +927,6 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
unit: string;
}
export interface MetrictypesComparisonSpaceAggregationParamDTO {
/**
* @type string
*/
operator?: string;
/**
* @type number
* @format double
*/
threshold?: number;
}
export enum MetrictypesSpaceAggregationDTO {
sum = 'sum',
avg = 'avg',
@@ -950,7 +938,6 @@ export enum MetrictypesSpaceAggregationDTO {
p90 = 'p90',
p95 = 'p95',
p99 = 'p99',
histogram_count = 'histogram_count',
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
@@ -1301,7 +1288,6 @@ export interface Querybuildertypesv5LogAggregationDTO {
}
export interface Querybuildertypesv5MetricAggregationDTO {
comparisonSpaceAggregationParam?: MetrictypesComparisonSpaceAggregationParamDTO;
/**
* @type string
*/
@@ -2433,34 +2419,34 @@ export interface ZeustypesGettableHostDTO {
* @type array
* @nullable true
*/
hosts: ZeustypesHostDTO[] | null;
hosts?: ZeustypesHostDTO[] | null;
/**
* @type string
*/
name: string;
name?: string;
/**
* @type string
*/
state: string;
state?: string;
/**
* @type string
*/
tier: string;
tier?: string;
}
export interface ZeustypesHostDTO {
/**
* @type boolean
*/
is_default: boolean;
is_default?: boolean;
/**
* @type string
*/
name: string;
name?: string;
/**
* @type string
*/
url: string;
url?: string;
}
export interface ZeustypesPostableHostDTO {
@@ -2474,43 +2460,43 @@ export interface ZeustypesPostableProfileDTO {
/**
* @type string
*/
existing_observability_tool: string;
existing_observability_tool?: string;
/**
* @type boolean
*/
has_existing_observability_tool: boolean;
has_existing_observability_tool?: boolean;
/**
* @type integer
* @format int64
*/
logs_scale_per_day_in_gb: number;
logs_scale_per_day_in_gb?: number;
/**
* @type integer
* @format int64
*/
number_of_hosts: number;
number_of_hosts?: number;
/**
* @type integer
* @format int64
*/
number_of_services: number;
number_of_services?: number;
/**
* @type array
* @nullable true
*/
reasons_for_interest_in_signoz: string[] | null;
reasons_for_interest_in_signoz?: string[] | null;
/**
* @type string
*/
timeline_for_migrating_to_signoz: string;
timeline_for_migrating_to_signoz?: string;
/**
* @type boolean
*/
uses_otel: boolean;
uses_otel?: boolean;
/**
* @type string
*/
where_did_you_discover_signoz: string;
where_did_you_discover_signoz?: string;
}
export type ChangePasswordPathParameters = {
@@ -3053,7 +3039,7 @@ export type SearchIngestionKeysParams = {
* @type string
* @description undefined
*/
name: string;
name?: string;
/**
* @type integer
* @description undefined
@@ -3243,11 +3229,3 @@ export type QueryRangeV5200 = {
*/
status?: string;
};
export type ReplaceVariables200 = {
data?: Querybuildertypesv5QueryRangeRequestDTO;
/**
* @type string
*/
status?: string;
};

View File

@@ -0,0 +1,20 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';
const updateProfile = async (
props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
const response = await GatewayApiV2Instance.put('/profiles/me', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
};
export default updateProfile;

View File

@@ -1,6 +1,7 @@
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import { useEffect, useMemo, useState } from 'react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useCopyToClipboard } from 'react-use';
import { Color } from '@signozhq/design-tokens';
import {
@@ -14,16 +15,14 @@ import {
Tag,
Typography,
} from 'antd';
import {
RenderErrorResponseDTO,
ZeustypesHostDTO,
} from 'api/generated/services/sigNoz.schemas';
import { useGetHosts, usePutHost } from 'api/generated/services/zeus';
import updateSubDomainAPI from 'api/customDomain/updateSubDomain';
import { AxiosError } from 'axios';
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import { useNotifications } from 'hooks/useNotifications';
import { InfoIcon, Link2, Pencil } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { HostsProps } from 'types/api/customDomain/types';
import './CustomDomainSettings.styles.scss';
@@ -36,7 +35,7 @@ export default function CustomDomainSettings(): JSX.Element {
const { notifications } = useNotifications();
const [isEditModalOpen, setIsEditModalOpen] = useState(false);
const [isPollingEnabled, setIsPollingEnabled] = useState(false);
const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);
const [hosts, setHosts] = useState<HostsProps[] | null>(null);
const [updateDomainError, setUpdateDomainError] = useState<AxiosError | null>(
null,
@@ -58,37 +57,36 @@ export default function CustomDomainSettings(): JSX.Element {
};
const {
data: hostsData,
isLoading: isLoadingHosts,
isFetching: isFetchingHosts,
refetch: refetchHosts,
} = useGetHosts();
data: deploymentsData,
isLoading: isLoadingDeploymentsData,
isFetching: isFetchingDeploymentsData,
refetch: refetchDeploymentsData,
} = useGetDeploymentsData(true);
const {
mutate: updateSubDomain,
isLoading: isLoadingUpdateCustomDomain,
} = usePutHost<AxiosError<RenderErrorResponseDTO>>();
const stripProtocol = (url: string): string => {
return url?.split('://')[1] ?? url;
};
const dnsSuffix = useMemo(() => {
const defaultHost = hosts?.find((h) => h.is_default);
return defaultHost?.url && defaultHost?.name
? defaultHost.url.split(`${defaultHost.name}.`)[1] || ''
: '';
}, [hosts]);
} = useMutation(updateSubDomainAPI, {
onSuccess: () => {
setIsPollingEnabled(true);
refetchDeploymentsData();
setIsEditModalOpen(false);
},
onError: (error: AxiosError) => {
setUpdateDomainError(error);
setIsPollingEnabled(false);
},
});
useEffect(() => {
if (isFetchingHosts) {
if (isFetchingDeploymentsData) {
return;
}
if (hostsData?.data?.status === 'success') {
setHosts(hostsData?.data?.data?.hosts ?? null);
if (deploymentsData?.data?.status === 'success') {
setHosts(deploymentsData.data.data.hosts);
const activeCustomDomain = hostsData?.data?.data?.hosts?.find(
const activeCustomDomain = deploymentsData.data.data.hosts.find(
(host) => !host.is_default,
);
@@ -99,36 +97,32 @@ export default function CustomDomainSettings(): JSX.Element {
}
}
if (hostsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
if (deploymentsData?.data?.data?.state !== 'HEALTHY' && isPollingEnabled) {
setTimeout(() => {
refetchHosts();
refetchDeploymentsData();
}, 3000);
}
if (hostsData?.data?.data?.state === 'HEALTHY') {
if (deploymentsData?.data?.data.state === 'HEALTHY') {
setIsPollingEnabled(false);
}
}, [hostsData, refetchHosts, isPollingEnabled, isFetchingHosts]);
}, [
deploymentsData,
refetchDeploymentsData,
isPollingEnabled,
isFetchingDeploymentsData,
]);
const onUpdateCustomDomainSettings = (): void => {
editForm
.validateFields()
.then((values) => {
if (values.subdomain) {
updateSubDomain(
{ data: { name: values.subdomain } },
{
onSuccess: () => {
setIsPollingEnabled(true);
refetchHosts();
setIsEditModalOpen(false);
},
onError: (error: AxiosError<RenderErrorResponseDTO>) => {
setUpdateDomainError(error as AxiosError);
setIsPollingEnabled(false);
},
updateSubDomain({
data: {
name: values.subdomain,
},
);
});
setCustomDomainDetails({
subdomain: values.subdomain,
@@ -140,8 +134,10 @@ export default function CustomDomainSettings(): JSX.Element {
});
};
const onCopyUrlHandler = (url: string): void => {
setCopyUrl(stripProtocol(url));
const onCopyUrlHandler = (host: string): void => {
const url = `${host}.${deploymentsData?.data.data.cluster.region.dns}`;
setCopyUrl(url);
notifications.success({
message: 'Copied to clipboard',
});
@@ -161,7 +157,7 @@ export default function CustomDomainSettings(): JSX.Element {
</div>
<div className="custom-domain-settings-content">
{!isLoadingHosts && (
{!isLoadingDeploymentsData && (
<Card className="custom-domain-settings-card">
<div className="custom-domain-settings-content-header">
Team {org?.[0]?.displayName} Information
@@ -173,9 +169,10 @@ export default function CustomDomainSettings(): JSX.Element {
<div
className="custom-domain-url"
key={host.name}
onClick={(): void => onCopyUrlHandler(host.url || '')}
onClick={(): void => onCopyUrlHandler(host.name)}
>
<Link2 size={12} /> {stripProtocol(host.url || '')}
<Link2 size={12} /> {host.name}.
{deploymentsData?.data.data.cluster.region.dns}
{host.is_default && <Tag color={Color.BG_ROBIN_500}>Default</Tag>}
</div>
))}
@@ -184,7 +181,11 @@ export default function CustomDomainSettings(): JSX.Element {
<div className="custom-domain-url-edit-btn">
<Button
className="periscope-btn"
disabled={isLoadingHosts || isFetchingHosts || isPollingEnabled}
disabled={
isLoadingDeploymentsData ||
isFetchingDeploymentsData ||
isPollingEnabled
}
type="default"
icon={<Pencil size={10} />}
onClick={(): void => setIsEditModalOpen(true)}
@@ -197,7 +198,7 @@ export default function CustomDomainSettings(): JSX.Element {
{isPollingEnabled && (
<Alert
className="custom-domain-update-status"
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${dnsSuffix}. This may take a few mins.`}
message={`Updating your URL to ⎯ ${customDomainDetails?.subdomain}.${deploymentsData?.data.data.cluster.region.dns}. This may take a few mins.`}
type="info"
icon={<InfoIcon size={12} />}
/>
@@ -205,7 +206,7 @@ export default function CustomDomainSettings(): JSX.Element {
</Card>
)}
{isLoadingHosts && (
{isLoadingDeploymentsData && (
<Card className="custom-domain-settings-card">
<Skeleton
className="custom-domain-settings-skeleton"
@@ -254,7 +255,7 @@ export default function CustomDomainSettings(): JSX.Element {
addonBefore={updateDomainError && <InfoIcon size={12} color="red" />}
placeholder="Enter Domain"
onChange={(): void => setUpdateDomainError(null)}
addonAfter={dnsSuffix}
addonAfter={deploymentsData?.data.data.cluster.region.dns}
autoFocus
/>
</Form.Item>
@@ -266,8 +267,7 @@ export default function CustomDomainSettings(): JSX.Element {
{updateDomainError.status === 409 ? (
<Alert
message={
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
?.message ||
(updateDomainError?.response?.data as { error?: string })?.error ||
'You’ve already updated the custom domain once today. To make further changes, please contact our support team for assistance.'
}
type="warning"
@@ -275,10 +275,7 @@ export default function CustomDomainSettings(): JSX.Element {
/>
) : (
<Typography.Text type="danger">
{
(updateDomainError?.response?.data as RenderErrorResponseDTO)?.error
?.message
}
{(updateDomainError.response?.data as { error: string })?.error}
</Typography.Text>
)}
</div>

View File

@@ -1,128 +0,0 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import CustomDomainSettings from '../CustomDomainSettings';
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
const mockHostsResponse: GetHosts200 = {
status: 'success',
data: {
name: 'accepted-starfish',
state: 'HEALTHY',
tier: 'PREMIUM',
hosts: [
{
name: 'accepted-starfish',
is_default: true,
url: 'https://accepted-starfish.test.cloud',
},
{
name: 'custom-host',
is_default: false,
url: 'https://custom-host.test.cloud',
},
],
},
};
describe('CustomDomainSettings', () => {
afterEach(() => server.resetHandlers());
it('renders host URLs with protocol stripped and marks the default host', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await screen.findByText(/custom-host\.test\.cloud/i);
expect(screen.getByText('Default')).toBeInTheDocument();
});
it('opens edit modal with DNS suffix derived from the default host', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
expect(
screen.getByRole('dialog', { name: /customize your team[']s url/i }),
).toBeInTheDocument();
// DNS suffix is the part of the default host URL after the name prefix
expect(screen.getByText('test.cloud')).toBeInTheDocument();
});
it('submits PUT to /zeus/hosts with the entered subdomain as the payload', async () => {
let capturedBody: Record<string, unknown> = {};
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
rest.put(ZEUS_HOSTS_ENDPOINT, async (req, res, ctx) => {
capturedBody = await req.json<Record<string, unknown>>();
return res(ctx.status(200), ctx.json({}));
}),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
const input = screen.getByPlaceholderText(/enter domain/i);
await user.clear(input);
await user.type(input, 'myteam');
await user.click(screen.getByRole('button', { name: /apply changes/i }));
await waitFor(() => {
expect(capturedBody).toEqual({ name: 'myteam' });
});
});
it('shows contact support option when domain update returns 409', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
rest.put(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(409),
ctx.json({ error: { message: 'Already updated today' } }),
),
),
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<CustomDomainSettings />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
await user.click(
screen.getByRole('button', { name: /customize team[']s url/i }),
);
await user.type(screen.getByPlaceholderText(/enter domain/i), 'myteam');
await user.click(screen.getByRole('button', { name: /apply changes/i }));
expect(
await screen.findByRole('button', { name: /contact support/i }),
).toBeInTheDocument();
});
});

View File

@@ -1,6 +1,7 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
HistogramTooltipProps,
TooltipRenderArgs,
@@ -21,11 +22,21 @@ export default function Histogram(props: HistogramChartProps): JSX.Element {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: HistogramTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <HistogramTooltip {...tooltipProps} />;
},

View File

@@ -1,6 +1,7 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import TimeSeriesTooltip from 'lib/uPlotV2/components/Tooltip/TimeSeriesTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
TimeSeriesTooltipProps,
TooltipRenderArgs,
@@ -16,11 +17,21 @@ export default function TimeSeries(props: TimeSeriesChartProps): JSX.Element {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: TimeSeriesTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <TimeSeriesTooltip {...tooltipProps} />;
},

View File

@@ -2,10 +2,10 @@
import { useEffect, useState } from 'react';
import { Button, Skeleton, Tag, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { useGetHosts } from 'api/generated/services/zeus';
import ROUTES from 'constants/routes';
import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData';
import history from 'lib/history';
import { Link2 } from 'lucide-react';
import { Globe, Link2 } from 'lucide-react';
import Card from 'periscope/components/Card/Card';
import { useAppContext } from 'providers/App/App';
import { LicensePlatform } from 'types/api/licensesV3/getActive';
@@ -26,21 +26,36 @@ function DataSourceInfo({
const isEnabled =
activeLicense && activeLicense.platform === LicensePlatform.CLOUD;
const { data: hostsData, isError } = useGetHosts({
query: { enabled: isEnabled || false },
});
const {
data: deploymentsData,
isError: isErrorDeploymentsData,
} = useGetDeploymentsData(isEnabled || false);
const [region, setRegion] = useState<string>('');
const [url, setUrl] = useState<string>('');
useEffect(() => {
if (hostsData) {
const defaultHost = hostsData?.data?.data?.hosts?.find((h) => h.is_default);
if (defaultHost?.url) {
const url = defaultHost?.url?.split('://')[1] ?? '';
setUrl(url);
if (deploymentsData) {
switch (deploymentsData?.data.data.cluster.region.name) {
case 'in':
setRegion('India');
break;
case 'us':
setRegion('United States');
break;
case 'eu':
setRegion('Europe');
break;
default:
setRegion(deploymentsData?.data.data.cluster.region.name);
break;
}
setUrl(
`${deploymentsData?.data.data.name}.${deploymentsData?.data.data.cluster.region.dns}`,
);
}
}, [hostsData]);
}, [deploymentsData]);
const renderNotSendingData = (): JSX.Element => (
<>
@@ -108,8 +123,14 @@ function DataSourceInfo({
</Button>
</div>
{!isError && hostsData && (
{!isErrorDeploymentsData && deploymentsData && (
<div className="workspace-details">
<div className="workspace-region">
<Globe size={10} />
<Typography>{region}</Typography>
</div>
<div className="workspace-url">
<Link2 size={12} />
@@ -135,11 +156,17 @@ function DataSourceInfo({
Hello there, Welcome to your SigNoz workspace
</Typography>
{!isError && hostsData && (
{!isErrorDeploymentsData && deploymentsData && (
<Card className="welcome-card">
<Card.Content>
<div className="workspace-ready-container">
<div className="workspace-details">
<div className="workspace-region">
<Globe size={10} />
<Typography>{region}</Typography>
</div>
<div className="workspace-url">
<Link2 size={12} />

View File

@@ -1,69 +0,0 @@
import { GetHosts200 } from 'api/generated/services/sigNoz.schemas';
import { rest, server } from 'mocks-server/server';
import { render, screen } from 'tests/test-utils';
import DataSourceInfo from '../DataSourceInfo';
const ZEUS_HOSTS_ENDPOINT = '*/api/v2/zeus/hosts';
const mockHostsResponse: GetHosts200 = {
status: 'success',
data: {
name: 'accepted-starfish',
state: 'HEALTHY',
tier: 'PREMIUM',
hosts: [
{
name: 'accepted-starfish',
is_default: true,
url: 'https://accepted-starfish.test.cloud',
},
{
name: 'custom-host',
is_default: false,
url: 'https://custom-host.test.cloud',
},
],
},
};
describe('DataSourceInfo', () => {
afterEach(() => server.resetHandlers());
it('renders the default workspace URL with protocol stripped', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
});
it('does not render workspace URL when GET /zeus/hosts fails', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(500), ctx.json({})),
),
);
render(<DataSourceInfo dataSentToSigNoz={false} isLoading={false} />);
await screen.findByText(/Your workspace is ready/i);
expect(screen.queryByText(/signoz\.cloud/i)).not.toBeInTheDocument();
});
it('renders workspace URL in the data-received view when telemetry is flowing', async () => {
server.use(
rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(mockHostsResponse)),
),
);
render(<DataSourceInfo dataSentToSigNoz={true} isLoading={false} />);
await screen.findByText(/accepted-starfish\.test\.cloud/i);
});
});

View File

@@ -36,6 +36,24 @@ jest.mock('react-router-dom', () => {
};
});
// Mock deployments data hook to avoid unrelated network calls in this page
jest.mock(
'hooks/CustomDomain/useGetDeploymentsData',
(): Record<string, unknown> => ({
useGetDeploymentsData: (): {
data: undefined;
isLoading: boolean;
isFetching: boolean;
isError: boolean;
} => ({
data: undefined,
isLoading: false,
isFetching: false,
isError: false,
}),
}),
);
const TEST_CREATED_UPDATED = '2024-01-01T00:00:00Z';
const TEST_EXPIRES_AT = '2030-01-01T00:00:00Z';
const TEST_WORKSPACE_ID = 'w1';

View File

@@ -26,7 +26,7 @@ jest.mock('lib/history', () => ({
// API Endpoints
const ORG_PREFERENCES_ENDPOINT = '*/api/v1/org/preferences/list';
const UPDATE_ORG_PREFERENCE_ENDPOINT = '*/api/v1/org/preferences/name/update';
const UPDATE_PROFILE_ENDPOINT = '*/api/v2/zeus/profiles';
const UPDATE_PROFILE_ENDPOINT = '*/api/gateway/v2/profiles/me';
const EDIT_ORG_ENDPOINT = '*/api/v2/orgs/me';
const INVITE_USERS_ENDPOINT = '*/api/v1/invite/bulk/create';
@@ -277,46 +277,6 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});
it('fires PUT to /zeus/profiles and advances to step 4 on success', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
let profilePutCalled = false;
server.use(
rest.put(UPDATE_PROFILE_ENDPOINT, (_, res, ctx) => {
profilePutCalled = true;
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
);
render(<OnboardingQuestionaire />);
// Navigate to step 3
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
await user.type(
await screen.findByPlaceholderText(/e\.g\., googling/i),
'Found via Google',
);
await user.click(screen.getByLabelText(/lowering observability costs/i));
await user.click(screen.getByRole('button', { name: /next/i }));
// Click "I'll do this later" on step 3 — triggers PUT /zeus/profiles
await user.click(
await screen.findByRole('button', { name: /i'll do this later/i }),
);
await waitFor(() => {
expect(profilePutCalled).toBe(true);
// Step 3 content is gone — successfully advanced to step 4
expect(
screen.queryByText(/what does your scale approximately look like/i),
).not.toBeInTheDocument();
});
});
it('shows do later button', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);

View File

@@ -1,10 +1,8 @@
import { useEffect, useState } from 'react';
import { useMutation, useQuery } from 'react-query';
import { toast } from '@signozhq/sonner';
import { NotificationInstance } from 'antd/es/notification/interface';
import logEvent from 'api/common/logEvent';
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { usePutProfile } from 'api/generated/services/zeus';
import updateProfileAPI from 'api/onboarding/updateProfile';
import listOrgPreferences from 'api/v1/org/preferences/list';
import updateOrgPreferenceAPI from 'api/v1/org/preferences/name/update';
import { AxiosError } from 'axios';
@@ -123,9 +121,20 @@ function OnboardingQuestionaire(): JSX.Element {
optimiseSignozDetails.hostsPerDay === 0 &&
optimiseSignozDetails.services === 0;
const { mutate: updateProfile, isLoading: isUpdatingProfile } = usePutProfile<
AxiosError<RenderErrorResponseDTO>
>();
const { mutate: updateProfile, isLoading: isUpdatingProfile } = useMutation(
updateProfileAPI,
{
onSuccess: () => {
setCurrentStep(4);
},
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
// Allow user to proceed even if API fails
setCurrentStep(4);
},
},
);
const { mutate: updateOrgPreference } = useMutation(updateOrgPreferenceAPI, {
onSuccess: () => {
@@ -144,44 +153,29 @@ function OnboardingQuestionaire(): JSX.Element {
nextPageID: 4,
});
updateProfile(
{
data: {
uses_otel: orgDetails?.usesOtel as boolean,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
existing_observability_tool:
orgDetails?.observabilityTool === 'Others'
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
? ([
...(signozDetails?.interestInSignoz?.filter(
(item) => item !== 'Others',
) || []),
signozDetails?.otherInterestInSignoz,
] as string[])
: (signozDetails?.interestInSignoz as string[]),
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
number_of_services: optimiseSignozDetails?.services as number,
},
},
{
onSuccess: () => {
setCurrentStep(4);
},
onError: (error: any) => {
toast.error(error?.message || SOMETHING_WENT_WRONG);
// Allow user to proceed even if API fails
setCurrentStep(4);
},
},
);
updateProfile({
uses_otel: orgDetails?.usesOtel as boolean,
has_existing_observability_tool: orgDetails?.usesObservability as boolean,
existing_observability_tool:
orgDetails?.observabilityTool === 'Others'
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
? ([
...(signozDetails?.interestInSignoz?.filter(
(item) => item !== 'Others',
) || []),
signozDetails?.otherInterestInSignoz,
] as string[])
: (signozDetails?.interestInSignoz as string[]),
logs_scale_per_day_in_gb: optimiseSignozDetails?.logsPerDay as number,
number_of_hosts: optimiseSignozDetails?.hostsPerDay as number,
number_of_services: optimiseSignozDetails?.services as number,
});
};
const handleOnboardingComplete = (): void => {

View File

@@ -0,0 +1,13 @@
import { useQuery, UseQueryResult } from 'react-query';
import { getDeploymentsData } from 'api/customDomain/getDeploymentsData';
import { AxiosError, AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';
export const useGetDeploymentsData = (
isEnabled: boolean,
): UseQueryResult<AxiosResponse<DeploymentsDataProps>, AxiosError> =>
useQuery<AxiosResponse<DeploymentsDataProps>, AxiosError>({
queryKey: ['getDeploymentsData'],
queryFn: () => getDeploymentsData(),
enabled: isEnabled,
});
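A brief usage sketch for the new hook (component side; isCloudUser is an illustrative flag, not from the diff):

// Only fires the request when the flag is true; cached under the 'getDeploymentsData' key.
const { data, isLoading } = useGetDeploymentsData(isCloudUser);
// Unwrap: AxiosResponse<DeploymentsDataProps> -> DeploymentsDataProps -> DeploymentData
const deployment = data?.data.data;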


@@ -1,31 +1,8 @@
import { useMemo } from 'react';
import { HistogramTooltipProps, TooltipContentItem } from '../types';
import { HistogramTooltipProps } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';
export default function HistogramTooltip(
props: HistogramTooltipProps,
): JSX.Element {
const content = useMemo(
(): TooltipContentItem[] =>
buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: props.yAxisUnit ?? '',
decimalPrecision: props.decimalPrecision,
}),
[
props.uPlotInstance,
props.seriesIndex,
props.dataIndexes,
props.yAxisUnit,
props.decimalPrecision,
],
);
return <Tooltip {...props} content={content} showTooltipHeader={false} />;
return <Tooltip {...props} showTooltipHeader={false} />;
}


@@ -1,31 +1,8 @@
import { useMemo } from 'react';
import { TimeSeriesTooltipProps, TooltipContentItem } from '../types';
import { TimeSeriesTooltipProps } from '../types';
import Tooltip from './Tooltip';
import { buildTooltipContent } from './utils';
export default function TimeSeriesTooltip(
props: TimeSeriesTooltipProps,
): JSX.Element {
const content = useMemo(
(): TooltipContentItem[] =>
buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: props.yAxisUnit ?? '',
decimalPrecision: props.decimalPrecision,
}),
[
props.uPlotInstance,
props.seriesIndex,
props.dataIndexes,
props.yAxisUnit,
props.decimalPrecision,
],
);
return <Tooltip {...props} content={content} />;
return <Tooltip {...props} />;
}


@@ -28,23 +28,18 @@
font-weight: 500;
}
.uplot-tooltip-list-container {
overflow-y: auto;
max-height: 330px;
.uplot-tooltip-list {
&::-webkit-scrollbar {
width: 0.3rem;
}
.uplot-tooltip-list {
&::-webkit-scrollbar {
width: 0.3rem;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-track {
background: transparent;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-100);
border-radius: 0.5rem;
}
&::-webkit-scrollbar-thumb {
background: var(--bg-slate-100);
border-radius: 0.5rem;
}
}


@@ -1,4 +1,4 @@
import { useMemo, useState } from 'react';
import { useMemo } from 'react';
import { Virtuoso } from 'react-virtuoso';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
@@ -11,7 +11,6 @@ import './Tooltip.styles.scss';
const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const TOOLTIP_LIST_PADDING = 10;
export default function Tooltip({
uPlotInstance,
@@ -20,7 +19,7 @@ export default function Tooltip({
showTooltipHeader = true,
}: TooltipProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const [listHeight, setListHeight] = useState(0);
const tooltipContent = content ?? [];
const headerTitle = useMemo(() => {
@@ -42,52 +41,42 @@ export default function Tooltip({
showTooltipHeader,
]);
const virtuosoStyle = useMemo(() => {
return {
height:
listHeight > 0
? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
width: '100%',
};
}, [listHeight, tooltipContent.length]);
return (
<div
className={cx(
'uplot-tooltip-container',
isDarkMode ? 'darkMode' : 'lightMode',
)}
data-testid="uplot-tooltip-container"
>
{showTooltipHeader && (
<div className="uplot-tooltip-header" data-testid="uplot-tooltip-header">
<div className="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
)}
<div className="uplot-tooltip-list-container">
<div
style={{
height: Math.min(
tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
TOOLTIP_LIST_MAX_HEIGHT,
),
minHeight: 0,
}}
>
{tooltipContent.length > 0 ? (
<Virtuoso
className="uplot-tooltip-list"
data-testid="uplot-tooltip-list"
data={tooltipContent}
style={virtuosoStyle}
totalListHeightChanged={setListHeight}
defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
itemContent={(_, item): JSX.Element => (
<div className="uplot-tooltip-item" data-testid="uplot-tooltip-item">
<div className="uplot-tooltip-item">
<div
className="uplot-tooltip-item-marker"
style={{ borderColor: item.color }}
data-is-legend-marker={true}
data-testid="uplot-tooltip-item-marker"
/>
<div
className="uplot-tooltip-item-content"
style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
data-testid="uplot-tooltip-item-content"
>
{item.label}: {item.tooltipValue}
</div>

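Net effect of the tooltip hunks above: HistogramTooltip and TimeSeriesTooltip stop memoizing buildTooltipContent themselves, Tooltip treats the content prop as optional (content ?? []), and the Virtuoso list no longer reports its height back through totalListHeightChanged state. The wrapper height is now derived purely from item count; a one-line sketch of the sizing rule in effect, using the constants defined in Tooltip.tsx:

// 38px per row, capped at 330px; no state update per render.
const height = Math.min(tooltipContent.length * TOOLTIP_ITEM_HEIGHT, TOOLTIP_LIST_MAX_HEIGHT);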

@@ -1,208 +0,0 @@
import React from 'react';
import { VirtuosoMockContext } from 'react-virtuoso';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { render, RenderResult, screen } from 'tests/test-utils';
import uPlot from 'uplot';
import { TooltipContentItem } from '../../types';
import Tooltip from '../Tooltip';
type MockVirtuosoProps = {
data: TooltipContentItem[];
itemContent: (index: number, item: TooltipContentItem) => React.ReactNode;
className?: string;
style?: React.CSSProperties;
totalListHeightChanged?: (height: number) => void;
'data-testid'?: string;
};
let mockTotalListHeight = 200;
jest.mock('react-virtuoso', () => {
const actual = jest.requireActual('react-virtuoso');
return {
...actual,
Virtuoso: ({
data,
itemContent,
className,
style,
totalListHeightChanged,
'data-testid': dataTestId,
}: MockVirtuosoProps): JSX.Element => {
if (totalListHeightChanged) {
// Simulate Virtuoso reporting total list height
totalListHeightChanged(mockTotalListHeight);
}
return (
<div className={className} style={style} data-testid={dataTestId}>
{data.map((item, index) => (
<div key={item.label ?? index.toString()}>{itemContent(index, item)}</div>
))}
</div>
);
},
};
});
jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: jest.fn(),
}));
const mockUseIsDarkMode = useIsDarkMode as jest.MockedFunction<
typeof useIsDarkMode
>;
type TooltipTestProps = React.ComponentProps<typeof Tooltip>;
function createTooltipContent(
overrides: Partial<TooltipContentItem> = {},
): TooltipContentItem {
return {
label: 'Series A',
value: 10,
tooltipValue: '10',
color: '#ff0000',
isActive: true,
...overrides,
};
}
function createUPlotInstance(cursorIdx: number | null): uPlot {
return ({
data: [[1], []],
cursor: { idx: cursorIdx },
// The rest of the uPlot fields are not used by Tooltip
} as unknown) as uPlot;
}
function renderTooltip(props: Partial<TooltipTestProps> = {}): RenderResult {
const defaultProps: TooltipTestProps = {
uPlotInstance: createUPlotInstance(null),
timezone: 'UTC',
content: [],
showTooltipHeader: true,
// TooltipRenderArgs (not used directly in component but required by type)
dataIndexes: [],
seriesIndex: null,
isPinned: false,
dismiss: jest.fn(),
viaSync: false,
} as TooltipTestProps;
return render(
<VirtuosoMockContext.Provider value={{ viewportHeight: 300, itemHeight: 38 }}>
<Tooltip {...defaultProps} {...props} />
</VirtuosoMockContext.Provider>,
);
}
describe('Tooltip', () => {
beforeEach(() => {
jest.clearAllMocks();
mockUseIsDarkMode.mockReturnValue(false);
mockTotalListHeight = 200;
});
it('renders header title when showTooltipHeader is true and cursor index is present', () => {
const uPlotInstance = createUPlotInstance(0);
renderTooltip({ uPlotInstance });
const expectedTitle = dayjs(1 * 1000)
.tz('UTC')
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
expect(screen.getByText(expectedTitle)).toBeInTheDocument();
});
it('does not render header when showTooltipHeader is false', () => {
const uPlotInstance = createUPlotInstance(0);
renderTooltip({ uPlotInstance, showTooltipHeader: false });
const unexpectedTitle = dayjs(1 * 1000)
.tz('UTC')
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
expect(screen.queryByText(unexpectedTitle)).not.toBeInTheDocument();
});
it('renders lightMode class when dark mode is disabled', () => {
const uPlotInstance = createUPlotInstance(null);
mockUseIsDarkMode.mockReturnValue(false);
renderTooltip({ uPlotInstance });
const container = screen.getByTestId('uplot-tooltip-container');
expect(container).toHaveClass('lightMode');
expect(container).not.toHaveClass('darkMode');
});
it('renders darkMode class when dark mode is enabled', () => {
const uPlotInstance = createUPlotInstance(null);
mockUseIsDarkMode.mockReturnValue(true);
renderTooltip({ uPlotInstance });
const container = screen.getByTestId('uplot-tooltip-container');
expect(container).toHaveClass('darkMode');
expect(container).not.toHaveClass('lightMode');
});
it('renders tooltip items when content is provided', () => {
const uPlotInstance = createUPlotInstance(null);
const content = [createTooltipContent()];
renderTooltip({ uPlotInstance, content });
const list = screen.queryByTestId('uplot-tooltip-list');
expect(list).not.toBeNull();
const marker = screen.getByTestId('uplot-tooltip-item-marker');
const itemContent = screen.getByTestId('uplot-tooltip-item-content');
expect(marker).toHaveStyle({ borderColor: '#ff0000' });
expect(itemContent).toHaveStyle({ color: '#ff0000', fontWeight: '700' });
expect(itemContent).toHaveTextContent('Series A: 10');
});
it('does not render tooltip list when content is empty', () => {
const uPlotInstance = createUPlotInstance(null);
renderTooltip({ uPlotInstance, content: [] });
const list = screen.queryByTestId('uplot-tooltip-list');
expect(list).toBeNull();
});
it('sets tooltip list height based on content length, height returned by Virtuoso', () => {
const uPlotInstance = createUPlotInstance(null);
const content = [createTooltipContent(), createTooltipContent()];
renderTooltip({ uPlotInstance, content });
const list = screen.getByTestId('uplot-tooltip-list');
expect(list).toHaveStyle({ height: '210px' });
});
it('sets tooltip list height based on content length when Virtuoso reports 0 height', () => {
const uPlotInstance = createUPlotInstance(null);
const content = [createTooltipContent(), createTooltipContent()];
mockTotalListHeight = 0;
renderTooltip({ uPlotInstance, content });
const list = screen.getByTestId('uplot-tooltip-list');
// Falls back to content length: 2 items * 38px = 76px
expect(list).toHaveStyle({ height: '76px' });
});
});


@@ -1,277 +0,0 @@
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import uPlot, { AlignedData, Series } from 'uplot';
import { TooltipContentItem } from '../../types';
import {
buildTooltipContent,
FALLBACK_SERIES_COLOR,
getTooltipBaseValue,
resolveSeriesColor,
} from '../utils';
jest.mock('components/Graph/yAxisConfig', () => ({
getToolTipValue: jest.fn(),
}));
const mockGetToolTipValue = getToolTipValue as jest.MockedFunction<
typeof getToolTipValue
>;
function createUPlotInstance(): uPlot {
return ({
data: [],
cursor: { idx: 0 },
} as unknown) as uPlot;
}
describe('Tooltip utils', () => {
describe('resolveSeriesColor', () => {
it('returns string stroke when provided', () => {
const u = createUPlotInstance();
const stroke: Series.Stroke = '#ff0000';
const color = resolveSeriesColor(stroke, u, 1);
expect(color).toBe('#ff0000');
});
it('returns result of stroke function when provided', () => {
const u = createUPlotInstance();
const strokeFn: Series.Stroke = (uInstance, seriesIdx): string =>
`color-${seriesIdx}-${uInstance.cursor.idx}`;
const color = resolveSeriesColor(strokeFn, u, 2);
expect(color).toBe('color-2-0');
});
it('returns fallback color when stroke is not provided', () => {
const u = createUPlotInstance();
const color = resolveSeriesColor(undefined, u, 1);
expect(color).toBe(FALLBACK_SERIES_COLOR);
});
});
describe('getTooltipBaseValue', () => {
it('returns value from aligned data for non-stacked charts', () => {
const data: AlignedData = [
[0, 1],
[10, 20],
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: false,
});
expect(result).toBe(20);
});
it('returns null when value is missing', () => {
const data: AlignedData = [
[0, 1],
[10, null],
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
});
expect(result).toBeNull();
});
it('subtracts next visible stacked value for stacked bar charts', () => {
// data[1] and data[2] contain stacked values at dataIndex 1
const data: AlignedData = [
[0, 1],
[30, 60], // series 1 stacked
[10, 20], // series 2 stacked
];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true } as Series,
{ label: 'B', show: true } as Series,
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: true,
series,
});
// 60 (stacked at series 1) - 20 (next visible stacked) = 40
expect(result).toBe(40);
});
it('skips hidden series when computing base value for stacked charts', () => {
const data: AlignedData = [
[0, 1],
[30, 60], // series 1 stacked
[10, 20], // series 2 stacked but hidden
[5, 10], // series 3 stacked and visible
];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true } as Series,
{ label: 'B', show: false } as Series,
{ label: 'C', show: true } as Series,
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: true,
series,
});
// 60 (stacked at series 1) - 10 (next *visible* stacked, series 3) = 50
expect(result).toBe(50);
});
it('does not subtract when there is no next visible series', () => {
const data: AlignedData = [
[0, 1],
[10, 20], // series 1
[5, (null as unknown) as number], // series 2 missing
];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true } as Series,
{ label: 'B', show: false } as Series,
];
const result = getTooltipBaseValue({
data,
index: 1,
dataIndex: 1,
isStackedBarChart: true,
series,
});
expect(result).toBe(20);
});
});
describe('buildTooltipContent', () => {
const yAxisUnit = 'ms';
const decimalPrecision: PrecisionOption = 2;
beforeEach(() => {
mockGetToolTipValue.mockReset();
mockGetToolTipValue.mockImplementation(
(value: string | number): string => `formatted-${value}`,
);
});
function createSeriesConfig(): Series[] {
return [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
{
label: 'B',
show: true,
stroke: (_u: uPlot, idx: number): string => `color-${idx}`,
} as Series,
{ label: 'C', show: false, stroke: '#00ff00' } as Series,
];
}
it('builds tooltip content with active series first', () => {
const data: AlignedData = [[0], [10], [20], [30]];
const series = createSeriesConfig();
const dataIndexes = [null, 0, 0, 0];
const u = createUPlotInstance();
const result = buildTooltipContent({
data,
series,
dataIndexes,
activeSeriesIndex: 2,
uPlotInstance: u,
yAxisUnit,
decimalPrecision,
});
expect(result).toHaveLength(2);
// Active (series index 2) should come first
expect(result[0]).toMatchObject<Partial<TooltipContentItem>>({
label: 'B',
value: 20,
tooltipValue: 'formatted-20',
color: 'color-2',
isActive: true,
});
expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
label: 'A',
value: 10,
tooltipValue: 'formatted-10',
color: '#ff0000',
isActive: false,
});
});
it('skips series with null data index or non-finite values', () => {
const data: AlignedData = [[0], [42], [Infinity]];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
{ label: 'B', show: true, stroke: '#00ff00' } as Series,
];
const dataIndexes = [null, 0, null];
const u = createUPlotInstance();
const result = buildTooltipContent({
data,
series,
dataIndexes,
activeSeriesIndex: 1,
uPlotInstance: u,
yAxisUnit,
decimalPrecision,
});
// Only the finite, non-null value from series A should be included
expect(result).toHaveLength(1);
expect(result[0].label).toBe('A');
});
it('uses stacked base values when building content for stacked bar charts', () => {
const data: AlignedData = [[0], [60], [30]];
const series: Series[] = [
{ label: 'x', show: true } as Series,
{ label: 'A', show: true, stroke: '#ff0000' } as Series,
{ label: 'B', show: true, stroke: '#00ff00' } as Series,
];
const dataIndexes = [null, 0, 0];
const u = createUPlotInstance();
const result = buildTooltipContent({
data,
series,
dataIndexes,
activeSeriesIndex: 1,
uPlotInstance: u,
yAxisUnit,
decimalPrecision,
isStackedBarChart: true,
});
// baseValue for series 1 at index 0 should be 60 - 30 (next visible) = 30
expect(result[0].value).toBe(30);
expect(result[1].value).toBe(30);
});
});
});


@@ -4,7 +4,7 @@ import uPlot, { AlignedData, Series } from 'uplot';
import { TooltipContentItem } from '../types';
export const FALLBACK_SERIES_COLOR = '#000000';
const FALLBACK_SERIES_COLOR = '#000000';
export function resolveSeriesColor(
stroke: Series.Stroke | undefined,


@@ -0,0 +1,53 @@
export interface HostsProps {
name: string;
is_default: boolean;
}
export interface RegionProps {
id: string;
name: string;
category: string;
dns: string;
created_at: string;
updated_at: string;
}
export interface ClusterProps {
id: string;
name: string;
cloud_account_id: string;
cloud_region: string;
address: string;
region: RegionProps;
}
export interface DeploymentData {
id: string;
name: string;
email: string;
state: string;
tier: string;
user: string;
password: string;
created_at: string;
updated_at: string;
cluster_id: string;
hosts: HostsProps[];
cluster: ClusterProps;
}
export interface DeploymentsDataProps {
status: string;
data: DeploymentData;
}
export type PayloadProps = {
status: string;
data: string;
};
export interface UpdateCustomDomainProps {
data: {
name: string;
};
}


@@ -0,0 +1,11 @@
export interface UpdateProfileProps {
reasons_for_interest_in_signoz: string[];
uses_otel: boolean;
has_existing_observability_tool: boolean;
existing_observability_tool: string;
logs_scale_per_day_in_gb: number;
number_of_services: number;
number_of_hosts: number;
where_did_you_discover_signoz: string;
timeline_for_migrating_to_signoz: string;
}

go.mod (287 changes)

@@ -3,23 +3,22 @@ module github.com/SigNoz/signoz
go 1.24.0
require (
dario.cat/mergo v1.0.1
dario.cat/mergo v1.0.2
github.com/AfterShip/clickhouse-sql-parser v0.4.16
github.com/ClickHouse/clickhouse-go/v2 v2.40.1
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/SigNoz/signoz-otel-collector v0.142.1-rc.1
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/bytedance/sonic v1.14.1
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.14.1
github.com/coreos/go-oidc/v3 v3.16.0
github.com/dgraph-io/ristretto/v2 v2.3.0
github.com/dustin/go-humanize v1.0.1
github.com/gin-gonic/gin v1.11.0
github.com/go-co-op/gocron v1.30.1
github.com/go-openapi/runtime v0.28.0
github.com/go-openapi/strfmt v0.23.0
github.com/go-openapi/strfmt v0.24.0
github.com/go-redis/redismock/v9 v9.2.0
github.com/go-viper/mapstructure/v2 v2.4.0
github.com/gojek/heimdall/v7 v7.0.3
@@ -30,22 +29,22 @@ require (
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
github.com/huandu/go-sqlbuilder v1.35.0
github.com/jackc/pgx/v5 v5.7.6
github.com/json-iterator/go v1.1.12
github.com/json-iterator/go v1.1.13-0.20220915233716-71ac16282d12
github.com/knadh/koanf v1.5.0
github.com/knadh/koanf/v2 v2.2.0
github.com/mailru/easyjson v0.7.7
github.com/open-telemetry/opamp-go v0.19.0
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
github.com/knadh/koanf/v2 v2.3.0
github.com/mailru/easyjson v0.9.0
github.com/open-telemetry/opamp-go v0.22.0
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.142.0
github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
github.com/opentracing/opentracing-go v1.2.0
github.com/pkg/errors v0.9.1
github.com/prometheus/alertmanager v0.28.1
github.com/prometheus/client_golang v1.23.2
github.com/prometheus/common v0.66.1
github.com/prometheus/prometheus v0.304.1
github.com/prometheus/common v0.67.4
github.com/prometheus/prometheus v0.308.0
github.com/redis/go-redis/extra/redisotel/v9 v9.15.1
github.com/redis/go-redis/v9 v9.15.1
github.com/redis/go-redis/v9 v9.17.2
github.com/rs/cors v1.11.1
github.com/russellhaering/gosaml2 v0.9.0
github.com/russellhaering/goxmldsig v1.2.0
@@ -54,7 +53,7 @@ require (
github.com/sethvargo/go-password v0.2.0
github.com/smartystreets/goconvey v1.8.1
github.com/soheilhy/cmux v0.1.5
github.com/spf13/cobra v1.10.1
github.com/spf13/cobra v1.10.2
github.com/srikanthccv/ClickHouse-go-mock v0.13.0
github.com/stretchr/testify v1.11.1
github.com/swaggest/jsonschema-go v0.3.78
@@ -64,43 +63,59 @@ require (
github.com/uptrace/bun/dialect/pgdialect v1.2.9
github.com/uptrace/bun/dialect/sqlitedialect v1.2.9
github.com/uptrace/bun/extra/bunotel v1.2.9
go.opentelemetry.io/collector/confmap v1.34.0
go.opentelemetry.io/collector/otelcol v0.128.0
go.opentelemetry.io/collector/pdata v1.34.0
go.opentelemetry.io/collector/confmap v1.48.0
go.opentelemetry.io/collector/otelcol v0.142.0
go.opentelemetry.io/collector/pdata v1.48.0
go.opentelemetry.io/contrib/config v0.10.0
go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
go.opentelemetry.io/otel v1.38.0
go.opentelemetry.io/otel/metric v1.38.0
go.opentelemetry.io/otel/sdk v1.38.0
go.opentelemetry.io/otel/trace v1.38.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0
go.opentelemetry.io/otel v1.39.0
go.opentelemetry.io/otel/metric v1.39.0
go.opentelemetry.io/otel/sdk v1.39.0
go.opentelemetry.io/otel/trace v1.39.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.0
go.uber.org/zap v1.27.1
golang.org/x/crypto v0.46.0
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/net v0.47.0
golang.org/x/oauth2 v0.30.0
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
golang.org/x/net v0.48.0
golang.org/x/oauth2 v0.33.0
golang.org/x/sync v0.19.0
golang.org/x/text v0.32.0
google.golang.org/protobuf v1.36.9
google.golang.org/protobuf v1.36.10
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
k8s.io/apimachinery v0.34.0
k8s.io/apimachinery v0.35.0-alpha.0
modernc.org/sqlite v1.39.1
)
require (
github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
github.com/aws/aws-sdk-go-v2/config v1.32.1 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.19.1 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.27.0 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/goccy/go-yaml v1.19.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/prometheus/client_golang/exp v0.0.0-20250914183048-a974e0d45e0a // indirect
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/swaggest/refl v1.4.0 // indirect
@@ -108,24 +123,27 @@ require (
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.3.0 // indirect
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect
go.opentelemetry.io/collector/client v1.48.0 // indirect
go.opentelemetry.io/collector/config/configoptional v1.48.0 // indirect
go.opentelemetry.io/collector/config/configretry v1.48.0 // indirect
go.opentelemetry.io/collector/exporter/exporterhelper v0.142.0 // indirect
go.opentelemetry.io/collector/pdata/xpdata v0.142.0 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
golang.org/x/arch v0.20.0 // indirect
golang.org/x/tools/godoc v0.1.0-deprecated // indirect
modernc.org/libc v1.66.10 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
)
require (
cel.dev/expr v0.24.0 // indirect
cloud.google.com/go/auth v0.16.1 // indirect
cel.dev/expr v0.25.1 // indirect
cloud.google.com/go/auth v0.17.0 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.8.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
cloud.google.com/go/compute/metadata v0.9.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect
github.com/ClickHouse/ch-go v0.67.0 // indirect
github.com/Masterminds/squirrel v1.5.4 // indirect
github.com/Yiling-J/theine-go v0.6.2 // indirect
@@ -133,35 +151,35 @@ require (
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aws/aws-sdk-go v1.55.7 // indirect
github.com/aws/aws-sdk-go v1.55.8 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
github.com/coder/quartz v0.1.2 // indirect
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/coreos/go-systemd/v22 v22.6.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dennwc/varint v1.0.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/ebitengine/purego v0.9.1 // indirect
github.com/edsrzf/mmap-go v1.2.0 // indirect
github.com/elastic/lunes v0.1.0 // indirect
github.com/elastic/lunes v0.2.0 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
github.com/expr-lang/expr v1.17.5
github.com/expr-lang/expr v1.17.7
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-faster/city v1.0.1 // indirect
github.com/go-faster/errors v0.7.1 // indirect
github.com/go-jose/go-jose/v4 v4.1.1 // indirect
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
github.com/go-openapi/errors v0.22.0 // indirect
github.com/go-openapi/errors v0.22.3 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect
github.com/go-openapi/loads v0.22.0 // indirect
@@ -178,19 +196,19 @@ require (
github.com/google/btree v1.1.3 // indirect
github.com/google/cel-go v0.26.1 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
github.com/grafana/regexp v0.0.0-20250905093917-f7b3be9d1853 // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-sockaddr v1.0.7 // indirect
github.com/hashicorp/go-version v1.7.0 // indirect
github.com/hashicorp/go-version v1.8.0 // indirect
github.com/hashicorp/golang-lru v1.0.2 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/memberlist v0.5.1 // indirect
@@ -206,21 +224,21 @@ require (
github.com/josharian/intern v1.0.0 // indirect
github.com/jpillora/backoff v1.0.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/compress v1.18.2 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
github.com/leodido/go-syslog/v4 v4.2.0 // indirect
github.com/leodido/go-syslog/v4 v4.3.0 // indirect
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mdlayher/socket v0.4.1 // indirect
github.com/mdlayher/socket v0.5.1 // indirect
github.com/mdlayher/vsock v1.2.1 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/miekg/dns v1.1.65 // indirect
github.com/miekg/dns v1.1.68 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
@@ -229,14 +247,14 @@ require (
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
github.com/natefinch/wrap v0.2.0 // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/run v1.2.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/oklog/ulid/v2 v2.1.1
github.com/open-feature/go-sdk v1.17.0
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.142.0 // indirect
github.com/openfga/openfga v1.10.1
github.com/paulmach/orb v0.11.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
@@ -246,10 +264,10 @@ require (
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/pressly/goose/v3 v3.25.0 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
github.com/prometheus/procfs v0.16.1 // indirect
github.com/prometheus/sigv4 v0.1.2 // indirect
github.com/prometheus/exporter-toolkit v0.15.0 // indirect
github.com/prometheus/otlptranslator v1.0.0 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
github.com/prometheus/sigv4 v0.3.0 // indirect
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/sagikazarmark/locafero v0.9.0 // indirect
@@ -257,7 +275,7 @@ require (
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/backo-go v1.0.1 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/shirou/gopsutil/v4 v4.25.5 // indirect
github.com/shirou/gopsutil/v4 v4.25.11 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
@@ -272,9 +290,9 @@ require (
github.com/subosito/gotenv v1.6.0 // indirect
github.com/swaggest/openapi-go v0.2.60
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tklauser/go-sysconf v0.3.16 // indirect
github.com/tklauser/numcpus v0.11.0 // indirect
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
github.com/trivago/tgo v1.0.7 // indirect
github.com/valyala/fastjson v1.6.4 // indirect
@@ -283,83 +301,82 @@ require (
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.mongodb.org/mongo-driver v1.17.1 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/collector/component v1.34.0 // indirect
go.opentelemetry.io/collector/component/componentstatus v0.128.0 // indirect
go.opentelemetry.io/collector/component/componenttest v0.128.0 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.128.0 // indirect
go.opentelemetry.io/collector/confmap/provider/envprovider v1.34.0 // indirect
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.34.0 // indirect
go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect
go.opentelemetry.io/collector/connector v0.128.0 // indirect
go.opentelemetry.io/collector/connector/connectortest v0.128.0 // indirect
go.opentelemetry.io/collector/connector/xconnector v0.128.0 // indirect
go.opentelemetry.io/collector/consumer v1.34.0 // indirect
go.opentelemetry.io/collector/consumer/consumererror v0.128.0 // indirect
go.opentelemetry.io/collector/consumer/consumertest v0.128.0 // indirect
go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 // indirect
go.opentelemetry.io/collector/exporter v0.128.0 // indirect
go.opentelemetry.io/collector/exporter/exportertest v0.128.0 // indirect
go.opentelemetry.io/collector/exporter/xexporter v0.128.0 // indirect
go.opentelemetry.io/collector/extension v1.34.0 // indirect
go.opentelemetry.io/collector/extension/extensioncapabilities v0.128.0 // indirect
go.opentelemetry.io/collector/extension/extensiontest v0.128.0 // indirect
go.opentelemetry.io/collector/extension/xextension v0.128.0 // indirect
go.opentelemetry.io/collector/featuregate v1.34.0 // indirect
go.opentelemetry.io/collector/internal/fanoutconsumer v0.128.0 // indirect
go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect
go.opentelemetry.io/collector/pdata/pprofile v0.128.0 // indirect
go.opentelemetry.io/collector/pdata/testdata v0.128.0 // indirect
go.opentelemetry.io/collector/pipeline v0.128.0 // indirect
go.opentelemetry.io/collector/pipeline/xpipeline v0.128.0 // indirect
go.opentelemetry.io/collector/processor v1.34.0 // indirect
go.opentelemetry.io/collector/processor/processorhelper v0.128.0 // indirect
go.opentelemetry.io/collector/processor/processortest v0.128.0 // indirect
go.opentelemetry.io/collector/processor/xprocessor v0.128.0 // indirect
go.opentelemetry.io/collector/receiver v1.34.0 // indirect
go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect
go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect
go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect
go.opentelemetry.io/collector/semconv v0.128.0
go.opentelemetry.io/collector/service v0.128.0 // indirect
go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect
go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 // indirect
go.opentelemetry.io/contrib/otelconf v0.16.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.58.0
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
go.opentelemetry.io/otel/log v0.12.2 // indirect
go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect
go.opentelemetry.io/otel/sdk/metric v1.38.0
go.opentelemetry.io/proto/otlp v1.8.0 // indirect
go.mongodb.org/mongo-driver v1.17.4 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/collector/component v1.48.0 // indirect
go.opentelemetry.io/collector/component/componentstatus v0.142.0 // indirect
go.opentelemetry.io/collector/component/componenttest v0.142.0 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.142.0 // indirect
go.opentelemetry.io/collector/confmap/provider/envprovider v1.48.0 // indirect
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.48.0 // indirect
go.opentelemetry.io/collector/confmap/xconfmap v0.142.0 // indirect
go.opentelemetry.io/collector/connector v0.142.0 // indirect
go.opentelemetry.io/collector/connector/connectortest v0.142.0 // indirect
go.opentelemetry.io/collector/connector/xconnector v0.142.0 // indirect
go.opentelemetry.io/collector/consumer v1.48.0 // indirect
go.opentelemetry.io/collector/consumer/consumererror v0.142.0 // indirect
go.opentelemetry.io/collector/consumer/consumertest v0.142.0 // indirect
go.opentelemetry.io/collector/consumer/xconsumer v0.142.0 // indirect
go.opentelemetry.io/collector/exporter v1.48.0 // indirect
go.opentelemetry.io/collector/exporter/exportertest v0.142.0 // indirect
go.opentelemetry.io/collector/exporter/xexporter v0.142.0 // indirect
go.opentelemetry.io/collector/extension v1.48.0 // indirect
go.opentelemetry.io/collector/extension/extensioncapabilities v0.142.0 // indirect
go.opentelemetry.io/collector/extension/extensiontest v0.142.0 // indirect
go.opentelemetry.io/collector/extension/xextension v0.142.0 // indirect
go.opentelemetry.io/collector/featuregate v1.48.0 // indirect
go.opentelemetry.io/collector/internal/fanoutconsumer v0.142.0 // indirect
go.opentelemetry.io/collector/internal/telemetry v0.142.0 // indirect
go.opentelemetry.io/collector/pdata/pprofile v0.142.0 // indirect
go.opentelemetry.io/collector/pdata/testdata v0.142.0 // indirect
go.opentelemetry.io/collector/pipeline v1.48.0 // indirect
go.opentelemetry.io/collector/pipeline/xpipeline v0.142.0 // indirect
go.opentelemetry.io/collector/processor v1.48.0 // indirect
go.opentelemetry.io/collector/processor/processorhelper v0.142.0 // indirect
go.opentelemetry.io/collector/processor/processortest v0.142.0 // indirect
go.opentelemetry.io/collector/processor/xprocessor v0.142.0 // indirect
go.opentelemetry.io/collector/receiver v1.48.0 // indirect
go.opentelemetry.io/collector/receiver/receiverhelper v0.142.0 // indirect
go.opentelemetry.io/collector/receiver/receivertest v0.142.0 // indirect
go.opentelemetry.io/collector/receiver/xreceiver v0.142.0 // indirect
go.opentelemetry.io/collector/semconv v0.128.1-0.20250610090210-188191247685
go.opentelemetry.io/collector/service v0.142.0 // indirect
go.opentelemetry.io/collector/service/hostcapabilities v0.142.0 // indirect
go.opentelemetry.io/contrib/bridges/otelzap v0.13.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect
go.opentelemetry.io/contrib/otelconf v0.18.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.60.0
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.39.0 // indirect
go.opentelemetry.io/otel/log v0.15.0 // indirect
go.opentelemetry.io/otel/sdk/log v0.14.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.39.0
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/mock v0.6.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/mod v0.30.0 // indirect
golang.org/x/sys v0.39.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/time v0.14.0 // indirect
golang.org/x/tools v0.39.0 // indirect
gonum.org/v1/gonum v0.16.0 // indirect
google.golang.org/api v0.236.0
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/grpc v1.75.1 // indirect
google.golang.org/api v0.257.0
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect
google.golang.org/grpc v1.77.0 // indirect
gopkg.in/telebot.v3 v3.3.8 // indirect
k8s.io/client-go v0.34.0 // indirect
k8s.io/client-go v0.34.2 // indirect
k8s.io/klog/v2 v2.130.1 // indirect
k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect
sigs.k8s.io/yaml v1.6.0 // indirect
)
replace github.com/expr-lang/expr => github.com/SigNoz/expr v1.17.7-beta

go.sum (731 changes)

File diff suppressed because it is too large.


@@ -4,7 +4,6 @@ import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/promotetypes"
"github.com/gorilla/mux"
)
@@ -21,7 +20,6 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
ResponseContentType: "",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleEditor),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
@@ -37,7 +35,6 @@ func (provider *provider) addPromoteRoutes(router *mux.Router) error {
ResponseContentType: "",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}


@@ -20,7 +20,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/zeus"
@@ -47,7 +46,6 @@ type provider struct {
fieldsHandler fields.Handler
authzHandler authz.Handler
zeusHandler zeus.Handler
querierHandler querier.Handler
}
func NewFactory(
@@ -68,7 +66,6 @@ func NewFactory(
fieldsHandler fields.Handler,
authzHandler authz.Handler,
zeusHandler zeus.Handler,
querierHandler querier.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(
@@ -92,7 +89,6 @@ func NewFactory(
fieldsHandler,
authzHandler,
zeusHandler,
querierHandler,
)
})
}
@@ -118,7 +114,6 @@ func newProvider(
fieldsHandler fields.Handler,
authzHandler authz.Handler,
zeusHandler zeus.Handler,
querierHandler querier.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -142,7 +137,6 @@ func newProvider(
fieldsHandler: fieldsHandler,
authzHandler: authzHandler,
zeusHandler: zeusHandler,
querierHandler: querierHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -215,10 +209,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addQuerierRoutes(router); err != nil {
return err
}
return nil
}


@@ -1,471 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
)
func (provider *provider) addQuerierRoutes(router *mux.Router) error {
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"querier"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
},
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v5/substitute_vars", handler.New(provider.authZ.ViewAccess(provider.querierHandler.ReplaceVariables), handler.OpenAPIDef{
ID: "ReplaceVariables",
Tags: []string{"querier"},
Summary: "Replace variables",
Description: "Replace variables in a query",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeRequest),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
return nil
}
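For a sense of the round trip, here is a hypothetical substitute_vars payload (the variables field name and the $service placeholder syntax are assumptions, not confirmed by the handler above); the endpoint decodes a QueryRangeRequest, rewrites variable references inside expressions, and returns the modified request:

req := map[string]any{
	"schemaVersion": "v1",
	"start":         1640995200000,
	"end":           1640998800000,
	"requestType":   "time_series",
	"compositeQuery": map[string]any{
		"queries": []any{map[string]any{
			"type": "builder_query",
			"spec": map[string]any{
				"name":   "A",
				"signal": "traces",
				"filter": map[string]any{"expression": "service.name = $service"},
			},
		}},
	},
	"variables": map[string]any{"service": "frontend"}, // assumed field name
}
// expected response: the same request with the filter expression rewritten,
// e.g. "service.name = 'frontend'"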

View File

@@ -76,7 +76,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
// add the paths that are not promoted but have indexes
for path, indexes := range aggr {
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path := strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
path = telemetrytypes.BodyJSONStringSearchPrefix + path
response = append(response, promotetypes.PromotePath{
Path: path,
@@ -156,7 +156,7 @@ func (m *module) PromoteAndIndexPaths(
}
}
if len(it.Indexes) > 0 {
parentColumn := telemetrylogs.LogsV2BodyJSONColumn
parentColumn := telemetrylogs.LogsV2BodyV2Column
// if the path is already promoted or is being promoted, add it to the promoted column
if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
parentColumn = telemetrylogs.LogsV2BodyPromotedColumn

View File

@@ -87,6 +87,24 @@ func (client *client) Read(ctx context.Context, query *prompb.Query, sortSeries
return remote.FromQueryResult(sortSeries, res), nil
}
func (c *client) ReadMultiple(ctx context.Context, queries []*prompb.Query, sortSeries bool) (storage.SeriesSet, error) {
if len(queries) == 0 {
return storage.EmptySeriesSet(), nil
}
if len(queries) == 1 {
return c.Read(ctx, queries[0], sortSeries)
}
sets := make([]storage.SeriesSet, 0, len(queries))
for _, q := range queries {
ss, err := c.Read(ctx, q, sortSeries)
if err != nil {
return nil, err
}
sets = append(sets, ss)
}
return storage.NewMergeSeriesSet(sets, 0, storage.ChainedSeriesMerge), nil
}
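A brief usage sketch of the merge path above (query construction elided); note that storage.NewMergeSeriesSet expects each input set to be sorted by labels, so callers fanning out multiple queries should pass sortSeries=true to keep that invariant:

func readBoth(ctx context.Context, c *client, q1, q2 *prompb.Query) error {
	set, err := c.ReadMultiple(ctx, []*prompb.Query{q1, q2}, true)
	if err != nil {
		return err
	}
	for set.Next() {
		fmt.Println(set.At().Labels()) // series chained and deduplicated across both queries
	}
	return set.Err()
}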
func (client *client) queryToClickhouseQuery(_ context.Context, query *prompb.Query, metricName string, subQuery bool) (string, []any, error) {
var clickHouseQuery string
var conditions []string

View File

@@ -2,6 +2,7 @@ package prometheus
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql"
)
@@ -15,30 +16,24 @@ func RemoveExtraLabels(res *promql.Result, labelsToRemove ...string) error {
toRemove[l] = struct{}{}
}
dropLabels := func(metric labels.Labels) labels.Labels {
b := labels.NewBuilder(metric)
for name := range toRemove {
b.Del(name)
}
return b.Labels()
}
switch res.Value.(type) {
case promql.Vector:
value := res.Value.(promql.Vector)
for i := range value {
series := &(value)[i]
dst := series.Metric[:0]
for _, lbl := range series.Metric {
if _, drop := toRemove[lbl.Name]; !drop {
dst = append(dst, lbl)
}
}
series.Metric = dst
(value)[i].Metric = dropLabels((value)[i].Metric)
}
case promql.Matrix:
value := res.Value.(promql.Matrix)
for i := range value {
series := &(value)[i]
dst := series.Metric[:0]
for _, lbl := range series.Metric {
if _, drop := toRemove[lbl.Name]; !drop {
dst = append(dst, lbl)
}
}
series.Metric = dst
(value)[i].Metric = dropLabels((value)[i].Metric)
}
case promql.Scalar:
return nil
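For reference, a standalone sketch of the labels.Builder pattern adopted above; unlike filtering the slice in place, the builder never mutates the (possibly shared) backing storage of a labels.Labels value:

base := labels.FromStrings("__name__", "http_requests_total", "le", "0.5", "job", "api")
b := labels.NewBuilder(base)
b.Del("le")
trimmed := b.Labels() // {__name__="http_requests_total", job="api"}; base is unchanged
_ = trimmed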

View File

@@ -21,17 +21,18 @@ import (
"github.com/SigNoz/signoz/pkg/variables"
)
type handler struct {
type API struct {
set factory.ProviderSettings
analytics analytics.Analytics
querier Querier
}
func NewHandler(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) Handler {
return &handler{set: set, querier: querier, analytics: analytics}
func NewAPI(set factory.ProviderSettings, querier Querier, analytics analytics.Analytics) *API {
return &API{set: set, querier: querier, analytics: analytics}
}
func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -52,7 +53,7 @@ func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
queryJSON, _ := json.Marshal(queryRangeRequest)
handler.set.Logger.ErrorContext(ctx, "panic in QueryRange",
a.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
@@ -78,17 +79,17 @@ func (handler *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}
queryRangeResponse, err := handler.querier.QueryRange(ctx, orgID, &queryRangeRequest)
queryRangeResponse, err := a.querier.QueryRange(ctx, orgID, &queryRangeRequest)
if err != nil {
render.Error(rw, err)
return
}
handler.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
a.logEvent(req.Context(), req.Header.Get("Referer"), queryRangeResponse.QBEvent)
render.Success(rw, http.StatusOK, queryRangeResponse)
}
func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
func (a *API) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
// get the param from the URL and add it to the body
@@ -152,7 +153,7 @@ func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request
queryJSON, _ := json.Marshal(queryRangeRequest)
handler.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
a.set.Logger.ErrorContext(ctx, "panic in QueryRawStream",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
@@ -192,7 +193,7 @@ func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request
flusher.Flush()
client := &qbtypes.RawStream{Name: req.RemoteAddr, Logs: make(chan *qbtypes.RawRow, 1000), Done: make(chan *bool), Error: make(chan error)}
go handler.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
go a.querier.QueryRawStream(ctx, orgID, &queryRangeRequest, client)
for {
select {
@@ -219,7 +220,7 @@ func (handler *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request
// TODO(srikanthccv): everything done here can be done on the frontend as well
// For the time being I am adding a helper function
func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
var queryRangeRequest qbtypes.QueryRangeRequest
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
@@ -271,7 +272,7 @@ func (handler *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Reque
render.Success(rw, http.StatusOK, queryRangeRequest)
}
func (handler *handler) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEvent) {
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
return
@@ -304,9 +305,8 @@ func (handler *handler) logEvent(ctx context.Context, referrer string, event *qb
}
if !event.HasData {
handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
return
}
handler.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
a.analytics.TrackUser(ctx, claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}

View File

@@ -10,11 +10,9 @@ import (
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
type builderQuery[T any] struct {
@@ -80,16 +78,11 @@ func (q *builderQuery[T]) Fingerprint() string {
case qbtypes.LogAggregation:
aggParts = append(aggParts, a.Expression)
case qbtypes.MetricAggregation:
var spaceAggParamStr string
if a.ComparisonSpaceAggregationParam != nil {
spaceAggParamStr = a.ComparisonSpaceAggregationParam.StringValue()
}
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
a.MetricName,
a.Temporality.StringValue(),
a.TimeAggregation.StringValue(),
a.SpaceAggregation.StringValue(),
spaceAggParamStr,
))
}
}
@@ -255,40 +248,6 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
return nil, err
}
// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
switch typedPayload := payload.(type) {
case *qbtypes.RawData:
for _, rr := range typedPayload.Rows {
seeder := func() error {
body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
if !ok {
return nil
}
promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
if !ok {
return nil
}
seed(promoted, body)
str, err := sonic.MarshalString(body)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
}
rr.Data["body"] = str
return nil
}
err := seeder()
if err != nil {
return nil, err
}
delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
}
payload = typedPayload
}
}
return &qbtypes.Result{
Type: q.kind,
Value: payload,
@@ -416,18 +375,3 @@ func decodeCursor(cur string) (int64, error) {
}
return strconv.ParseInt(string(b), 10, 64)
}
func seed(promoted map[string]any, body map[string]any) {
for key, fromValue := range promoted {
if toValue, ok := body[key]; !ok {
body[key] = fromValue
} else {
if fromValue, ok := fromValue.(map[string]any); ok {
if toValue, ok := toValue.(map[string]any); ok {
seed(fromValue, toValue)
body[key] = toValue
}
}
}
}
}
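To make the removed merge semantics concrete: seed() filled in only the keys missing from body, recursing into nested maps, and existing body values always won on conflicts:

promoted := map[string]any{"a": map[string]any{"x": 1}, "b": 2}
body := map[string]any{"a": map[string]any{"x": 9, "y": 3}}
seed(promoted, body)
// body is now {"a": {"x": 9, "y": 3}, "b": 2}: "b" was filled in from
// promoted, while the colliding nested "x" kept the body value.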

View File

@@ -14,7 +14,6 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)
var (
@@ -394,17 +393,11 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
// Post-process JSON columns: normalize into structured values
// Post-process JSON columns: normalize into a string value
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
val = string(x)
default:
// already a structured type (map[string]any, []any, etc.)
}

View File

@@ -2,7 +2,6 @@ package querier
import (
"context"
"net/http"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -21,9 +20,3 @@ type BucketCache interface {
// store fresh buckets for future hits
Put(ctx context.Context, orgID valuer.UUID, q qbtypes.Query, step qbtypes.Step, fresh *qbtypes.Result)
}
type Handler interface {
QueryRange(rw http.ResponseWriter, req *http.Request)
QueryRawStream(rw http.ResponseWriter, req *http.Request)
ReplaceVariables(rw http.ResponseWriter, req *http.Request)
}

View File

@@ -1 +1,454 @@
package querier
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)
// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
RequestExamples: queryRangeV5Examples,
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
}
var queryRangeV5Examples = []handler.OpenAPIExample{
{
Name: "traces_time_series",
Summary: "Time series: count spans grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "service.name = 'frontend'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "logs_time_series",
Summary: "Time series: count error logs grouped by service",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "log_count"},
},
"stepInterval": "60s",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
"order": []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
"limit": 10,
},
},
},
},
},
},
{
Name: "metrics_gauge_time_series",
Summary: "Time series: latest gauge value averaged across series",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_rate_time_series",
Summary: "Time series: rate of cumulative counter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
},
"stepInterval": 120,
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "metrics_histogram_time_series",
Summary: "Time series: p99 latency from histogram",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
},
},
},
},
{
Name: "logs_raw",
Summary: "Raw: fetch raw log records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
"selectFields": []any{
map[string]any{"name": "body", "fieldContext": "log"},
map[string]any{"name": "service.name", "fieldContext": "resource"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
},
{
Name: "traces_raw",
Summary: "Raw: fetch raw span records",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
"selectFields": []any{
map[string]any{"name": "name", "fieldContext": "span"},
map[string]any{"name": "duration_nano", "fieldContext": "span"},
},
"order": []any{
map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
},
"limit": 100,
},
},
},
},
},
},
{
Name: "traces_scalar",
Summary: "Scalar: total span count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "span_count"},
},
"filter": map[string]any{"expression": "service.name = 'frontend'"},
},
},
},
},
},
},
{
Name: "logs_scalar",
Summary: "Scalar: total error log count",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{"expression": "count()", "alias": "error_count"},
},
"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
},
},
},
},
},
},
{
Name: "metrics_scalar",
Summary: "Scalar: single reduced metric value",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
},
"stepInterval": "60s",
},
},
},
},
},
},
{
Name: "formula",
Summary: "Formula: error rate from two trace queries",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{map[string]any{"expression": "count()"}},
"stepInterval": "60s",
"groupBy": []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
},
{
Name: "promql",
Summary: "PromQL: request rate with UTF-8 metric name",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_time_series",
Summary: "ClickHouse SQL: traces time series with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" GROUP BY ts ORDER BY ts",
},
},
},
},
},
},
{
Name: "clickhouse_sql_logs_raw",
Summary: "ClickHouse SQL: raw logs with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT timestamp, body" +
" FROM signoz_logs.distributed_logs_v2" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
" AND severity_text = 'ERROR'" +
" ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
},
{
Name: "clickhouse_sql_traces_scalar",
Summary: "ClickHouse SQL: scalar aggregate with resource filter",
Value: map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "WITH __resource_filter AS (" +
" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
" ) SELECT count() AS value" +
" FROM signoz_traces.distributed_signoz_index_v3" +
" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
},
},
},
},
},
},
}
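A hypothetical client invocation (the base URL and auth header name are assumptions) posting the first example above to the endpoint this definition documents:

payload, _ := json.Marshal(queryRangeV5Examples[0].Value)
req, _ := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(payload))
req.Header.Set("Content-Type", "application/json")
req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // assumed header name
resp, err := http.DefaultClient.Do(req)
if err == nil {
	defer resp.Body.Close()
}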

View File

@@ -16,6 +16,7 @@ import (
"github.com/SigNoz/signoz/pkg/querybuilder"
qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser"
)
@@ -240,13 +241,13 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
var series []*qbv5.TimeSeries
for _, v := range matrix {
var s qbv5.TimeSeries
lbls := make([]*qbv5.Label, 0, len(v.Metric))
for name, value := range v.Metric.Copy().Map() {
lbls := make([]*qbv5.Label, 0, v.Metric.Len())
v.Metric.Range(func(l labels.Label) {
lbls = append(lbls, &qbv5.Label{
Key: telemetrytypes.TelemetryFieldKey{Name: name},
Value: value,
Key: telemetrytypes.TelemetryFieldKey{Name: l.Name},
Value: l.Value,
})
}
})
s.Labels = lbls
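As with the dropLabels change earlier, this adopts the newer Prometheus labels API, where labels.Labels is opaque and iterated via Range; a minimal standalone sketch:

lset := labels.FromStrings("service.name", "frontend", "env", "prod")
lset.Range(func(l labels.Label) {
	fmt.Printf("%s=%s\n", l.Name, l.Value) // visited in sorted name order
})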

View File

@@ -79,6 +79,8 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/version"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
)
type status string
@@ -143,6 +145,8 @@ type APIHandler struct {
LicensingAPI licensing.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
@@ -171,6 +175,8 @@ type APIHandlerOpts struct {
LicensingAPI licensing.API
QuerierAPI *querierAPI.API
QueryParserAPI *queryparser.API
Signoz *signoz.SigNoz
@@ -233,6 +239,7 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
AlertmanagerAPI: opts.AlertmanagerAPI,
LicensingAPI: opts.LicensingAPI,
Signoz: opts.Signoz,
QuerierAPI: opts.QuerierAPI,
QueryParserAPI: opts.QueryParserAPI,
}
@@ -389,7 +396,7 @@ func (aH *APIHandler) RegisterQueryRangeV3Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/query_progress", am.ViewAccess(aH.GetQueryProgressUpdates)).Methods(http.MethodGet)
// live logs
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.Signoz.Handlers.QuerierHandler.QueryRawStream)).Methods(http.MethodGet)
subRouter.HandleFunc("/logs/livetail", am.ViewAccess(aH.QuerierAPI.QueryRawStream)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *middleware.AuthZ) {
@@ -463,6 +470,12 @@ func (aH *APIHandler) RegisterQueryRangeV4Routes(router *mux.Router, am *middlew
subRouter.HandleFunc("/metric/metric_metadata", am.ViewAccess(aH.getMetricMetadata)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterQueryRangeV5Routes(router *mux.Router, am *middleware.AuthZ) {
subRouter := router.PathPrefix("/api/v5").Subrouter()
subRouter.HandleFunc("/query_range", am.ViewAccess(aH.QuerierAPI.QueryRange)).Methods(http.MethodPost)
subRouter.HandleFunc("/substitute_vars", am.ViewAccess(aH.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
}
// todo(remove): Implemented at render package (github.com/SigNoz/signoz/pkg/http/render) with the new error structure
func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) {
writeHttpResponse(w, data)

View File

@@ -23,6 +23,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
@@ -132,6 +133,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
LicensingAPI: nooplicensing.NewLicenseAPI(),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
}, config)
if err != nil {
@@ -215,6 +217,7 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
api.RegisterInfraMetricsRoutes(r, am)
api.RegisterWebSocketPaths(r, am)
api.RegisterQueryRangeV4Routes(r, am)
api.RegisterQueryRangeV5Routes(r, am)
api.RegisterMessagingQueuesRoutes(r, am)
api.RegisterThirdPartyApiRoutes(r, am)
api.MetricExplorerRoutes(r, am)

View File

@@ -19,6 +19,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql"
)
@@ -461,12 +462,12 @@ func toCommonSeries(series promql.Series) v3.Series {
Points: make([]v3.Point, 0),
}
for _, lbl := range series.Metric {
commonSeries.Labels[lbl.Name] = lbl.Value
series.Metric.Range(func(l labels.Label) {
commonSeries.Labels[l.Name] = l.Value
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
lbl.Name: lbl.Value,
l.Name: l.Value,
})
}
})
for _, f := range series.Floats {
commonSeries.Points = append(commonSeries.Points, v3.Point{

View File

@@ -204,7 +204,10 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// While we expect users not to send mixed data types, it inevitably happens,
// so we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString, telemetrytypes.FieldDataTypeJSON:
if key.FieldDataType == telemetrytypes.FieldDataTypeJSON {
tblFieldName = fmt.Sprintf("toString(%s)", tblFieldName)
}
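// e.g. (hypothetical path): a JSON-typed key for body.status compared against
// a string value first becomes toString(body_v2.`status`), so the
// string/number collision handling below applies to it uniformly.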
switch v := value.(type) {
case float64:
// try to convert the string value to a number
@@ -219,7 +222,6 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeInt64,
telemetrytypes.FieldDataTypeArrayInt64,
telemetrytypes.FieldDataTypeNumber,

View File

@@ -313,37 +313,31 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
return ""
}
child := ctx.GetChild(0)
var searchText string
if keyCtx, ok := child.(*grammar.KeyContext); ok {
// create a full text search condition on the body field
keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
return cond
searchText = keyCtx.GetText()
} else if valCtx, ok := child.(*grammar.ValueContext); ok {
var text string
if valCtx.QUOTED_TEXT() != nil {
text = trimQuotes(valCtx.QUOTED_TEXT().GetText())
searchText = trimQuotes(valCtx.QUOTED_TEXT().GetText())
} else if valCtx.NUMBER() != nil {
text = valCtx.NUMBER().GetText()
searchText = valCtx.NUMBER().GetText()
} else if valCtx.BOOL() != nil {
text = valCtx.BOOL().GetText()
searchText = valCtx.BOOL().GetText()
} else if valCtx.KEY() != nil {
text = valCtx.KEY().GetText()
searchText = valCtx.KEY().GetText()
} else {
v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
return cond
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(searchText), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
v.warnings = append(v.warnings, v.fullTextColumn.Warnings...)
return cond
}
return "" // Should not happen with valid input
@@ -383,9 +377,11 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
return ""
}
conds = append(conds, condition)
v.warnings = append(v.warnings, key.Warnings...)
}
// if there is only one condition, return it directly, one less `()` wrapper
if len(conds) == 1 {
@@ -458,6 +454,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
return ""
}
conds = append(conds, condition)
v.warnings = append(v.warnings, key.Warnings...)
}
if len(conds) == 1 {
return conds[0]
@@ -506,6 +503,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
return ""
}
conds = append(conds, condition)
v.warnings = append(v.warnings, key.Warnings...)
}
if len(conds) == 1 {
return conds[0]
@@ -592,6 +590,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
return ""
}
conds = append(conds, condition)
v.warnings = append(v.warnings, key.Warnings...)
}
if len(conds) == 1 {
return conds[0]
@@ -648,7 +647,6 @@ func (v *filterExpressionVisitor) VisitValueList(ctx *grammar.ValueListContext)
// VisitFullText handles standalone quoted strings for full-text search
func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) any {
if v.skipFullTextFilter {
return ""
}
@@ -670,6 +668,8 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
}
v.warnings = append(v.warnings, v.fullTextColumn.Warnings...)
return cond
}

View File

@@ -1,7 +1,6 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/signozauthzapi"
"github.com/SigNoz/signoz/pkg/factory"
@@ -51,14 +50,12 @@ type Handlers struct {
Fields fields.Handler
AuthzHandler authz.Handler
ZeusHandler zeus.Handler
QuerierHandler querier.Handler
}
func NewHandlers(
modules Modules,
providerSettings factory.ProviderSettings,
analytics analytics.Analytics,
querierHandler querier.Handler,
querier querier.Querier,
licensing licensing.Licensing,
global global.Global,
flaggerService flagger.Flagger,
@@ -83,6 +80,5 @@ func NewHandlers(
Fields: implfields.NewHandler(providerSettings, telemetryMetadataStore),
AuthzHandler: signozauthzapi.NewHandler(authz),
ZeusHandler: zeus.NewHandler(zeusService, licensing),
QuerierHandler: querierHandler,
}
}

View File

@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
@@ -43,8 +42,7 @@ func TestNewHandlers(t *testing.T) {
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule)
querierHandler := querier.NewHandler(providerSettings, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, querierHandler, nil, nil, nil, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil, nil, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {
f := reflectVal.Field(i)

View File

@@ -4,6 +4,7 @@ import (
"bytes"
"context"
"encoding/json"
"net/http"
"os"
"reflect"
@@ -26,6 +27,7 @@ import (
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/gorilla/mux"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
@@ -58,12 +60,15 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ fields.Handler }{},
struct{ authz.Handler }{},
struct{ zeus.Handler }{},
struct{ querier.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -154,3 +159,10 @@ func convertJSONNumbers(v interface{}) {
}
}
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
querier.QueryRangeV5OpenAPIDef,
)).Methods(http.MethodPost)
}

View File

@@ -253,7 +253,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.Fields,
handlers.AuthzHandler,
handlers.ZeusHandler,
handlers.QuerierHandler,
),
)
}

View File

@@ -90,7 +90,6 @@ func New(
authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config],
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
querierHandlerCallback func(factory.ProviderSettings, querier.Querier, analytics.Analytics) querier.Handler,
) (*SigNoz, error) {
// Initialize instrumentation
instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz")
@@ -392,11 +391,8 @@ func New(
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
// Initialize the querier handler via callback (allows EE to decorate with anomaly detection)
querierHandler := querierHandlerCallback(providerSettings, querier, analytics)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, analytics, querierHandler, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz, zeus)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(

View File

@@ -35,13 +35,19 @@ func (c *conditionBuilder) conditionFor(
return "", err
}
if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
valueType, value := InferDataType(value, operator, key)
cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
if err != nil {
return "", err
if column.Type.GetType() == schema.ColumnTypeEnumJSON {
// if the key is a path inside the JSON column rather than the column itself, build a JSON condition
if querybuilder.BodyJSONQueryEnabled && key.MustBuildJSONCondition() {
valueType, value := InferDataType(value, operator, key)
cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
if err != nil {
return "", err
}
return cond, nil
} else if key.FieldDataType == telemetrytypes.FieldDataTypeJSON && !operator.IsOpValidForJSON() {
// return an error for invalid operators on JSON columns
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid operator")
}
return cond, nil
}
if operator.IsStringSearchOperator() {
@@ -108,7 +114,6 @@ func (c *conditionBuilder) conditionFor(
return sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
@@ -176,9 +181,16 @@ func (c *conditionBuilder) conditionFor(
var value any
switch column.Type.GetType() {
case schema.ColumnTypeEnumJSON:
if operator == qbtypes.FilterOperatorExists {
return sb.IsNotNull(tblFieldName), nil
} else {
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeJSON:
if operator == qbtypes.FilterOperatorExists {
return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), false), nil
}
return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), true), nil
default:
if operator == qbtypes.FilterOperatorExists {
return sb.IsNotNull(tblFieldName), nil
}
return sb.IsNull(tblFieldName), nil
}
case schema.ColumnTypeEnumLowCardinality:

View File

@@ -1,7 +1,10 @@
package telemetrylogs
import (
"fmt"
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -17,7 +20,7 @@ const (
LogsV2TimestampColumn = "timestamp"
LogsV2ObservedTimestampColumn = "observed_timestamp"
LogsV2BodyColumn = "body"
LogsV2BodyJSONColumn = constants.BodyJSONColumn
LogsV2BodyV2Column = constants.BodyV2Column
LogsV2BodyPromotedColumn = constants.BodyPromotedColumn
LogsV2TraceIDColumn = "trace_id"
LogsV2SpanIDColumn = "span_id"
@@ -34,11 +37,25 @@ const (
LogsV2ResourcesStringColumn = "resources_string"
LogsV2ScopeStringColumn = "scope_string"
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
BodyV2ColumnPrefix = constants.BodyV2ColumnPrefix
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
MessageSubColumn = "message"
ftsBodyFieldKey = "object(body)"
bodySearchDefaultWarning = "When you search on `body` (full text or by field), Query Builder uses body.message:string by default. Check that this matches what you want to search."
ftsBodyFieldWarning = "Avoid using `object(body)` in queries. It performs a full scan and can severely slow queries. Use explicit fields instead (for example, `body.response.status_code` or `response.status_code`). Only use `object(body)` for temporary schema exploration. We do not recommend its usage elsewhere."
)
var (
// maps the logical body field to the message sub-column
// TODO(Piyush): add a detailed explanation of the body-to-message remapping
BodyLogicalFieldJSONMapping = &telemetrytypes.TelemetryFieldKey{
Name: MessageSubColumn,
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.FieldDataTypeString,
JSONDataType: &telemetrytypes.String,
Warnings: []string{bodySearchDefaultWarning},
}
DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
Name: "body",
Signal: telemetrytypes.SignalLogs,
@@ -118,3 +135,44 @@ var (
},
}
)
func bodyAliasExpression() string {
if !querybuilder.BodyJSONQueryEnabled {
return LogsV2BodyColumn
}
return fmt.Sprintf("%s as body", LogsV2BodyV2Column)
}
func enrichIntrinsicFields() error {
// the logical body field is mapped to the message field in the body context, and only with the String data type
err := BodyLogicalFieldJSONMapping.SetJSONAccessPlan(telemetrytypes.JSONColumnMetadata{
BaseColumn: LogsV2BodyV2Column,
PromotedColumn: LogsV2BodyPromotedColumn,
}, map[string][]telemetrytypes.JSONDataType{
MessageSubColumn: {telemetrytypes.String},
})
if err != nil {
return err
}
if querybuilder.BodyJSONQueryEnabled {
DefaultFullTextColumn = BodyLogicalFieldJSONMapping
IntrinsicFields["body"] = *BodyLogicalFieldJSONMapping
IntrinsicFields[ftsBodyFieldKey] = telemetrytypes.TelemetryFieldKey{
Name: "body_v2",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeJSON,
Warnings: []string{ftsBodyFieldWarning},
}
}
return nil
}
func init() {
err := enrichIntrinsicFields()
if err != nil {
panic(err)
}
}
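A quick illustration of the alias switch above, assuming BodyJSONQueryEnabled is a settable package-level flag (as the test helpers elsewhere in this PR suggest):

querybuilder.BodyJSONQueryEnabled = false
fmt.Println(bodyAliasExpression()) // body
querybuilder.BodyJSONQueryEnabled = true
fmt.Println(bodyAliasExpression()) // body_v2 as body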

View File

@@ -30,7 +30,7 @@ var (
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
"body": {Name: "body", Type: schema.ColumnTypeString},
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
LogsV2BodyV2Column: {Name: LogsV2BodyV2Column, Type: schema.JSONColumnType{
MaxDynamicTypes: utils.ToPointer(uint(32)),
MaxDynamicPaths: utils.ToPointer(uint(0)),
}},
@@ -89,9 +89,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
}
case telemetrytypes.FieldContextBody:
// Body context is for JSON body fields
// Use body_json if feature flag is enabled
// Use body_v2 if feature flag is enabled
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
return logsV2Columns[LogsV2BodyV2Column], nil
}
// Fall back to legacy body column
return logsV2Columns["body"], nil
@@ -100,9 +100,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
if !ok {
// check if the key has body JSON search
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
// Use body_json if feature flag is enabled and we have a body condition builder
// Use body_v2 if feature flag is enabled and we have a body condition builder
if querybuilder.BodyJSONQueryEnabled {
return logsV2Columns[LogsV2BodyJSONColumn], nil
return logsV2Columns[LogsV2BodyV2Column], nil
}
// Fall back to legacy body column
return logsV2Columns["body"], nil
@@ -147,6 +147,9 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
}
return m.buildFieldForJSON(key)
case telemetrytypes.FieldContextLog:
// return the column name as is for log context fields
return column.Name, nil
default:
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
}

View File

@@ -30,7 +30,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
return &jsonConditionBuilder{key: key, valueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType]}
}
// BuildCondition builds the full WHERE condition for body_json JSON paths
// buildJSONCondition builds the full WHERE condition for body_v2 JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
conditions := []string{}
for _, node := range c.key.JSONPlan {
@@ -40,6 +40,7 @@ func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperato
}
conditions = append(conditions, condition)
}
return sb.Or(conditions...), nil
}
@@ -288,9 +289,9 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
}
return sb.NotIn(fieldExpr, values...), nil
case qbtypes.FilterOperatorExists:
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
return sb.IsNotNull(fieldExpr), nil
case qbtypes.FilterOperatorNotExists:
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
return sb.IsNull(fieldExpr), nil
// between and not between
case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
values, ok := value.([]any)

File diff suppressed because one or more lines are too long

View File

@@ -203,7 +203,6 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
}
func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {
// First check if it matches with any intrinsic fields
var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
if _, ok := IntrinsicFields[key.Name]; ok {
@@ -212,7 +211,6 @@ func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldK
}
return querybuilder.AdjustKey(key, keys, nil)
}
// buildListQuery builds a query for list panel type
@@ -249,11 +247,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.SelectMore(LogsV2SeverityNumberColumn)
sb.SelectMore(LogsV2ScopeNameColumn)
sb.SelectMore(LogsV2ScopeVersionColumn)
sb.SelectMore(LogsV2BodyColumn)
if querybuilder.BodyJSONQueryEnabled {
sb.SelectMore(LogsV2BodyJSONColumn)
sb.SelectMore(LogsV2BodyPromotedColumn)
}
sb.SelectMore(bodyAliasExpression())
sb.SelectMore(LogsV2AttributesStringColumn)
sb.SelectMore(LogsV2AttributesNumberColumn)
sb.SelectMore(LogsV2AttributesBoolColumn)

View File

@@ -5,6 +5,7 @@ import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
@@ -886,3 +887,154 @@ func TestAdjustKey(t *testing.T) {
})
}
}
func TestStmtBuilderBodyField(t *testing.T) {
cases := []struct {
name string
requestType qbtypes.RequestType
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
enableBodyJSONQuery bool
expected qbtypes.Statement
expectedErr error
}{
{
name: "body_exists",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body Exists"},
Limit: 10,
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') IS NOT NULL) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{bodySearchDefaultWarning},
},
expectedErr: nil,
},
{
name: "body_exists_disabled",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body Exists"},
Limit: 10,
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "body_empty",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body == ''"},
Limit: 10,
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{bodySearchDefaultWarning},
},
expectedErr: nil,
},
{
name: "body_empty_disabled",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body == ''"},
Limit: 10,
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "body_contains",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
Limit: 10,
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (LOWER(dynamicElement(body_v2.`message`, 'String')) LIKE LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{bodySearchDefaultWarning},
},
expectedErr: nil,
},
{
name: "body_contains_disabled",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
Limit: 10,
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
enable, disable := jsonQueryTestUtil(t)
defer disable()
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
if c.enableBodyJSONQuery {
enable()
} else {
disable()
}
// build the key map after enabling/disabling body JSON query
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
if c.expectedErr != nil {
require.Error(t, err)
require.Contains(t, err.Error(), c.expectedErr.Error())
} else {
if err != nil {
_, _, _, _, _, add := errors.Unwrapb(err)
t.Logf("error additionals: %v", add)
}
require.NoError(t, err)
require.Equal(t, c.expected.Query, q.Query)
require.Equal(t, c.expected.Args, q.Args)
require.Equal(t, c.expected.Warnings, q.Warnings)
}
})
}
}
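
Taken together, the enabled/disabled pairs above reduce to a single substitution in the statement builder: with the body JSON flag on, the select list aliases body_v2 as body and body predicates are rewritten against dynamicElement(body_v2.`message`, 'String'), with bodySearchDefaultWarning attached to the statement. A minimal sketch of that branch for the CONTAINS case, assuming a plain boolean flag (the function name is illustrative, not the actual implementation):

// Sketch only: pick the body select column and CONTAINS condition
// depending on whether body JSON querying is enabled.
func bodyContainsParts(jsonEnabled bool) (selectCol, cond string, warnings []string) {
	if !jsonEnabled {
		// legacy path: case-insensitive LIKE over the plain string column
		return "body", "LOWER(body) LIKE LOWER(?)", nil
	}
	// JSON path: search the string representation of body_v2.message
	cond = "LOWER(dynamicElement(body_v2.`message`, 'String')) LIKE LOWER(?)"
	return "body_v2 as body", cond, []string{bodySearchDefaultWarning}
}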

View File

@@ -27,13 +27,6 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"body": {
{
Name: "body",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.status_code": {
{
Name: "http.status_code",
@@ -945,6 +938,11 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
key.Signal = telemetrytypes.SignalLogs
}
}
// add intrinsic fields to the map
for fieldName, key := range IntrinsicFields {
keysMap[fieldName] = append(keysMap[fieldName], &key)
}
return keysMap
}
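
Note that the added loop appends &key, the address of the range variable. Under Go 1.22+ loop semantics each iteration gets a fresh key, so every appended pointer is distinct; on older toolchains the equivalent safe form copies explicitly. A sketch of that pre-1.22 pattern, assuming IntrinsicFields is a map of TelemetryFieldKey values (which taking &key implies):

for fieldName := range IntrinsicFields {
	key := IntrinsicFields[fieldName] // explicit per-iteration copy
	keysMap[fieldName] = append(keysMap[fieldName], &key)
}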

View File

@@ -254,7 +254,7 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
sb.Where(sb.Equal("database", telemetrylogs.DBName))
sb.Where(sb.Equal("table", telemetrylogs.LogsV2LocalTableName))
sb.Where(sb.Or(
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix))),
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix))),
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix))),
))
@@ -338,7 +338,7 @@ func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, li
if promoted {
path = telemetrylogs.BodyPromotedColumnPrefix + path
} else {
path = telemetrylogs.BodyJSONColumnPrefix + path
path = telemetrylogs.BodyV2ColumnPrefix + path
}
from := fmt.Sprintf("%s.%s", telemetrylogs.DBName, telemetrylogs.LogsV2TableName)
@@ -529,7 +529,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
// TODO(Piyush): Remove this function
func CleanPathPrefixes(path string) string {
path = strings.TrimPrefix(path, telemetrytypes.BodyJSONStringSearchPrefix)
path = strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
return path
}

View File

@@ -117,7 +117,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
expectedArgs: []any{
telemetrylogs.DBName,
telemetrylogs.LogsV2LocalTableName,
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
},
},
@@ -130,7 +130,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
expectedArgs: []any{
telemetrylogs.DBName,
telemetrylogs.LogsV2LocalTableName,
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("foo")),
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("bar")),

View File

@@ -100,7 +100,7 @@ func NewTelemetryMetaStore(
jsonColumnMetadata: map[telemetrytypes.Signal]map[telemetrytypes.FieldContext]telemetrytypes.JSONColumnMetadata{
telemetrytypes.SignalLogs: {
telemetrytypes.FieldContextBody: telemetrytypes.JSONColumnMetadata{
BaseColumn: telemetrylogs.LogsV2BodyJSONColumn,
BaseColumn: telemetrylogs.LogsV2BodyV2Column,
PromotedColumn: telemetrylogs.LogsV2BodyPromotedColumn,
},
},
@@ -333,7 +333,7 @@ func (t *telemetryMetaStore) logsTblStatementToFieldKeys(ctx context.Context) ([
}
// getLogsKeys returns the keys from the spans that match the field selection criteria
func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (telemetrytypes.TelemetryFieldKeys, bool, error) {
if len(fieldKeySelectors) == 0 {
return nil, true, nil
}
@@ -343,9 +343,10 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
if err != nil {
return nil, false, err
}
mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
// setOfKeys lets us reuse the same key object for a given qualified name
setOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
for _, key := range matKeys {
mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
setOfKeys[key.QualifiedName()] = key
}
// queries for both attribute and resource keys tables
@@ -446,7 +447,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
if len(queries) == 0 {
// No matching contexts, return empty result
return []*telemetrytypes.TelemetryFieldKey{}, true, nil
return nil, true, nil
}
// Combine queries with UNION ALL
@@ -474,7 +475,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
defer rows.Close()
keys := []*telemetrytypes.TelemetryFieldKey{}
mapOfKeys := make(telemetrytypes.TelemetryFieldKeys)
rowCount := 0
searchTexts := []string{}
dataTypes := []telemetrytypes.FieldDataType{}
@@ -502,7 +503,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}
key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
key, ok := setOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
// if there is no materialised column, create a key with the field context and data type
if !ok {
@@ -514,8 +515,8 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
}
keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
setOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}
if rows.Err() != nil {
@@ -541,17 +542,13 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
if found {
if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
if _, added := setOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
mapOfKeys[field.Name] = append(mapOfKeys[field.Name], &field)
// an intrinsic field can be a logical field mapped to a different physical location
mapOfKeys[key] = append(mapOfKeys[key], &field)
}
continue
}
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextLog,
Signal: telemetrytypes.SignalLogs,
})
}
}
@@ -560,10 +557,13 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
if err != nil {
t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
}
keys = append(keys, bodyJSONPaths...)
for _, key := range bodyJSONPaths {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
}
complete = complete && finished
}
return keys, complete, nil
return mapOfKeys, complete, nil
}
func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
@@ -847,12 +847,20 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
if fieldKeySelector != nil {
selectors = []*telemetrytypes.FieldKeySelector{fieldKeySelector}
}
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
switch fieldKeySelector.Signal {
case telemetrytypes.SignalTraces:
keys, complete, err = t.getTracesKeys(ctx, selectors)
case telemetrytypes.SignalLogs:
keys, complete, err = t.getLogsKeys(ctx, selectors)
mapOfLogKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
if err != nil {
return nil, false, err
}
for keyName, keys := range mapOfLogKeys {
mapOfKeys[keyName] = append(mapOfKeys[keyName], keys...)
}
complete = complete && logsComplete
case telemetrytypes.SignalMetrics:
if fieldKeySelector.Source == telemetrytypes.SourceMeter {
keys, complete, err = t.getMeterSourceMetricKeys(ctx, selectors)
@@ -868,11 +876,14 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
keys = append(keys, tracesKeys...)
// get logs keys
logsKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
mapOfLogKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
if err != nil {
return nil, false, err
}
keys = append(keys, logsKeys...)
for keyName, keys := range mapOfLogKeys {
mapOfKeys[keyName] = append(mapOfKeys[keyName], keys...)
}
complete = complete && logsComplete
// get metrics keys
metricsKeys, metricsComplete, err := t.getMetricsKeys(ctx, selectors)
@@ -887,7 +898,6 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
return nil, false, err
}
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
for _, key := range keys {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
}
@@ -924,7 +934,7 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
}
}
logsKeys, logsComplete, err := t.getLogsKeys(ctx, logsSelectors)
mapOfLogKeys, logsComplete, err := t.getLogsKeys(ctx, logsSelectors)
if err != nil {
return nil, false, err
}
@@ -945,8 +955,8 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
complete := logsComplete && tracesComplete && metricsComplete
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
for _, key := range logsKeys {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
for keyName, keys := range mapOfLogKeys {
mapOfKeys[keyName] = append(mapOfKeys[keyName], keys...)
}
for _, key := range tracesKeys {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
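
getLogsKeys now returns telemetrytypes.TelemetryFieldKeys, a map from key name to candidate field keys, and GetKeys/GetKeysMulti fold those per-name buckets into their own maps. The repeated fold could be expressed once; a minimal sketch using only the map type introduced in this change:

// mergeFieldKeys appends every per-name bucket from src into dst.
func mergeFieldKeys(dst, src telemetrytypes.TelemetryFieldKeys) {
	for name, keys := range src {
		dst[name] = append(dst[name], keys...)
	}
}

Each call site above (the logs branch of GetKeys, the all-signals branch, and GetKeysMulti) performs exactly this append-per-name loop.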

View File

@@ -7,6 +7,6 @@ const (
AttributesMetadataTableName = "distributed_attributes_metadata"
AttributesMetadataLocalTableName = "attributes_metadata"
PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
PromotedPathsTableName = otelcollectorconst.DistributedPromotedPathsTable
PromotedPathsTableName = "distributed_json_promoted_paths"
SkipIndexTableName = "system.data_skipping_indices"
)

View File

@@ -123,8 +123,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
origTimeAgg := query.Aggregations[0].TimeAggregation
origGroupBy := slices.Clone(query.GroupBy)
if (query.Aggregations[0].SpaceAggregation.IsPercentile() ||
query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount) &&
if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
query.Aggregations[0].Type != metrictypes.ExpHistogramType {
// add le to the group by if it doesn't exist
leExists := false
@@ -155,11 +154,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// make the time aggregation rate and space aggregation sum
if query.Aggregations[0].SpaceAggregation.IsPercentile() {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
} else {
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
}
query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
}
@@ -529,7 +524,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
sb := sqlbuilder.NewSelectBuilder()
@@ -582,29 +577,6 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
sb.Select("ts")
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return nil, err
}
sb.SelectMore(aggQuery)
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)

View File

@@ -1,7 +1,6 @@
package telemetrymetrics
import (
"fmt"
"time"
"github.com/SigNoz/signoz/pkg/errors"
@@ -309,20 +308,3 @@ func AggregationColumnForSamplesTable(
}
return aggregationColumn, nil
}
func AggregationQueryForHistogramCount(param *metrictypes.ComparisonSpaceAggregationParam) (string, error) {
if param == nil {
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
}
histogramCountThreshold := param.Threshold
switch param.Operater {
case "<=":
return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
case ">":
return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold, histogramCountThreshold), nil
default:
return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
}
}
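
For reference, the deleted helper estimated histogram counts by linear interpolation over cumulative bucket counts: for `<=` it returns c_low + (c_high - c_low) * (t - le_low) / (le_high - le_low), where le_low is the largest bucket bound <= threshold t and le_high the smallest bound above it; for `>` it subtracts that estimate from the total. A plain-Go sketch of the same arithmetic (names are illustrative; the real logic lived in the SQL above):

// histogramCountLE estimates how many observations are <= t, given finite
// bucket bounds les (ascending) and cumulative counts cum aligned with them.
func histogramCountLE(les, cum []float64, t float64) float64 {
	lowLE, lowC := 0.0, 0.0 // largest bucket <= t; zero if none
	hi := -1                // index of smallest bucket > t
	for i, le := range les {
		if le <= t {
			lowLE, lowC = le, cum[i]
		} else {
			hi = i
			break
		}
	}
	if hi == -1 {
		// t is beyond the largest finite bucket: the +Inf bucket pins the
		// result, which is why the deleted tests report the same answer for
		// any threshold past the biggest finite bound.
		return lowC
	}
	// linear interpolation between the two straddling buckets
	return lowC + (cum[hi]-lowC)*(t-lowLE)/(les[hi]-lowLE)
}

The removed `>` branch was then just the overall count (argMax over le) minus this estimate.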

View File

@@ -60,7 +60,7 @@ type IngestionKeysParams struct {
}
type SearchIngestionKeysParams struct {
Name string `query:"name" required:"true"`
Name string `query:"name"`
Page int `query:"page"`
PerPage int `query:"per_page"`
}
@@ -71,14 +71,14 @@ type GettableIngestionKeys struct {
}
type PostableIngestionKey struct {
Name string `json:"name" required:"true"`
Name string `json:"name"`
Tags []string `json:"tags"`
ExpiresAt time.Time `json:"expires_at"`
}
type GettableCreatedIngestionKey struct {
ID string `json:"id" required:"true"`
Value string `json:"value" required:"true"`
ID string `json:"id"`
Value string `json:"value"`
}
type PostableIngestionKeyLimit struct {
@@ -88,10 +88,10 @@ type PostableIngestionKeyLimit struct {
}
type GettableCreatedIngestionKeyLimit struct {
ID string `json:"id" required:"true"`
ID string `json:"id"`
}
type UpdatableIngestionKeyLimit struct {
Config LimitConfig `json:"config" required:"true"`
Config LimitConfig `json:"config"`
Tags []string `json:"tags"`
}

View File

@@ -2,7 +2,6 @@ package metrictypes
import (
"database/sql/driver"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -190,18 +189,17 @@ type SpaceAggregation struct {
}
var (
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)
func (SpaceAggregation) Enum() []any {
@@ -216,7 +214,6 @@ func (SpaceAggregation) Enum() []any {
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
SpaceAggregationHistogramCount,
}
}
@@ -259,12 +256,3 @@ type MetricTableHints struct {
type MetricValueFilter struct {
Value float64
}
type ComparisonSpaceAggregationParam struct {
Operater string `json:"operator"`
Threshold float64 `json:"threshold"`
}
func (param ComparisonSpaceAggregationParam) StringValue() string {
return fmt.Sprintf("{\"operator\": \"%s\", \"limit\": \"%f\"}", param.Operater, param.Threshold)
}

View File

@@ -36,8 +36,8 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
}
if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
if strings.HasPrefix(i.Path, constants.BodyV2ColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyV2ColumnPrefix, constants.BodyPromotedColumnPrefix)
}
if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
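
The validation above means callers always identify a path in its search form, never with the physical column prefixes. Illustratively (constant values assumed from their names):

p := PromotePath{Path: telemetrytypes.BodyJSONStringSearchPrefix + "user.name"}
_ = p.ValidateAndSetDefaults() // nil: the search prefix is the accepted form

q := PromotePath{Path: constants.BodyV2ColumnPrefix + "user.name"}
_ = q.ValidateAndSetDefaults() // invalid-input error: column prefixes are rejected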

View File

@@ -197,6 +197,17 @@ func (f FilterOperator) IsArrayOperator() bool {
}
}
func (f FilterOperator) IsOpValidForJSON() bool {
switch f {
case FilterOperatorExists, FilterOperatorNotExists,
FilterOperatorContains, FilterOperatorNotContains,
FilterOperatorRegexp, FilterOperatorNotRegexp:
return true
default:
return false
}
}
type OrderDirection struct {
valuer.String
}
@@ -446,8 +457,6 @@ type MetricAggregation struct {
TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
// space aggregation to apply to the query
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
// param for space aggregation if needed
ComparisonSpaceAggregationParam *metrictypes.ComparisonSpaceAggregationParam `json:"comparisonSpaceAggregationParam"`
// table hints to use for the query
TableHints *metrictypes.MetricTableHints `json:"-"`
// value filter to apply to the query
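
The new IsOpValidForJSON gate restricts raw JSON-typed fields to existence, substring, and regex operators. A sketch of how a condition builder might consult it, where the guard function itself is hypothetical:

func checkJSONOperator(op FilterOperator, key *telemetrytypes.TelemetryFieldKey) error {
	if key.FieldDataType == telemetrytypes.FieldDataTypeJSON && !op.IsOpValidForJSON() {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
			"operator %v is not supported on JSON-typed fields", op)
	}
	return nil // other data types fall through to the regular builders
}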

View File

@@ -29,6 +29,8 @@ const (
ArrayAnyIndexSuffix = "[*]"
)
type TelemetryFieldKeys map[string][]*TelemetryFieldKey
type TelemetryFieldKey struct {
Name string `json:"name" required:"true"`
Description string `json:"description,omitempty"`
@@ -41,6 +43,7 @@ type TelemetryFieldKey struct {
JSONPlan JSONAccessPlan `json:"-"`
Indexes []JSONDataTypeIndex `json:"-"`
Materialized bool `json:"-"` // refers to promoted in case of body.... fields
Warnings []string `json:"-"` // warnings attached to using a particular field key
}
func (f *TelemetryFieldKey) KeyNameContainsArray() bool {
@@ -62,6 +65,10 @@ func (f *TelemetryFieldKey) ArrayParentPaths() []string {
return paths
}
func (f *TelemetryFieldKey) MustBuildJSONCondition() bool {
return f.FieldDataType != FieldDataTypeJSON && f.JSONDataType != nil
}
func (f *TelemetryFieldKey) ArrayParentSelectors() []*FieldKeySelector {
paths := f.ArrayParentPaths()
selectors := make([]*FieldKeySelector, 0, len(paths))
@@ -169,6 +176,10 @@ func (f *TelemetryFieldKey) Normalize() {
}
func (f *TelemetryFieldKey) QualifiedName() string {
return f.Name + ";" + f.FieldContext.StringValue() + ";" + f.FieldDataType.StringValue()
}
// GetFieldKeyFromKeyText returns a TelemetryFieldKey from a key text.
// The key text is expected to be in the format of `fieldContext.fieldName:fieldDataType` in the search query.
// Both fieldContext and :fieldDataType are optional.
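
QualifiedName() canonicalizes the name;fieldContext;fieldDataType triple that getLogsKeys previously concatenated by hand at every lookup. A small usage sketch, with the example output assuming "log" and "string" as the enum string values:

key := &telemetrytypes.TelemetryFieldKey{
	Name:          "user.name",
	FieldContext:  telemetrytypes.FieldContextLog,
	FieldDataType: telemetrytypes.FieldDataTypeString,
}
_ = key.QualifiedName() // "user.name;log;string"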

View File

@@ -21,6 +21,7 @@ var (
// int64 and number are synonyms for float64
FieldDataTypeInt64 = FieldDataType{valuer.NewString("int64")}
FieldDataTypeNumber = FieldDataType{valuer.NewString("number")}
FieldDataTypeJSON = FieldDataType{valuer.NewString("json")}
FieldDataTypeUnspecified = FieldDataType{valuer.NewString("")}
FieldDataTypeArrayString = FieldDataType{valuer.NewString("[]string")}

View File

@@ -40,7 +40,7 @@ type JSONAccessNode struct {
// Node information
Name string
IsTerminal bool
isRoot bool // marked true for only body_json and body_json_promoted
isRoot bool // marked true for only body_v2 and body_promoted
// Precomputed type information (single source of truth)
AvailableTypes []JSONDataType

View File

@@ -1,12 +1,19 @@
package telemetrytypes
import (
"fmt"
"testing"
otelconstants "github.com/SigNoz/signoz-otel-collector/constants"
"github.com/stretchr/testify/require"
"gopkg.in/yaml.v3"
)
const (
bodyV2Column = otelconstants.BodyV2Column
bodyPromotedColumn = otelconstants.BodyPromotedColumn
)
// ============================================================================
// Helper Functions for Test Data Creation
// ============================================================================
@@ -109,8 +116,8 @@ func TestNode_Alias(t *testing.T) {
}{
{
name: "Root node returns name as-is",
node: NewRootJSONAccessNode("body_json", 32, 0),
expected: "body_json",
node: NewRootJSONAccessNode(bodyV2Column, 32, 0),
expected: bodyV2Column,
},
{
name: "Node without parent returns backticked name",
@@ -124,9 +131,9 @@ func TestNode_Alias(t *testing.T) {
name: "Node with root parent uses dot separator",
node: &JSONAccessNode{
Name: "age",
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
expected: "`" + "body_json" + ".age`",
expected: "`" + bodyV2Column + ".age`",
},
{
name: "Node with non-root parent uses array separator",
@@ -134,10 +141,10 @@ func TestNode_Alias(t *testing.T) {
Name: "name",
Parent: &JSONAccessNode{
Name: "education",
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
},
expected: "`" + "body_json" + ".education[].name`",
expected: "`" + bodyV2Column + ".education[].name`",
},
{
name: "Nested array path with multiple levels",
@@ -147,11 +154,11 @@ func TestNode_Alias(t *testing.T) {
Name: "awards",
Parent: &JSONAccessNode{
Name: "education",
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
},
},
expected: "`" + "body_json" + ".education[].awards[].type`",
expected: "`" + bodyV2Column + ".education[].awards[].type`",
},
}
@@ -173,18 +180,18 @@ func TestNode_FieldPath(t *testing.T) {
name: "Simple field path from root",
node: &JSONAccessNode{
Name: "user",
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
// FieldPath() always wraps the field name in backticks
expected: "body_json" + ".`user`",
expected: bodyV2Column + ".`user`",
},
{
name: "Field path with backtick-required key",
node: &JSONAccessNode{
Name: "user-name", // requires backtick
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
expected: "body_json" + ".`user-name`",
expected: bodyV2Column + ".`user-name`",
},
{
name: "Nested field path",
@@ -192,11 +199,11 @@ func TestNode_FieldPath(t *testing.T) {
Name: "age",
Parent: &JSONAccessNode{
Name: "user",
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
},
// FieldPath() always wraps the field name in backticks
expected: "`" + "body_json" + ".user`.`age`",
expected: "`" + bodyV2Column + ".user`.`age`",
},
{
name: "Array element field path",
@@ -204,11 +211,11 @@ func TestNode_FieldPath(t *testing.T) {
Name: "name",
Parent: &JSONAccessNode{
Name: "education",
Parent: NewRootJSONAccessNode("body_json", 32, 0),
Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
},
},
// FieldPath() always wraps the field name in backticks
expected: "`" + "body_json" + ".education`.`name`",
expected: "`" + bodyV2Column + ".education`.`name`",
},
}
@@ -236,36 +243,36 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
{
name: "Simple path not promoted",
key: makeKey("user.name", String, false),
expectedYAML: `
expectedYAML: fmt.Sprintf(`
- name: user.name
column: body_json
column: %s
availableTypes:
- String
maxDynamicTypes: 16
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
{
name: "Simple path promoted",
key: makeKey("user.name", String, true),
expectedYAML: `
expectedYAML: fmt.Sprintf(`
- name: user.name
column: body_json
column: %s
availableTypes:
- String
maxDynamicTypes: 16
isTerminal: true
elemType: String
- name: user.name
column: body_json_promoted
column: %s
availableTypes:
- String
maxDynamicTypes: 16
maxDynamicPaths: 256
isTerminal: true
elemType: String
`,
`, bodyV2Column, bodyPromotedColumn),
},
{
name: "Empty path returns error",
@@ -278,8 +285,8 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
}, types)
if tt.expectErr {
require.Error(t, err)
@@ -304,9 +311,9 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
{
name: "Single array level - JSON branch only",
path: "education[].name",
expectedYAML: `
expectedYAML: fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -318,14 +325,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
maxDynamicTypes: 8
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
{
name: "Single array level - both JSON and Dynamic branches",
path: "education[].awards[].type",
expectedYAML: `
expectedYAML: fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -352,14 +359,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
maxDynamicPaths: 256
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
{
name: "Deeply nested array path",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: `
expectedYAML: fmt.Sprintf(`
- name: interests
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -399,14 +406,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
- String
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
{
name: "ArrayAnyIndex replacement [*] to []",
path: "education[*].name",
expectedYAML: `
expectedYAML: fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -418,7 +425,7 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
maxDynamicTypes: 8
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
}
@@ -426,8 +433,8 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
key := makeKey(tt.path, String, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
}, types)
require.NoError(t, err)
require.NotNil(t, key.JSONPlan)
@@ -445,15 +452,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
t.Run("Non-promoted plan", func(t *testing.T) {
key := makeKey(path, String, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
}, types)
require.NoError(t, err)
require.Len(t, key.JSONPlan, 1)
expectedYAML := `
expectedYAML := fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -480,7 +487,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
maxDynamicPaths: 256
isTerminal: true
elemType: String
`
`, bodyV2Column)
got := plansToYAML(t, key.JSONPlan)
require.YAMLEq(t, expectedYAML, got)
})
@@ -488,15 +495,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
t.Run("Promoted plan", func(t *testing.T) {
key := makeKey(path, String, true)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
}, types)
require.NoError(t, err)
require.Len(t, key.JSONPlan, 2)
expectedYAML := `
expectedYAML := fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -524,7 +531,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
isTerminal: true
elemType: String
- name: education
column: body_json_promoted
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -554,7 +561,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
maxDynamicPaths: 256
isTerminal: true
elemType: String
`
`, bodyV2Column, bodyPromotedColumn)
got := plansToYAML(t, key.JSONPlan)
require.YAMLEq(t, expectedYAML, got)
})
@@ -575,11 +582,11 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
expectErr: true,
},
{
name: "Very deep nesting - validates progression doesn't go negative",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: `
name: "Very deep nesting - validates progression doesn't go negative",
path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
expectedYAML: fmt.Sprintf(`
- name: interests
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -619,14 +626,14 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
- String
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
{
name: "Path with mixed scalar and array types",
path: "education[].type",
expectedYAML: `
name: "Path with mixed scalar and array types",
path: "education[].type",
expectedYAML: fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -639,20 +646,20 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
maxDynamicTypes: 8
isTerminal: true
elemType: String
`,
`, bodyV2Column),
},
{
name: "Exists with only array types available",
path: "education",
expectedYAML: `
name: "Exists with only array types available",
path: "education",
expectedYAML: fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
isTerminal: true
elemType: Array(JSON)
`,
`, bodyV2Column),
},
}
@@ -668,8 +675,8 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
}
key := makeKey(tt.path, keyType, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
}, types)
if tt.expectErr {
require.Error(t, err)
@@ -687,15 +694,15 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
path := "education[].awards[].participated[].team[].branch"
key := makeKey(path, String, false)
err := key.SetJSONAccessPlan(JSONColumnMetadata{
BaseColumn: "body_json",
PromotedColumn: "body_json_promoted",
BaseColumn: bodyV2Column,
PromotedColumn: bodyPromotedColumn,
}, types)
require.NoError(t, err)
require.Len(t, key.JSONPlan, 1)
expectedYAML := `
expectedYAML := fmt.Sprintf(`
- name: education
column: body_json
column: %s
availableTypes:
- Array(JSON)
maxDynamicTypes: 16
@@ -780,7 +787,7 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
maxDynamicPaths: 64
isTerminal: true
elemType: String
`
`, bodyV2Column)
got := plansToYAML(t, key.JSONPlan)
require.YAMLEq(t, expectedYAML, got)

View File

@@ -2,6 +2,7 @@ package telemetrytypestest
import (
"context"
"slices"
"strings"
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
@@ -176,8 +177,9 @@ func matchesKey(selector *telemetrytypes.FieldKeySelector, key *telemetrytypes.T
return true
}
matchNameExceptions := []string{"body"}
// Check name (already checked in matchesName, but double-check here)
if selector.Name != "" && !matchesName(selector, key.Name) {
if selector.Name != "" && !matchesName(selector, key.Name) && slices.Contains(matchNameExceptions, key.Name) {
return false
}

View File

@@ -11,28 +11,28 @@ type PostableHost struct {
}
type PostableProfile struct {
UsesOtel bool `json:"uses_otel" required:"true"`
HasExistingObservabilityTool bool `json:"has_existing_observability_tool" required:"true"`
ExistingObservabilityTool string `json:"existing_observability_tool" required:"true"`
ReasonsForInterestInSigNoz []string `json:"reasons_for_interest_in_signoz" required:"true"`
LogsScalePerDayInGB int64 `json:"logs_scale_per_day_in_gb" required:"true"`
NumberOfServices int64 `json:"number_of_services" required:"true"`
NumberOfHosts int64 `json:"number_of_hosts" required:"true"`
WhereDidYouDiscoverSigNoz string `json:"where_did_you_discover_signoz" required:"true"`
TimelineForMigratingToSigNoz string `json:"timeline_for_migrating_to_signoz" required:"true"`
UsesOtel bool `json:"uses_otel"`
HasExistingObservabilityTool bool `json:"has_existing_observability_tool"`
ExistingObservabilityTool string `json:"existing_observability_tool"`
ReasonsForInterestInSigNoz []string `json:"reasons_for_interest_in_signoz"`
LogsScalePerDayInGB int64 `json:"logs_scale_per_day_in_gb"`
NumberOfServices int64 `json:"number_of_services"`
NumberOfHosts int64 `json:"number_of_hosts"`
WhereDidYouDiscoverSigNoz string `json:"where_did_you_discover_signoz"`
TimelineForMigratingToSigNoz string `json:"timeline_for_migrating_to_signoz"`
}
type GettableHost struct {
Name string `json:"name" required:"true"`
State string `json:"state" required:"true"`
Tier string `json:"tier" required:"true"`
Hosts []Host `json:"hosts" required:"true"`
Name string `json:"name"`
State string `json:"state"`
Tier string `json:"tier"`
Hosts []Host `json:"hosts"`
}
type Host struct {
Name string `json:"name" required:"true"`
IsDefault bool `json:"is_default" required:"true"`
URL string `json:"url" required:"true"`
Name string `json:"name"`
IsDefault bool `json:"is_default"`
URL string `json:"url"`
}
func NewGettableHost(data []byte) *GettableHost {

View File

@@ -52,7 +52,6 @@ def build_builder_query(
time_aggregation: str,
space_aggregation: str,
*,
comparisonSpaceAggregationParam: Optional[Dict] = None,
temporality: Optional[str] = None,
step_interval: int = DEFAULT_STEP_INTERVAL,
group_by: Optional[List[str]] = None,
@@ -75,8 +74,7 @@ def build_builder_query(
}
if temporality:
spec["aggregations"][0]["temporality"] = temporality
if comparisonSpaceAggregationParam:
spec["aggregations"][0]["comparisonSpaceAggregationParam"] = comparisonSpaceAggregationParam
if group_by:
spec["groupBy"] = [
{

View File

@@ -1,259 +0,0 @@
"""
Look at the histogram_data_1h.jsonl file for the relevant data
"""
import random
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
import pytest
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
build_builder_query,
get_all_series,
get_series_values,
make_query_request,
)
from fixtures.utils import get_testdata_file_path
FILE = get_testdata_file_path("histogram_data_1h.jsonl")
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 11, 69),
(100, "<=", 1.1, 6.9),
(7500, "<=", 16.75, 74.75),
(8000, "<=", 17, 75),
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 7, 7),
(100, ">", 16.9, 69.1),
(7500, ">", 1.25, 1.25),
(8000, ">", 1, 1),
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_one_endpoint(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='endpoint = "/health"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 59
assert result_values[0]["value"] == first_value
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 22, 138),
(100, "<=", 2.2, 13.8),
(7500, "<=", 33.5, 149.5),
(8000, "<=", 34, 150),
(80000, "<=", 34, 150), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 14, 14),
(100, ">", 33.8, 138.2),
(7500, ">", 2.5, 2.5),
(8000, ">", 2, 2),
(80000, ">", 2, 2), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_one_service(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='service = "api"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 59
assert result_values[0]["value"] == first_value
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 11, 69),
(100, "<=", 1.1, 6.9),
(7500, "<=", 16.75, 74.75),
(8000, "<=", 17, 75),
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
(1000, ">", 7, 7),
(100, ">", 16.9, 69.1),
(7500, ">", 1.25, 1.25),
(8000, ">", 1, 1),
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
],
)
def test_histogram_count_for_delta_service(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
filter_expression='service = "web"',
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 60 ## in delta, the value at 10:01 will also be reported
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
assert result_values[-1]["value"] == last_value
@pytest.mark.parametrize(
"threshold, operator, first_value, last_value",
[
(1000, "<=", 33, 207),
(100, "<=", 3.3, 20.7),
(7500, "<=", 50.25, 224.25),
(8000, "<=", 51, 225),
(80000, "<=", 51, 225),
(1000, ">", 21, 21),
(100, ">", 50.7, 207.3),
(7500, ">", 3.75, 3.75),
(8000, ">", 3, 3),
(80000, ">", 3, 3),
],
)
def test_histogram_count_for_all_services(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_metrics: Callable[[List[Metrics]], None],
threshold: float,
operator: str,
first_value: float,
last_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
metric_name = f"test_bucket"
metrics = Metrics.load_from_file(
FILE,
base_time=now - timedelta(minutes=60),
metric_name_override=metric_name,
)
insert_metrics(metrics)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
query = build_builder_query(
"A",
metric_name,
"rate",
"histogram_count",
comparisonSpaceAggregationParam={
"threshold": threshold,
"operator": operator
},
## no service filter; this also exercises multi-temporality handling
)
response = make_query_request(signoz, token, start_ms, end_ms, [query])
assert response.status_code == HTTPStatus.OK
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) == 60
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
assert result_values[-1]["value"] == last_value

File diff suppressed because it is too large