mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-09 03:02:20 +00:00)

Compare commits
1 commit: issue-1000...update-co

| Author | SHA1 | Date |
|---|---|---|
|  | 01d632cfe4 |  |

.github/CODEOWNERS (vendored): 2 changes
@@ -101,7 +101,7 @@
 # Integration tests
-/tests/integration/ @vikrantgupta25
+/tests/integration/ @vikrantgupta25, @srikanthccv

 # OpenAPI types generator
@@ -235,31 +235,28 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
   switch spec := item.Spec.(type) {
   case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
     if spec.Filter != nil && spec.Filter.Expression != "" {
-      replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
+      replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
       if err != nil {
         errs = append(errs, err)
       }
-      a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
       spec.Filter.Expression = replaced
     }
     queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
   case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
     if spec.Filter != nil && spec.Filter.Expression != "" {
-      replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
+      replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
       if err != nil {
         errs = append(errs, err)
       }
-      a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
       spec.Filter.Expression = replaced
     }
     queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
   case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
     if spec.Filter != nil && spec.Filter.Expression != "" {
-      replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
+      replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
       if err != nil {
         errs = append(errs, err)
       }
-      a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
       spec.Filter.Expression = replaced
     }
     queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
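The hunk above narrows variables.ReplaceVariablesInExpression from three return values (replaced, warnings, err) to two, dropping the warning plumbing at every call site. Below is a minimal, self-contained sketch of the updated call pattern; the stub substitution function is an assumption standing in for the real variables package, only the shape of the call follows the hunk.

package main

import (
	"errors"
	"fmt"
	"strings"
)

// replaceVariablesInExpression is a stand-in for the real helper; after this
// commit it returns only (string, error), with no warnings slice.
func replaceVariablesInExpression(expr string, vars map[string]string) (string, error) {
	for name, val := range vars {
		expr = strings.ReplaceAll(expr, "$"+name, val)
	}
	if strings.Contains(expr, "$") {
		return expr, errors.New("unresolved variable reference")
	}
	return expr, nil
}

func main() {
	var errs []error
	replaced, err := replaceVariablesInExpression("service.name = $service", map[string]string{"service": "auth"})
	if err != nil {
		errs = append(errs, err) // errors are accumulated, as in the handler above
	}
	fmt.Println(replaced, len(errs)) // service.name = auth 0
}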
@@ -4,7 +4,6 @@ import (
 	"context"
 	"fmt"
 	"log/slog"
-	"sort"
 	"strconv"
 	"strings"

@@ -165,7 +164,6 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64
 		return nil, combinedErrors.WithAdditional(visitor.errors...).WithUrl(url)
 	}

 	// if there is nothing to filter, return true
 	if cond == "" {
 		cond = "true"
 	}
@@ -223,6 +221,7 @@ func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
 }

 func (v *filterExpressionVisitor) VisitQuery(ctx *grammar.QueryContext) any {

 	return v.Visit(ctx.Expression())
 }
@@ -504,16 +503,8 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
 	// Get all values for operations that need them
 	values := ctx.AllValue()
 	if len(values) > 0 {
 		// there should only be one value for the following operators, even if there is more than one
 		// we just take the first value
 		value := v.Visit(values[0])
-
-		// Check if the value is a skip marker (embedded variable with __all__ value)
-		if strVal, ok := value.(string); ok && strVal == specialSkipConditionMarker {
-			v.logger.Info("skipping condition due to __all__ variable", "keys", keys, "value", value) //nolint:sloglint
-			return ""
-		}
-
 		if var_, ok := value.(string); ok {
 			// check if this is a variable
 			var ok bool
@@ -525,13 +516,6 @@
 			}

 			if ok {
-				if varItem.Type == qbtypes.DynamicVariableType {
-					if all_, ok := varItem.Value.(string); ok && all_ == "__all__" {
-						// this is likely overlooked by user, we treat it as if it was IN instead of =
-						v.logger.Warn("received unexpected __all__ value for single select dynamic variable", "variable", var_, "keys", keys, "value", value) //nolint:sloglint
-						return ""
-					}
-				}
 				switch varValues := varItem.Value.(type) {
 				case []any:
 					if len(varValues) == 0 {
@@ -797,12 +781,7 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
 	if ctx.QUOTED_TEXT() != nil {
 		txt := ctx.QUOTED_TEXT().GetText()
 		// trim quotes and return the value
-		value := trimQuotes(txt)
-		// Check if the string contains embedded variables
-		if strings.Contains(value, "$") {
-			return v.interpolateVariablesInString(value)
-		}
-		return value
+		return trimQuotes(txt)
 	} else if ctx.NUMBER() != nil {
 		number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
 		if err != nil {
@@ -819,12 +798,7 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
 		// When the user writes an expression like `service.name=redis`
 		// The `redis` part is a VALUE context but parsed as a KEY token
 		// so we return the text as is
-		keyText := ctx.KEY().GetText()
-		// Check if this is a composed variable like $environment-xyz
-		if strings.HasPrefix(keyText, "$") {
-			return v.interpolateVariablesInString(keyText)
-		}
-		return keyText
+		return ctx.KEY().GetText()
 	}

 	return "" // Should not happen with valid input
@@ -856,7 +830,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
 	// 1. either user meant key ( this is already handled above in fieldKeysForName )
 	// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

-	// Note:
+	// Note:
 	// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
 	// If user only wants to search `key`, then they have to use `key`
 	// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -954,103 +928,3 @@ func trimQuotes(txt string) string {
 	txt = strings.ReplaceAll(txt, `\'`, `'`)
 	return txt
 }
-
-// specialSkipConditionMarker is used to indicate that the entire condition should be removed
-const specialSkipConditionMarker = "__signoz_skip_condition__"
-
-// interpolateVariablesInString finds and replaces variable references within a string
-// by checking against actual variable names in the variables map.
-// Pure variable references (e.g., "$service") are returned as-is to let the
-// existing variable handling code process them.
-// Returns specialSkipConditionMarker if any variable has __all__ value.
-func (v *filterExpressionVisitor) interpolateVariablesInString(s string) string {
-  // if this is a complete variable reference (just $varname with nothing else)
-  // if so, return as-is
-  varName := s
-  if strings.HasPrefix(varName, "$") {
-    _, exactMatch := v.variables[varName]
-    if !exactMatch {
-      _, exactMatch = v.variables[varName[1:]]
-    }
-    if exactMatch {
-      return s
-    }
-  }
-
-  result := s
-
-  // find and replace variables by checking each variable name in the map
-  // process longer variable names first to handle cases with prefix substring
-  varNames := make([]string, 0, len(v.variables)*2)
-  for name := range v.variables {
-    varNames = append(varNames, name)
-    // add with $ prefix if not already present
-    if !strings.HasPrefix(name, "$") {
-      varNames = append(varNames, "$"+name)
-    }
-  }
-
-  // sort by length (longest first) to match longer variable names before shorter ones
-  sort.Slice(varNames, func(i, j int) bool {
-    return len(varNames[i]) > len(varNames[j])
-  })
-
-  for _, vName := range varNames {
-    searchPattern := vName
-    if !strings.HasPrefix(searchPattern, "$") {
-      searchPattern = "$" + vName
-    }
-
-    if strings.Contains(result, searchPattern) {
-      // direct lookup
-      varItem, ok := v.variables[vName]
-      if !ok {
-        // Try without $ prefix
-        varItem, ok = v.variables[strings.TrimPrefix(vName, "$")]
-      }
-
-      if ok {
-        // special check for __all__ value - skip the entire condition
-        if varItem.Type == qbtypes.DynamicVariableType {
-          if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
-            return specialSkipConditionMarker
-          }
-        }
-
-        replacement := v.formatVariableValueForInterpolation(varItem.Value, strings.TrimPrefix(vName, "$"))
-        result = strings.ReplaceAll(result, searchPattern, replacement)
-      }
-    }
-  }
-
-  return result
-}
-
-func (v *filterExpressionVisitor) formatVariableValueForInterpolation(value any, varName string) string {
-  switch val := value.(type) {
-  case string:
-    return val
-  case []string:
-    if len(val) > 1 {
-      v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, val[0]))
-    }
-    if len(val) > 0 {
-      return val[0]
-    }
-    return ""
-  case []any:
-    if len(val) > 1 {
-      v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
-    }
-    if len(val) > 0 {
-      return v.formatVariableValueForInterpolation(val[0], varName)
-    }
-    return ""
-  case int, int32, int64, float32, float64:
-    return fmt.Sprintf("%v", val)
-  case bool:
-    return strconv.FormatBool(val)
-  default:
-    return fmt.Sprintf("%v", val)
-  }
-}
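The deleted interpolateVariablesInString above matched variable names longest-first so that $environment would win over $env inside a composed string. Here is a compact, runnable sketch of just that core idea; it is not the SigNoz implementation (type handling, the $-prefixed map lookups, and the __all__ skip marker are omitted):

package main

import (
	"fmt"
	"sort"
	"strings"
)

// interpolate mirrors the deleted logic: collect candidate names, sort them
// longest-first so "$environment" is replaced before "$env", then substitute
// each "$name" occurrence with its value.
func interpolate(s string, vars map[string]string) string {
	names := make([]string, 0, len(vars))
	for name := range vars {
		names = append(names, name)
	}
	sort.Slice(names, func(i, j int) bool { return len(names[i]) > len(names[j]) })
	for _, name := range names {
		s = strings.ReplaceAll(s, "$"+name, vars[name])
	}
	return s
}

func main() {
	vars := map[string]string{"env": "dev", "environment": "production"}
	fmt.Println(interpolate("$env-$environment", vars)) // dev-production, matching the deleted test case
}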
@@ -10,113 +10,8 @@ import (
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 	"github.com/antlr4-go/antlr/v4"
 	sqlbuilder "github.com/huandu/go-sqlbuilder"
 	"github.com/stretchr/testify/assert"
 )
-
-// TestInterpolateVariablesInString tests the embedded variable interpolation feature (GitHub issue #10008)
-func TestInterpolateVariablesInString(t *testing.T) {
-  tests := []struct {
-    name      string
-    input     string
-    variables map[string]qbtypes.VariableItem
-    expected  string
-  }{
-    {
-      name:  "pure variable reference - not interpolated",
-      input: "$service",
-      variables: map[string]qbtypes.VariableItem{
-        "service": {Value: "auth-service"},
-      },
-      expected: "$service", // Pure variables are handled by existing code
-    },
-    {
-      name:  "variable composed with suffix",
-      input: "$environment-xyz",
-      variables: map[string]qbtypes.VariableItem{
-        "environment": {Value: "prod"},
-      },
-      expected: "prod-xyz",
-    },
-    {
-      name:  "variable in quoted string with suffix",
-      input: "$env-cluster",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {Value: "staging"},
-      },
-      expected: "staging-cluster",
-    },
-    {
-      name:  "variable with prefix and suffix",
-      input: "prefix-$var-suffix",
-      variables: map[string]qbtypes.VariableItem{
-        "var": {Value: "middle"},
-      },
-      expected: "prefix-middle-suffix",
-    },
-    {
-      name:  "multiple variables in one string",
-      input: "$region-$env-cluster",
-      variables: map[string]qbtypes.VariableItem{
-        "region": {Value: "us-west"},
-        "env":    {Value: "prod"},
-      },
-      expected: "us-west-prod-cluster",
-    },
-    {
-      name:  "similar variable names - longer matches first",
-      input: "$env-$environment",
-      variables: map[string]qbtypes.VariableItem{
-        "env":         {Value: "dev"},
-        "environment": {Value: "production"},
-      },
-      expected: "dev-production",
-    },
-    {
-      name:      "unknown variable - preserved as-is",
-      input:     "$unknown-suffix",
-      variables: map[string]qbtypes.VariableItem{},
-      expected:  "$unknown-suffix",
-    },
-    {
-      name:  "variable with underscore",
-      input: "$my_var-test",
-      variables: map[string]qbtypes.VariableItem{
-        "my_var": {Value: "hello"},
-      },
-      expected: "hello-test",
-    },
-    {
-      name:  "__all__ value returns skip marker",
-      input: "$env-suffix",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "__all__",
-        },
-      },
-      expected: specialSkipConditionMarker,
-    },
-    {
-      name:  "multi-select takes first value",
-      input: "$env-suffix",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {Value: []any{"prod", "staging", "dev"}},
-      },
-      expected: "prod-suffix",
-    },
-  }
-
-  for _, tt := range tests {
-    t.Run(tt.name, func(t *testing.T) {
-      visitor := &filterExpressionVisitor{
-        variables: tt.variables,
-      }
-      result := visitor.interpolateVariablesInString(tt.input)
-      assert.Equal(t, tt.expected, result)
-    })
-  }
-}
-

 // TestPrepareWhereClause_EmptyVariableList ensures PrepareWhereClause errors when a variable has an empty list value
 func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
 	tests := []struct {

@@ -147,7 +42,7 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
 	}

 	keys := map[string][]*telemetrytypes.TelemetryFieldKey{
-		"service": {
+		"service": []*telemetrytypes.TelemetryFieldKey{
 			{
 				Name:   "service",
 				Signal: telemetrytypes.SignalLogs,

@@ -259,7 +154,7 @@ func TestVisitKey(t *testing.T) {
 			name:    "Key not found",
 			keyText: "unknown_key",
 			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
-				"service": {
+				"service": []*telemetrytypes.TelemetryFieldKey{
 					{
 						Name:   "service",
 						Signal: telemetrytypes.SignalLogs,

@@ -440,7 +335,7 @@ func TestVisitKey(t *testing.T) {
 			name:    "Unknown key with ignoreNotFoundKeys=false",
 			keyText: "unknown_key",
 			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
-				"service": {
+				"service": []*telemetrytypes.TelemetryFieldKey{
 					{
 						Name:   "service",
 						Signal: telemetrytypes.SignalLogs,

@@ -460,7 +355,7 @@ func TestVisitKey(t *testing.T) {
 			name:    "Unknown key with ignoreNotFoundKeys=true",
 			keyText: "unknown_key",
 			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
-				"service": {
+				"service": []*telemetrytypes.TelemetryFieldKey{
 					{
 						Name:   "service",
 						Signal: telemetrytypes.SignalLogs,

@@ -572,7 +467,7 @@ func TestVisitKey(t *testing.T) {
 			expectedWarnings:   nil,
 			expectedMainWrnURL: "",
 		},
-		{
+		{
 			name:    "only attribute.custom_field is selected",
 			keyText: "attribute.attribute.custom_field",
 			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
@@ -1,191 +0,0 @@
-package telemetrylogs
-
-import (
-  "testing"
-
-  "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
-  "github.com/SigNoz/signoz/pkg/querybuilder"
-  qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-  "github.com/huandu/go-sqlbuilder"
-  "github.com/stretchr/testify/require"
-)
-
-func TestFilterExprEmbeddedVariables(t *testing.T) {
-  fm := NewFieldMapper()
-  cb := NewConditionBuilder(fm, nil)
-  keys := buildCompleteFieldKeyMap()
-
-  testCases := []struct {
-    name          string
-    query         string
-    variables     map[string]qbtypes.VariableItem
-    shouldPass    bool
-    expectedQuery string
-    expectedArgs  []any
-  }{
-    {
-      name:  "variable composed with suffix in quoted string",
-      query: "version = '$env-xyz'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
-      expectedArgs:  []any{"prod-xyz", true},
-    },
-    {
-      name:  "variable in LIKE pattern with suffix",
-      query: "service.name LIKE '$env%'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
-      expectedArgs:  []any{"prod%"},
-    },
-    {
-      name:  "variable with prefix and suffix",
-      query: "path = 'prefix-$var-suffix'",
-      variables: map[string]qbtypes.VariableItem{
-        "var": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "middle",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
-      expectedArgs:  []any{"prefix-middle-suffix", true},
-    },
-    {
-      name:  "multiple variables in one string",
-      query: "path = '$region-$env-cluster'",
-      variables: map[string]qbtypes.VariableItem{
-        "region": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "us-west",
-        },
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
-      expectedArgs:  []any{"us-west-prod-cluster", true},
-    },
-    {
-      name:  "similar variable names - longer matches first",
-      query: "path = '$env-$environment'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "dev",
-        },
-        "environment": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "production",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
-      expectedArgs:  []any{"dev-production", true},
-    },
-    {
-      name:  "pure variable reference - still works",
-      query: "service.name = $service",
-      variables: map[string]qbtypes.VariableItem{
-        "service": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "auth-service",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
-      expectedArgs:  []any{"auth-service"},
-    },
-    {
-      name:  "variable with underscore composed with suffix",
-      query: "version = '$my_var-test'",
-      variables: map[string]qbtypes.VariableItem{
-        "my_var": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "hello",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
-      expectedArgs:  []any{"hello-test", true},
-    },
-    {
-      name:  "variable in ILIKE pattern",
-      query: "message ILIKE '%$pattern%'",
-      variables: map[string]qbtypes.VariableItem{
-        "pattern": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "error",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)",
-      expectedArgs:  []any{"%error%", true},
-    },
-    {
-      name:  "__all__ value skips condition",
-      query: "version = '$env-xyz'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "__all__",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE true",
-      expectedArgs:  nil,
-    },
-    {
-      name:  "multi-select takes first value",
-      query: "version = '$env-xyz'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: []any{"prod", "staging", "dev"},
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
-      expectedArgs:  []any{"prod-xyz", true},
-    },
-  }
-
-  for _, tc := range testCases {
-    t.Run(tc.name, func(t *testing.T) {
-      opts := querybuilder.FilterExprVisitorOpts{
-        Logger:           instrumentationtest.New().Logger(),
-        FieldMapper:      fm,
-        ConditionBuilder: cb,
-        FieldKeys:        keys,
-        FullTextColumn:   DefaultFullTextColumn,
-        JsonKeyToKey:     GetBodyJSONKey,
-        Variables:        tc.variables,
-      }
-
-      clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
-
-      if tc.shouldPass {
-        require.NoError(t, err)
-        require.NotNil(t, clause)
-        sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
-        require.Equal(t, tc.expectedQuery, sql)
-        require.Equal(t, tc.expectedArgs, args)
-      } else {
-        require.Error(t, err)
-      }
-    })
-  }
-}
@@ -1,235 +0,0 @@
-package telemetrymetrics
-
-import (
-  "testing"
-
-  "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
-  "github.com/SigNoz/signoz/pkg/querybuilder"
-  qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-  "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
-  "github.com/huandu/go-sqlbuilder"
-  "github.com/stretchr/testify/require"
-)
-
-func buildMetricsFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
-  return map[string][]*telemetrytypes.TelemetryFieldKey{
-    "service.name": {
-      {
-        Name:          "service.name",
-        Signal:        telemetrytypes.SignalMetrics,
-        FieldContext:  telemetrytypes.FieldContextResource,
-        FieldDataType: telemetrytypes.FieldDataTypeString,
-      },
-    },
-    "host.name": {
-      {
-        Name:          "host.name",
-        Signal:        telemetrytypes.SignalMetrics,
-        FieldContext:  telemetrytypes.FieldContextResource,
-        FieldDataType: telemetrytypes.FieldDataTypeString,
-      },
-    },
-    "environment": {
-      {
-        Name:          "environment",
-        Signal:        telemetrytypes.SignalMetrics,
-        FieldContext:  telemetrytypes.FieldContextResource,
-        FieldDataType: telemetrytypes.FieldDataTypeString,
-      },
-    },
-    "region": {
-      {
-        Name:          "region",
-        Signal:        telemetrytypes.SignalMetrics,
-        FieldContext:  telemetrytypes.FieldContextResource,
-        FieldDataType: telemetrytypes.FieldDataTypeString,
-      },
-    },
-    "cluster": {
-      {
-        Name:          "cluster",
-        Signal:        telemetrytypes.SignalMetrics,
-        FieldContext:  telemetrytypes.FieldContextAttribute,
-        FieldDataType: telemetrytypes.FieldDataTypeString,
-      },
-    },
-  }
-}
-
-func TestFilterExprEmbeddedVariables(t *testing.T) {
-  fm := NewFieldMapper()
-  cb := NewConditionBuilder(fm)
-  keys := buildMetricsFieldKeyMap()
-
-  testCases := []struct {
-    name          string
-    query         string
-    variables     map[string]qbtypes.VariableItem
-    shouldPass    bool
-    expectedQuery string
-    expectedArgs  []any
-  }{
-    {
-      name:  "variable composed with suffix in quoted string",
-      query: "host.name = '$env-server'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
-      expectedArgs:  []any{"prod-server"},
-    },
-    {
-      name:  "variable in LIKE pattern with suffix",
-      query: "service.name LIKE '$env%'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'service.name') LIKE ?",
-      expectedArgs:  []any{"prod%"},
-    },
-    {
-      name:  "variable with prefix and suffix",
-      query: "cluster = 'prefix-$var-suffix'",
-      variables: map[string]qbtypes.VariableItem{
-        "var": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "middle",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
-      expectedArgs:  []any{"prefix-middle-suffix"},
-    },
-    {
-      name:  "multiple variables in one string",
-      query: "cluster = '$region-$env-cluster'",
-      variables: map[string]qbtypes.VariableItem{
-        "region": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "us-west",
-        },
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
-      expectedArgs:  []any{"us-west-prod-cluster"},
-    },
-    {
-      name:  "similar variable names - longer matches first",
-      query: "cluster = '$env-$environment'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "dev",
-        },
-        "environment": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "production",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
-      expectedArgs:  []any{"dev-production"},
-    },
-    {
-      name:  "pure variable reference - still works",
-      query: "service.name = $service",
-      variables: map[string]qbtypes.VariableItem{
-        "service": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "auth-service",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'service.name') = ?",
-      expectedArgs:  []any{"auth-service"},
-    },
-    {
-      name:  "variable with underscore composed with suffix",
-      query: "host.name = '$my_var-test'",
-      variables: map[string]qbtypes.VariableItem{
-        "my_var": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "hello",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
-      expectedArgs:  []any{"hello-test"},
-    },
-    {
-      name:  "variable in ILIKE pattern",
-      query: "environment ILIKE '%$pattern%'",
-      variables: map[string]qbtypes.VariableItem{
-        "pattern": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "staging",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE LOWER(JSONExtractString(labels, 'environment')) LIKE LOWER(?)",
-      expectedArgs:  []any{"%staging%"},
-    },
-    {
-      name:  "__all__ value skips condition",
-      query: "host.name = '$env-server'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "__all__",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE true",
-      expectedArgs:  nil,
-    },
-    {
-      name:  "multi-select takes first value",
-      query: "host.name = '$env-server'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: []any{"prod", "staging", "dev"},
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
-      expectedArgs:  []any{"prod-server"},
-    },
-  }
-
-  for _, tc := range testCases {
-    t.Run(tc.name, func(t *testing.T) {
-      opts := querybuilder.FilterExprVisitorOpts{
-        Logger:           instrumentationtest.New().Logger(),
-        FieldMapper:      fm,
-        ConditionBuilder: cb,
-        FieldKeys:        keys,
-        Variables:        tc.variables,
-      }
-
-      clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
-
-      if tc.shouldPass {
-        require.NoError(t, err)
-        require.NotNil(t, clause)
-        sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
-        require.Equal(t, tc.expectedQuery, sql)
-        require.Equal(t, tc.expectedArgs, args)
-      } else {
-        require.Error(t, err)
-      }
-    })
-  }
-}
@@ -1,146 +0,0 @@
-package telemetrytraces
-
-import (
-  "testing"
-
-  "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
-  "github.com/SigNoz/signoz/pkg/querybuilder"
-  qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-  "github.com/huandu/go-sqlbuilder"
-  "github.com/stretchr/testify/require"
-)
-
-func TestFilterExprEmbeddedVariables(t *testing.T) {
-  fm := NewFieldMapper()
-  cb := NewConditionBuilder(fm)
-  keys := buildCompleteFieldKeyMap()
-
-  testCases := []struct {
-    name          string
-    query         string
-    variables     map[string]qbtypes.VariableItem
-    shouldPass    bool
-    expectedQuery string
-    expectedArgs  []any
-  }{
-    {
-      name:  "variable composed with suffix in quoted string",
-      query: "http.method = '$method-request'",
-      variables: map[string]qbtypes.VariableItem{
-        "method": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "GET",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
-      expectedArgs:  []any{"GET-request", true},
-    },
-    {
-      name:  "variable in LIKE pattern with suffix",
-      query: "service.name LIKE '$env%'",
-      variables: map[string]qbtypes.VariableItem{
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
-      expectedArgs:  []any{"prod%"},
-    },
-    {
-      name:  "variable with prefix and suffix",
-      query: "user.id = 'user-$var-id'",
-      variables: map[string]qbtypes.VariableItem{
-        "var": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "123",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
-      expectedArgs:  []any{"user-123-id", true},
-    },
-    {
-      name:  "multiple variables in one string",
-      query: "user.id = '$region-$env-user'",
-      variables: map[string]qbtypes.VariableItem{
-        "region": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "us-west",
-        },
-        "env": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "prod",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
-      expectedArgs:  []any{"us-west-prod-user", true},
-    },
-    {
-      name:  "pure variable reference - still works",
-      query: "service.name = $service",
-      variables: map[string]qbtypes.VariableItem{
-        "service": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "auth-service",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
-      expectedArgs:  []any{"auth-service"},
-    },
-    {
-      name:  "__all__ value skips condition",
-      query: "http.method = '$method-request'",
-      variables: map[string]qbtypes.VariableItem{
-        "method": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: "__all__",
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE true",
-      expectedArgs:  nil,
-    },
-    {
-      name:  "multi-select takes first value",
-      query: "http.method = '$method-request'",
-      variables: map[string]qbtypes.VariableItem{
-        "method": {
-          Type:  qbtypes.DynamicVariableType,
-          Value: []any{"GET", "POST", "PUT"},
-        },
-      },
-      shouldPass:    true,
-      expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
-      expectedArgs:  []any{"GET-request", true},
-    },
-  }
-
-  for _, tc := range testCases {
-    t.Run(tc.name, func(t *testing.T) {
-      opts := querybuilder.FilterExprVisitorOpts{
-        Logger:           instrumentationtest.New().Logger(),
-        FieldMapper:      fm,
-        ConditionBuilder: cb,
-        FieldKeys:        keys,
-        Variables:        tc.variables,
-      }
-
-      clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
-
-      if tc.shouldPass {
-        require.NoError(t, err)
-        require.NotNil(t, clause)
-        sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
-        require.Equal(t, tc.expectedQuery, sql)
-        require.Equal(t, tc.expectedArgs, args)
-      } else {
-        require.Error(t, err)
-      }
-    })
-  }
-}
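The three deleted test files above all pin down the same contract for embedded variables: composed strings are interpolated, multi-select values fall back to their first element, and a dynamic variable set to __all__ removes the condition entirely, which PrepareWhereClause then renders as "WHERE true". A small sketch of that skip behavior under those assumptions follows; the types here are illustrative, not the real qbtypes.

package main

import "fmt"

type VariableItem struct {
	Value any
}

// buildCondition returns the filter condition, or "" to signal that the
// condition should be skipped entirely (the deleted skip-marker path).
func buildCondition(cond string, item VariableItem) string {
	if all, ok := item.Value.(string); ok && all == "__all__" {
		return "" // skip marker: drop the condition
	}
	return cond
}

func main() {
	cond := buildCondition("host.name = '$env-server'", VariableItem{Value: "__all__"})
	if cond == "" {
		cond = "true" // mirrors PrepareWhereClause's empty-condition fallback
	}
	fmt.Println("WHERE " + cond) // WHERE true
}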
@@ -1,8 +1,6 @@
 package querybuildertypesv5

 import (
-	"fmt"
-
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 )

@@ -133,44 +131,12 @@ func (q *QueryBuilderQuery[T]) UnmarshalJSON(data []byte) error {

 	var temp Alias
 	// Use UnmarshalJSONWithContext for better error messages
-	if err := UnmarshalJSONWithContext(data, &temp, fmt.Sprintf("query spec for %T", q)); err != nil {
+	if err := UnmarshalJSONWithContext(data, &temp, "query spec"); err != nil {
 		return err
 	}

 	// Copy the decoded values back to the original struct
 	*q = QueryBuilderQuery[T](temp)

-	// Normalize the query after unmarshaling
-	q.Normalize()
 	return nil
 }
-
-// Normalize normalizes all the field keys in the query
-func (q *QueryBuilderQuery[T]) Normalize() {
-
-  // normalize select fields
-  for idx := range q.SelectFields {
-    q.SelectFields[idx].Normalize()
-  }
-
-  // normalize group by fields
-  for idx := range q.GroupBy {
-    q.GroupBy[idx].Normalize()
-  }
-
-  // normalize order by fields
-  for idx := range q.Order {
-    q.Order[idx].Key.Normalize()
-  }
-
-  // normalize secondary aggregations
-  for idx := range q.SecondaryAggregations {
-    for jdx := range q.SecondaryAggregations[idx].Order {
-      q.SecondaryAggregations[idx].Order[jdx].Key.Normalize()
-    }
-    for jdx := range q.SecondaryAggregations[idx].GroupBy {
-      q.SecondaryAggregations[idx].GroupBy[jdx].Normalize()
-    }
-  }
-
-}
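The removed Normalize method walks every field-key slot on the query. Judging by the deleted tests in the file below, normalizing a key strips a context prefix such as "resource." or "span." from the name and records it as the FieldContext, leaving already-bare names untouched. A hypothetical sketch of that per-key rule; the real telemetrytypes implementation may differ:

package main

import (
	"fmt"
	"strings"
)

type FieldKey struct {
	Name         string
	FieldContext string
}

// Normalize strips a known context prefix from the name and records it as
// the field context (assumed behavior, inferred from the deleted tests).
func (k *FieldKey) Normalize() {
	for _, ctx := range []string{"resource", "span"} {
		if strings.HasPrefix(k.Name, ctx+".") {
			k.FieldContext = ctx
			k.Name = strings.TrimPrefix(k.Name, ctx+".")
			return
		}
	}
}

func main() {
	k := FieldKey{Name: "resource.service.name"}
	k.Normalize()
	fmt.Println(k.Name, k.FieldContext) // service.name resource
}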
@@ -1,653 +0,0 @@
|
||||
package querybuildertypesv5
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestQueryBuilderQuery_Copy(t *testing.T) {
|
||||
t.Run("copy with all fields populated", func(t *testing.T) {
|
||||
original := QueryBuilderQuery[TraceAggregation]{
|
||||
Name: "A",
|
||||
StepInterval: Step{Duration: 60 * time.Second},
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Source: telemetrytypes.SourceUnspecified,
|
||||
Aggregations: []TraceAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
Alias: "trace_count",
|
||||
},
|
||||
},
|
||||
Disabled: false,
|
||||
Filter: &Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
GroupBy: []GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
},
|
||||
},
|
||||
Order: []OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "timestamp",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
Direction: OrderDirectionDesc,
|
||||
},
|
||||
},
|
||||
SelectFields: []telemetrytypes.TelemetryFieldKey{
|
||||
{
|
||||
Name: "trace_id",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
Limit: 100,
|
||||
Offset: 0,
|
||||
Cursor: "cursor123",
|
||||
LimitBy: &LimitBy{
|
||||
Value: "10",
|
||||
Keys: []string{
|
||||
"service.name",
|
||||
},
|
||||
},
|
||||
Having: &Having{
|
||||
Expression: "count() > 100",
|
||||
},
|
||||
SecondaryAggregations: []SecondaryAggregation{
|
||||
{
|
||||
Limit: 10,
|
||||
Order: []OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "value",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
Direction: OrderDirectionAsc,
|
||||
},
|
||||
},
|
||||
GroupBy: []GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "region",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Functions: []Function{
|
||||
{
|
||||
Name: FunctionNameTimeShift,
|
||||
Args: []FunctionArg{
|
||||
{
|
||||
Name: "shift",
|
||||
Value: "1h",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Legend: "{{service.name}}",
|
||||
ShiftBy: 3600000,
|
||||
}
|
||||
|
||||
// Create a copy
|
||||
copied := original.Copy()
|
||||
|
||||
// Assert that values are equal
|
||||
assert.Equal(t, original.Name, copied.Name)
|
||||
assert.Equal(t, original.StepInterval, copied.StepInterval)
|
||||
assert.Equal(t, original.Signal, copied.Signal)
|
||||
assert.Equal(t, original.Source, copied.Source)
|
||||
assert.Equal(t, original.Disabled, copied.Disabled)
|
||||
assert.Equal(t, original.Limit, copied.Limit)
|
||||
assert.Equal(t, original.Offset, copied.Offset)
|
||||
assert.Equal(t, original.Cursor, copied.Cursor)
|
||||
assert.Equal(t, original.Legend, copied.Legend)
|
||||
assert.Equal(t, original.ShiftBy, copied.ShiftBy)
|
||||
|
||||
// Assert deep copies for slices and pointers
|
||||
require.NotNil(t, copied.Aggregations)
|
||||
assert.Equal(t, len(original.Aggregations), len(copied.Aggregations))
|
||||
assert.Equal(t, original.Aggregations[0].Expression, copied.Aggregations[0].Expression)
|
||||
|
||||
require.NotNil(t, copied.Filter)
|
||||
assert.Equal(t, original.Filter.Expression, copied.Filter.Expression)
|
||||
|
||||
require.NotNil(t, copied.GroupBy)
|
||||
assert.Equal(t, len(original.GroupBy), len(copied.GroupBy))
|
||||
assert.Equal(t, original.GroupBy[0].Name, copied.GroupBy[0].Name)
|
||||
|
||||
require.NotNil(t, copied.Order)
|
||||
assert.Equal(t, len(original.Order), len(copied.Order))
|
||||
assert.Equal(t, original.Order[0].Key.Name, copied.Order[0].Key.Name)
|
||||
|
||||
require.NotNil(t, copied.SelectFields)
|
||||
assert.Equal(t, len(original.SelectFields), len(copied.SelectFields))
|
||||
assert.Equal(t, original.SelectFields[0].Name, copied.SelectFields[0].Name)
|
||||
|
||||
require.NotNil(t, copied.LimitBy)
|
||||
assert.Equal(t, original.LimitBy.Value, copied.LimitBy.Value)
|
||||
assert.Equal(t, len(original.LimitBy.Keys), len(copied.LimitBy.Keys))
|
||||
|
||||
require.NotNil(t, copied.Having)
|
||||
assert.Equal(t, original.Having.Expression, copied.Having.Expression)
|
||||
|
||||
require.NotNil(t, copied.SecondaryAggregations)
|
||||
assert.Equal(t, len(original.SecondaryAggregations), len(copied.SecondaryAggregations))
|
||||
assert.Equal(t, original.SecondaryAggregations[0].Limit, copied.SecondaryAggregations[0].Limit)
|
||||
|
||||
require.NotNil(t, copied.Functions)
|
||||
assert.Equal(t, len(original.Functions), len(copied.Functions))
|
||||
assert.Equal(t, original.Functions[0].Name, copied.Functions[0].Name)
|
||||
|
||||
// Verify independence - modify copied and ensure original is unchanged
|
||||
copied.Name = "B"
|
||||
assert.Equal(t, "A", original.Name)
|
||||
|
||||
copied.Aggregations[0].Expression = "sum()"
|
||||
assert.Equal(t, "count()", original.Aggregations[0].Expression)
|
||||
|
||||
copied.Filter.Expression = "modified"
|
||||
assert.Equal(t, "service.name = 'frontend'", original.Filter.Expression)
|
||||
|
||||
copied.GroupBy[0].Name = "modified"
|
||||
assert.Equal(t, "service.name", original.GroupBy[0].Name)
|
||||
|
||||
copied.Order[0].Key.Name = "modified"
|
||||
assert.Equal(t, "timestamp", original.Order[0].Key.Name)
|
||||
|
||||
copied.SelectFields[0].Name = "modified"
|
||||
assert.Equal(t, "trace_id", original.SelectFields[0].Name)
|
||||
|
||||
copied.LimitBy.Value = "999"
|
||||
assert.Equal(t, "10", original.LimitBy.Value)
|
||||
|
||||
copied.Having.Expression = "modified"
|
||||
assert.Equal(t, "count() > 100", original.Having.Expression)
|
||||
|
||||
copied.SecondaryAggregations[0].Limit = 999
|
||||
assert.Equal(t, 10, original.SecondaryAggregations[0].Limit)
|
||||
|
||||
copied.Functions[0].Name = FunctionNameAbsolute
|
||||
assert.Equal(t, FunctionNameTimeShift, original.Functions[0].Name)
|
||||
})
|
||||
|
||||
t.Run("copy with nil fields", func(t *testing.T) {
|
||||
original := QueryBuilderQuery[TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
}
|
||||
|
||||
copied := original.Copy()
|
||||
|
||||
assert.Equal(t, original.Name, copied.Name)
|
||||
assert.Equal(t, original.Signal, copied.Signal)
|
||||
assert.Nil(t, copied.Aggregations)
|
||||
assert.Nil(t, copied.Filter)
|
||||
assert.Nil(t, copied.GroupBy)
|
||||
assert.Nil(t, copied.Order)
|
||||
assert.Nil(t, copied.SelectFields)
|
||||
assert.Nil(t, copied.LimitBy)
|
||||
assert.Nil(t, copied.Having)
|
||||
assert.Nil(t, copied.SecondaryAggregations)
|
||||
assert.Nil(t, copied.Functions)
|
||||
})
|
||||
|
||||
t.Run("copy metric aggregation query", func(t *testing.T) {
|
||||
original := QueryBuilderQuery[MetricAggregation]{
|
||||
Name: "M",
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []MetricAggregation{
|
||||
{
|
||||
MetricName: "cpu_usage",
|
||||
SpaceAggregation: metrictypes.SpaceAggregationAvg,
|
||||
TimeAggregation: metrictypes.TimeAggregationAvg,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
copied := original.Copy()
|
||||
|
||||
assert.Equal(t, original.Name, copied.Name)
|
||||
assert.Equal(t, original.Signal, copied.Signal)
|
||||
require.NotNil(t, copied.Aggregations)
|
||||
assert.Equal(t, original.Aggregations[0].MetricName, copied.Aggregations[0].MetricName)
|
||||
assert.Equal(t, original.Aggregations[0].SpaceAggregation, copied.Aggregations[0].SpaceAggregation)
|
||||
|
||||
// Verify independence
|
||||
copied.Aggregations[0].MetricName = "modified"
|
||||
assert.Equal(t, "cpu_usage", original.Aggregations[0].MetricName)
|
||||
})
|
||||
|
||||
t.Run("copy log aggregation query", func(t *testing.T) {
|
||||
original := QueryBuilderQuery[LogAggregation]{
|
||||
Name: "L",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Aggregations: []LogAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
Alias: "log_count",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
copied := original.Copy()
|
||||
|
||||
assert.Equal(t, original.Name, copied.Name)
|
||||
assert.Equal(t, original.Signal, copied.Signal)
|
||||
require.NotNil(t, copied.Aggregations)
|
||||
assert.Equal(t, original.Aggregations[0].Expression, copied.Aggregations[0].Expression)
|
||||
|
||||
// Verify independence
|
||||
copied.Aggregations[0].Expression = "sum()"
|
||||
assert.Equal(t, "count()", original.Aggregations[0].Expression)
|
||||
})
|
||||
}
|
||||
|
||||
func TestQueryBuilderQuery_Normalize(t *testing.T) {
|
||||
t.Run("normalize select fields", func(t *testing.T) {
|
||||
query := QueryBuilderQuery[TraceAggregation]{
|
||||
SelectFields: []telemetrytypes.TelemetryFieldKey{
|
||||
{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
{
|
||||
Name: "span.name",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
query.Normalize()
|
||||
|
||||
// Normalize only changes FieldContext, not the Name
|
||||
assert.Equal(t, "service.name", query.SelectFields[0].Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextResource, query.SelectFields[0].FieldContext)
|
||||
assert.Equal(t, "span.name", query.SelectFields[1].Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextSpan, query.SelectFields[1].FieldContext)
|
||||
})
|
||||
|
||||
t.Run("normalize group by fields", func(t *testing.T) {
|
||||
query := QueryBuilderQuery[TraceAggregation]{
|
||||
GroupBy: []GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
query.Normalize()
|
||||
|
||||
assert.Equal(t, "service.name", query.GroupBy[0].Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextResource, query.GroupBy[0].FieldContext)
|
||||
})
|
||||
|
||||
t.Run("normalize order by fields", func(t *testing.T) {
|
||||
query := QueryBuilderQuery[TraceAggregation]{
|
||||
Order: []OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "timestamp",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
Direction: OrderDirectionDesc,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
query.Normalize()
|
||||
|
||||
assert.Equal(t, "timestamp", query.Order[0].Key.Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextSpan, query.Order[0].Key.FieldContext)
|
||||
})
|
||||
|
||||
t.Run("normalize secondary aggregations", func(t *testing.T) {
|
||||
query := QueryBuilderQuery[TraceAggregation]{
|
||||
SecondaryAggregations: []SecondaryAggregation{
|
||||
{
|
||||
Order: []OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "value",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
Direction: OrderDirectionAsc,
|
||||
},
|
||||
},
|
||||
GroupBy: []GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "region",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
query.Normalize()
|
||||
|
||||
assert.Equal(t, "value", query.SecondaryAggregations[0].Order[0].Key.Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextSpan, query.SecondaryAggregations[0].Order[0].Key.FieldContext)
|
||||
assert.Equal(t, "region", query.SecondaryAggregations[0].GroupBy[0].Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextResource, query.SecondaryAggregations[0].GroupBy[0].FieldContext)
|
||||
})
|
||||
|
||||
t.Run("normalize with nil fields", func(t *testing.T) {
|
||||
query := QueryBuilderQuery[TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
}
|
||||
|
||||
// Should not panic
|
||||
query.Normalize()
|
||||
|
||||
assert.Equal(t, "A", query.Name)
|
||||
})
|
||||
|
||||
t.Run("normalize all fields together", func(t *testing.T) {
|
||||
query := QueryBuilderQuery[TraceAggregation]{
|
||||
SelectFields: []telemetrytypes.TelemetryFieldKey{
|
||||
{Name: "service.name", FieldContext: telemetrytypes.FieldContextResource},
|
||||
},
|
||||
GroupBy: []GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "host.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
},
|
||||
},
|
||||
},
|
||||
Order: []OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "duration",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
SecondaryAggregations: []SecondaryAggregation{
|
||||
{
|
||||
Order: []OrderBy{
|
||||
{
|
||||
Key: OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "count",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
GroupBy: []GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "status.code",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
query.Normalize()
|
||||
|
||||
assert.Equal(t, "service.name", query.SelectFields[0].Name)
|
||||
assert.Equal(t, "host.name", query.GroupBy[0].Name)
|
||||
assert.Equal(t, "duration", query.Order[0].Key.Name)
|
||||
assert.Equal(t, "count", query.SecondaryAggregations[0].Order[0].Key.Name)
|
||||
assert.Equal(t, "status.code", query.SecondaryAggregations[0].GroupBy[0].Name)
|
||||
})
|
||||
}
|
||||
|
||||
func TestQueryBuilderQuery_UnmarshalJSON(t *testing.T) {
|
||||
t.Run("valid trace query", func(t *testing.T) {
|
||||
jsonData := `{
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"stepInterval": 60,
|
||||
"aggregations": [{
|
||||
"expression": "count()",
|
||||
"alias": "trace_count"
|
||||
}],
|
||||
"filter": {
|
||||
"expression": "service.name = 'frontend'"
|
||||
},
|
||||
"groupBy": [{
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource"
|
||||
}],
|
||||
"order": [{
|
||||
"key": {
|
||||
"name": "service.name",
|
||||
"fieldContext": "resource"
|
||||
},
|
||||
"direction": "desc"
|
||||
}],
|
||||
"limit": 100
|
||||
}`
|
||||
|
||||
var query QueryBuilderQuery[TraceAggregation]
|
||||
err := json.Unmarshal([]byte(jsonData), &query)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, "A", query.Name)
|
||||
assert.Equal(t, telemetrytypes.SignalTraces, query.Signal)
|
||||
assert.Equal(t, int64(60000), query.StepInterval.Milliseconds())
|
||||
assert.Equal(t, 1, len(query.Aggregations))
|
||||
assert.Equal(t, "count()", query.Aggregations[0].Expression)
|
||||
assert.Equal(t, "trace_count", query.Aggregations[0].Alias)
|
||||
require.NotNil(t, query.Filter)
|
||||
assert.Equal(t, "service.name = 'frontend'", query.Filter.Expression)
|
||||
assert.Equal(t, 1, len(query.GroupBy))
|
||||
assert.Equal(t, "service.name", query.GroupBy[0].Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextResource, query.GroupBy[0].FieldContext)
|
||||
assert.Equal(t, 1, len(query.Order))
|
||||
assert.Equal(t, "service.name", query.Order[0].Key.Name)
|
||||
assert.Equal(t, telemetrytypes.FieldContextResource, query.Order[0].Key.FieldContext)
|
||||
assert.Equal(t, OrderDirectionDesc, query.Order[0].Direction)
|
||||
assert.Equal(t, 100, query.Limit)
|
||||
})
|
||||
|
||||
t.Run("valid metric query", func(t *testing.T) {
|
||||
jsonData := `{
|
||||
"name": "M",
|
||||
"signal": "metrics",
|
||||
"stepInterval": "1m",
|
||||
"aggregations": [{
|
||||
"metricName": "cpu_usage",
|
||||
"spaceAggregation": "avg",
|
||||
"timeAggregation": "avg"
|
||||
}]
|
||||
}`
|
||||
|
||||
var query QueryBuilderQuery[MetricAggregation]
|
||||
err := json.Unmarshal([]byte(jsonData), &query)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, "M", query.Name)
|
||||
		assert.Equal(t, telemetrytypes.SignalMetrics, query.Signal)
		assert.Equal(t, int64(60000), query.StepInterval.Milliseconds())
		assert.Equal(t, 1, len(query.Aggregations))
		assert.Equal(t, "cpu_usage", query.Aggregations[0].MetricName)
	})

	t.Run("valid log query", func(t *testing.T) {
		jsonData := `{
			"name": "L",
			"signal": "logs",
			"aggregations": [{
				"expression": "count()",
				"alias": "log_count"
			}]
		}`

		var query QueryBuilderQuery[LogAggregation]
		err := json.Unmarshal([]byte(jsonData), &query)
		require.NoError(t, err)

		assert.Equal(t, "L", query.Name)
		assert.Equal(t, telemetrytypes.SignalLogs, query.Signal)
		assert.Equal(t, 1, len(query.Aggregations))
		assert.Equal(t, "count()", query.Aggregations[0].Expression)
	})

	t.Run("unknown field error", func(t *testing.T) {
		jsonData := `{
			"name": "A",
			"signal": "traces",
			"unknownField": "value"
		}`

		var query QueryBuilderQuery[TraceAggregation]
		err := json.Unmarshal([]byte(jsonData), &query)
		assert.Error(t, err)
		assert.Contains(t, err.Error(), "unknown field")
	})

	t.Run("query with all optional fields", func(t *testing.T) {
		// NOTE: This JSON payload is not realistic, just for testing all fields
		jsonData := `{
			"name": "A",
			"signal": "traces",
			"stepInterval": "5m",
			"source": "traces",
			"aggregations": [{
				"expression": "count()",
				"alias": "span.count"
			}],
			"disabled": true,
			"filter": {
				"expression": "service.name = 'api'"
			},
			"groupBy": [{
				"name": "service.name",
				"fieldContext": "resource"
			}],
			"order": [{
				"key": {
					"name": "timestamp",
					"fieldContext": "span"
				},
				"direction": "asc"
			}],
			"selectFields": [{
				"name": "trace_id",
				"fieldContext": "span"
			}],
			"limit": 50,
			"offset": 10,
			"cursor": "cursor123",
			"limitBy": {
				"value": "5",
				"keys": ["service.name"]
			},
			"having": {
				"expression": "count() > 10"
			},
			"secondaryAggregations": [{
				"limit": 20,
				"order": [{
					"key": {
						"name": "value",
						"fieldContext": "span"
					},
					"direction": "desc"
				}],
				"groupBy": [{
					"name": "region",
					"fieldContext": "resource"
				}]
			}],
			"functions": [{
				"name": "timeShift",
				"args": [{
					"name": "shift",
					"value": "1h"
				}]
			}],
			"legend": "{{service.name}}"
		}`

		var query QueryBuilderQuery[TraceAggregation]
		err := json.Unmarshal([]byte(jsonData), &query)
		require.NoError(t, err)

		assert.Equal(t, "A", query.Name)
		assert.Equal(t, telemetrytypes.SignalTraces, query.Signal)
		assert.Equal(t, int64(300000), query.StepInterval.Milliseconds())
		// Source is set in the JSON, so it should be "traces", not SourceUnspecified
		assert.Equal(t, "traces", query.Source.String.StringValue())
		assert.True(t, query.Disabled)
		assert.Equal(t, query.Aggregations[0].Expression, "count()")
		assert.Equal(t, query.Aggregations[0].Alias, "span.count")
		assert.NotNil(t, query.Filter)
		assert.NotNil(t, query.GroupBy)
		assert.NotNil(t, query.Order)
		assert.NotNil(t, query.SelectFields)
		assert.Equal(t, 50, query.Limit)
		assert.Equal(t, 10, query.Offset)
		assert.Equal(t, "cursor123", query.Cursor)
		assert.NotNil(t, query.LimitBy)
		assert.NotNil(t, query.Having)
		assert.NotNil(t, query.SecondaryAggregations)
		assert.NotNil(t, query.Functions)
		assert.Equal(t, "{{service.name}}", query.Legend)
	})

	t.Run("normalization happens during unmarshaling", func(t *testing.T) {
		jsonData := `{
			"name": "A",
			"signal": "traces",
			"selectFields": [{
				"name": "resource.service.name"
			}],
			"groupBy": [{
				"name": "resource.host.name"
			}],
			"order": [{
				"key": {
					"name": "span.duration"
				},
				"direction": "desc"
			}]
		}`

		var query QueryBuilderQuery[TraceAggregation]
		err := json.Unmarshal([]byte(jsonData), &query)
		require.NoError(t, err)

		// The FieldContext prefix should be parsed out of the key name during normalization
		assert.Equal(t, "service.name", query.SelectFields[0].Name)
		assert.Equal(t, telemetrytypes.FieldContextResource, query.SelectFields[0].FieldContext)
		assert.Equal(t, "host.name", query.GroupBy[0].Name)
		assert.Equal(t, telemetrytypes.FieldContextResource, query.GroupBy[0].FieldContext)
		assert.Equal(t, "duration", query.Order[0].Key.Name)
		assert.Equal(t, telemetrytypes.FieldContextSpan, query.Order[0].Key.FieldContext)
	})
}
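The "unknown field error" case above implies the spec types decode strictly, rejecting JSON fields they do not declare. A minimal sketch of that behavior, assuming it is built on encoding/json's DisallowUnknownFields (strictUnmarshal here is illustrative, not the repository's actual helper):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// strictUnmarshal decodes data into v, failing on any JSON field that the
// target struct does not declare.
func strictUnmarshal(data []byte, v any) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	dec.DisallowUnknownFields()
	return dec.Decode(v)
}

type query struct {
	Name   string `json:"name"`
	Signal string `json:"signal"`
}

func main() {
	var q query
	err := strictUnmarshal([]byte(`{"name":"A","signal":"traces","unknownField":"x"}`), &q)
	fmt.Println(err) // json: unknown field "unknownField"
}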
@@ -47,19 +47,19 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
	switch header.Signal {
	case telemetrytypes.SignalTraces:
		var spec QueryBuilderQuery[TraceAggregation]
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "query spec"); err != nil {
			return wrapUnmarshalError(err, "invalid trace builder query spec: %v", err)
		}
		q.Spec = spec
	case telemetrytypes.SignalLogs:
		var spec QueryBuilderQuery[LogAggregation]
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "query spec"); err != nil {
			return wrapUnmarshalError(err, "invalid log builder query spec: %v", err)
		}
		q.Spec = spec
	case telemetrytypes.SignalMetrics:
		var spec QueryBuilderQuery[MetricAggregation]
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "query spec"); err != nil {
			return wrapUnmarshalError(err, "invalid metric builder query spec: %v", err)
		}
		q.Spec = spec
@@ -75,7 +75,6 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

	case QueryTypeFormula:
		var spec QueryBuilderFormula
		// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "formula spec"); err != nil {
			return wrapUnmarshalError(err, "invalid formula spec: %v", err)
		}
@@ -83,7 +82,6 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

	case QueryTypeJoin:
		var spec QueryBuilderJoin
		// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "join spec"); err != nil {
			return wrapUnmarshalError(err, "invalid join spec: %v", err)
		}
@@ -91,14 +89,13 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

	case QueryTypeTraceOperator:
		var spec QueryBuilderTraceOperator
		if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "trace operator spec"); err != nil {
			return wrapUnmarshalError(err, "invalid trace operator spec: %v", err)
		}
		q.Spec = spec

	case QueryTypePromQL:
		var spec PromQuery
		// TODO: use json.Unmarshal here after implementing custom unmarshaler for PromQuery
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "PromQL spec"); err != nil {
			return wrapUnmarshalError(err, "invalid PromQL spec: %v", err)
		}
@@ -106,7 +103,6 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

	case QueryTypeClickHouseSQL:
		var spec ClickHouseQuery
		// TODO: use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
		if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "ClickHouse SQL spec"); err != nil {
			return wrapUnmarshalError(err, "invalid ClickHouse SQL spec: %v", err)
		}
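Every branch above now funnels through UnmarshalJSONWithContext so the returned error names which spec failed to decode. Only the signature comes from the diff; a plausible sketch of such a helper, assuming it wraps strict decoding with the caller-supplied context string:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Assumed implementation: decode strictly and prefix failures with the
// caller-supplied context so API errors point at the offending spec.
func UnmarshalJSONWithContext(data []byte, v any, context string) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	dec.DisallowUnknownFields()
	if err := dec.Decode(v); err != nil {
		return fmt.Errorf("%s: %w", context, err)
	}
	return nil
}

func main() {
	var spec struct {
		Query string `json:"query"`
	}
	err := UnmarshalJSONWithContext([]byte(`{"bad":"field"}`), &spec, "PromQL spec")
	fmt.Println(err) // PromQL spec: json: unknown field "bad"
}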
@@ -366,45 +366,6 @@ func (q QueryBuilderTraceOperator) Copy() QueryBuilderTraceOperator {
	return c
}

// UnmarshalJSON implements custom JSON unmarshaling to disallow unknown fields
func (q *QueryBuilderTraceOperator) UnmarshalJSON(data []byte) error {
	// Define a type alias to avoid infinite recursion
	type Alias QueryBuilderTraceOperator

	var temp Alias
	// Use UnmarshalJSONWithContext for better error messages
	if err := UnmarshalJSONWithContext(data, &temp, "query spec"); err != nil {
		return err
	}

	// Copy the decoded values back to the original struct
	*q = QueryBuilderTraceOperator(temp)

	// Normalize the query after unmarshaling
	q.Normalize()
	return nil
}
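The `type Alias QueryBuilderTraceOperator` line is the standard Go device for implementing UnmarshalJSON without recursing into itself: the alias shares the struct's fields but none of its methods, so decoding into it terminates. A self-contained sketch of the same pattern:

package main

import (
	"encoding/json"
	"fmt"
)

type Point struct{ X, Y int }

// Without the alias, calling json.Unmarshal on a Point inside this method
// would re-enter UnmarshalJSON and recurse forever. The alias type has the
// same fields but no UnmarshalJSON method, so the default decoder is used.
func (p *Point) UnmarshalJSON(data []byte) error {
	type alias Point
	var tmp alias
	if err := json.Unmarshal(data, &tmp); err != nil {
		return err
	}
	*p = Point(tmp)
	return nil
}

func main() {
	var p Point
	_ = json.Unmarshal([]byte(`{"X":1,"Y":2}`), &p)
	fmt.Println(p) // {1 2}
}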
// Normalize normalizes all the field keys in the query
func (q *QueryBuilderTraceOperator) Normalize() {
	// normalize select fields
	for idx := range q.SelectFields {
		q.SelectFields[idx].Normalize()
	}

	// normalize group by fields
	for idx := range q.GroupBy {
		q.GroupBy[idx].Normalize()
	}

	// normalize order by fields
	for idx := range q.Order {
		q.Order[idx].Key.Normalize()
	}
}

// ValidateUniqueTraceOperator ensures only one trace operator exists in queries
func ValidateUniqueTraceOperator(queries []QueryEnvelope) error {
	traceOperatorCount := 0
@@ -51,118 +51,63 @@ func (f TelemetryFieldKey) String() string {
	return sb.String()
}

func (f TelemetryFieldKey) Text() string {
	return TelemetryFieldKeyToText(&f)
}

// Normalize parses and normalizes a TelemetryFieldKey by extracting
// the field context and data type from the field name if they are not already specified.
// This function modifies the key in place.
//
// Example:
//
//	key := &TelemetryFieldKey{Name: "resource.service.name:string"}
//	key.Normalize()
//	// Result: Name: "service.name", FieldContext: FieldContextResource, FieldDataType: FieldDataTypeString
func (f *TelemetryFieldKey) Normalize() {
	// Step 1: Parse data type from the right (after the last ":") if not already specified
	if f.FieldDataType == FieldDataTypeUnspecified {
		if colonIdx := strings.LastIndex(f.Name, ":"); colonIdx != -1 {
			potentialDataType := f.Name[colonIdx+1:]
			if dt, ok := fieldDataTypes[potentialDataType]; ok && dt != FieldDataTypeUnspecified {
				f.FieldDataType = dt
				f.Name = f.Name[:colonIdx]
			}
		}
	}

	// Step 2: Parse field context from the left if not already specified
	if f.FieldContext == FieldContextUnspecified {
		if dotIdx := strings.Index(f.Name, "."); dotIdx != -1 {
			potentialContext := f.Name[:dotIdx]
			if fc, ok := fieldContexts[potentialContext]; ok && fc != FieldContextUnspecified {
				f.Name = f.Name[dotIdx+1:]
				f.FieldContext = fc

				// Step 2a: Handle special case for log.body.* fields
				if f.FieldContext == FieldContextLog && strings.HasPrefix(f.Name, BodyJSONStringSearchPrefix) {
					f.FieldContext = FieldContextBody
					f.Name = strings.TrimPrefix(f.Name, BodyJSONStringSearchPrefix)
				}
			}
		}
	}
}
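A short usage sketch of Normalize on a few representative keys; the telemetrytypes import path below is an assumption for illustration:

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes" // assumed import path
)

func main() {
	keys := []telemetrytypes.TelemetryFieldKey{
		{Name: "resource.service.name:string"}, // context and data type both inline
		{Name: "log.body.status_code:int"},     // log.body.* is rewritten to the body context
		{Name: "http.status.code"},             // no recognized context prefix: name kept whole
	}
	for i := range keys {
		keys[i].Normalize()
		fmt.Printf("name=%q ctx=%s type=%s\n",
			keys[i].Name, keys[i].FieldContext.StringValue(), keys[i].FieldDataType.StringValue())
	}
}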
// GetFieldKeyFromKeyText returns a TelemetryFieldKey from a key text.
// The key text is expected to be in the format of `fieldContext.fieldName:fieldDataType` in the search query.
// Both fieldContext and :fieldDataType are optional.
// fieldName can contain dots and can start with a dot (e.g., ".http_code").
// Special cases:
// - When key exactly matches a field context name (e.g., "body", "attribute"), use unspecified context
// - When key starts with "body." prefix, use "body" as context with remainder as field name
func GetFieldKeyFromKeyText(key string) TelemetryFieldKey {
	var explicitFieldDataType FieldDataType = FieldDataTypeUnspecified
	var fieldName string

	// Step 1: Parse data type from the right (after the last ":")
	var keyWithoutDataType string
	if colonIdx := strings.LastIndex(key, ":"); colonIdx != -1 {
		potentialDataType := key[colonIdx+1:]
		if dt, ok := fieldDataTypes[potentialDataType]; ok && dt != FieldDataTypeUnspecified {
			explicitFieldDataType = dt
			keyWithoutDataType = key[:colonIdx]
		} else {
			// No valid data type found, treat the entire key as the field name
			keyWithoutDataType = key
		}
	} else {
		keyWithoutDataType = key
	}

	// Step 2: Parse field context from the left
	if dotIdx := strings.Index(keyWithoutDataType, "."); dotIdx != -1 {
		potentialContext := keyWithoutDataType[:dotIdx]
		if fc, ok := fieldContexts[potentialContext]; ok && fc != FieldContextUnspecified {
			fieldName = keyWithoutDataType[dotIdx+1:]

			// Step 2a: Handle special case for log.body.* fields
			if fc == FieldContextLog && strings.HasPrefix(fieldName, BodyJSONStringSearchPrefix) {
				fc = FieldContextBody
				fieldName = strings.TrimPrefix(fieldName, BodyJSONStringSearchPrefix)
			}

			return TelemetryFieldKey{
				Name:          fieldName,
				FieldContext:  fc,
				FieldDataType: explicitFieldDataType,
			}
		}
	}

	// Step 3: No context found, entire key is the field name
	return TelemetryFieldKey{
		Name:          keyWithoutDataType,
		FieldContext:  FieldContextUnspecified,
		FieldDataType: explicitFieldDataType,
	}
}

func TelemetryFieldKeyToText(key *TelemetryFieldKey) string {
	var sb strings.Builder
	if key.FieldContext != FieldContextUnspecified {
		sb.WriteString(key.FieldContext.StringValue())
		sb.WriteString(".")
	}
	sb.WriteString(key.Name)
	if key.FieldDataType != FieldDataTypeUnspecified {
		sb.WriteString(":")
		sb.WriteString(key.FieldDataType.StringValue())
	}
	return sb.String()
}
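The parse order matters: the data type is stripped from the right before the context is read from the left, so a key like body.tags:[]string resolves cleanly. A small usage sketch (the import path is an assumption):

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes" // assumed import path
)

func main() {
	for _, text := range []string{"body.tags:[]string", "log.body.status", ".http_code:int"} {
		key := telemetrytypes.GetFieldKeyFromKeyText(text)
		fmt.Printf("%q -> name=%q ctx=%s type=%s\n",
			text, key.Name, key.FieldContext.StringValue(), key.FieldDataType.StringValue())
	}
}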
func FieldKeyToMaterializedColumnName(key *TelemetryFieldKey) string {
@@ -83,314 +83,12 @@ func TestGetFieldKeyFromKeyText(t *testing.T) {
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for log.body.status - special case: log.body.* resolves to the body context
	{
		keyText: "log.body.status",
		expected: TelemetryFieldKey{
			Name:          "status",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for log.body. with data type
	{
		keyText: "log.body.status_code:int",
		expected: TelemetryFieldKey{
			Name:          "status_code",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeNumber,
		},
	},
	// Test case for log.body. with nested field name
	{
		keyText: "log.body.http.status.code",
		expected: TelemetryFieldKey{
			Name:          "http.status.code",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for body. prefix - should use body as context
	{
		keyText: "body.http.status.code:int",
		expected: TelemetryFieldKey{
			Name:          "http.status.code",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeNumber,
		},
	},
	// Test case for body. prefix without data type - should use body as context
	{
		keyText: "body.status",
		expected: TelemetryFieldKey{
			Name:          "status",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for body. prefix with array data type - should use body as context
	{
		keyText: "body.tags:[]string",
		expected: TelemetryFieldKey{
			Name:          "tags",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeArrayString,
		},
	},
	// Test case for body. prefix with array data type (int64)
	{
		keyText: "body.ids:[]int64",
		expected: TelemetryFieldKey{
			Name:          "ids",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeArrayInt64,
		},
	},
	// Test case for body. prefix with nested field and array data type
	{
		keyText: "body.http.headers:[]string",
		expected: TelemetryFieldKey{
			Name:          "http.headers",
			FieldContext:  FieldContextBody,
			FieldDataType: FieldDataTypeArrayString,
		},
	},
	// Test case for just "body" - should use unspecified context with body as field name
	{
		keyText: "body",
		expected: TelemetryFieldKey{
			Name:          "body",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for just "body" with data type
	{
		keyText: "body:string",
		expected: TelemetryFieldKey{
			Name:          "body",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeString,
		},
	},
	// Test case for log.body (without trailing dot) - should keep log context
	{
		keyText: "log.body",
		expected: TelemetryFieldKey{
			Name:          "body",
			FieldContext:  FieldContextLog,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for log.body with data type - should keep log context
	{
		keyText: "log.body:string",
		expected: TelemetryFieldKey{
			Name:          "body",
			FieldContext:  FieldContextLog,
			FieldDataType: FieldDataTypeString,
		},
	},
	// Test case for field name with dots and no context
	{
		keyText: "http.status.code",
		expected: TelemetryFieldKey{
			Name:          "http.status.code",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for field name with dots and data type but no context
	{
		keyText: "http.status.code:int",
		expected: TelemetryFieldKey{
			Name:          "http.status.code",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeNumber,
		},
	},
	// Test case for just field name
	{
		keyText: "fieldName",
		expected: TelemetryFieldKey{
			Name:          "fieldName",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for just field name with data type
	{
		keyText: "fieldName:string",
		expected: TelemetryFieldKey{
			Name:          "fieldName",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeString,
		},
	},
	// Test case for field starting with dot
	{
		keyText: ".http_code",
		expected: TelemetryFieldKey{
			Name:          ".http_code",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeUnspecified,
		},
	},
	// Test case for field starting with dot and with data type
	{
		keyText: ".http_code:int",
		expected: TelemetryFieldKey{
			Name:          ".http_code",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeNumber,
		},
	},
	// Test case for field starting with dot and nested field name
	{
		keyText: ".http.status.code:int",
		expected: TelemetryFieldKey{
			Name:          ".http.status.code",
			FieldContext:  FieldContextUnspecified,
			FieldDataType: FieldDataTypeNumber,
		},
	},
}

for _, testCase := range testCases {
	result := GetFieldKeyFromKeyText(testCase.keyText)
	if !reflect.DeepEqual(result, testCase.expected) {
		t.Errorf("For key '%s': expected %v, got %v", testCase.keyText, testCase.expected, result)
		t.Errorf("expected %v, got %v", testCase.expected, result)
	}
}
}
func TestNormalize(t *testing.T) {
	testCases := []struct {
		name     string
		input    TelemetryFieldKey
		expected TelemetryFieldKey
	}{
		{
			name: "Normalize key with context and data type in name",
			input: TelemetryFieldKey{
				Name: "resource.service.name:string",
			},
			expected: TelemetryFieldKey{
				Name:          "service.name",
				FieldContext:  FieldContextResource,
				FieldDataType: FieldDataTypeString,
			},
		},
		{
			name: "Normalize key with existing context and data type",
			input: TelemetryFieldKey{
				Name:          "service.name",
				FieldContext:  FieldContextResource,
				FieldDataType: FieldDataTypeString,
			},
			expected: TelemetryFieldKey{
				Name:          "service.name",
				FieldContext:  FieldContextResource,
				FieldDataType: FieldDataTypeString,
			},
		},
		{
			name: "Normalize body field",
			input: TelemetryFieldKey{
				Name: "body.status:int",
			},
			expected: TelemetryFieldKey{
				Name:          "status",
				FieldContext:  FieldContextBody,
				FieldDataType: FieldDataTypeNumber,
			},
		},
		{
			name: "Normalize log.body.* field",
			input: TelemetryFieldKey{
				Name: "log.body.status",
			},
			expected: TelemetryFieldKey{
				Name:          "status",
				FieldContext:  FieldContextBody,
				FieldDataType: FieldDataTypeUnspecified,
			},
		},
		{
			name: "Normalize field with no context",
			input: TelemetryFieldKey{
				Name: "http.status.code:int",
			},
			expected: TelemetryFieldKey{
				Name:          "http.status.code",
				FieldContext:  FieldContextUnspecified,
				FieldDataType: FieldDataTypeNumber,
			},
		},
		{
			name: "Normalize exact body keyword",
			input: TelemetryFieldKey{
				Name: "body",
			},
			expected: TelemetryFieldKey{
				Name:          "body",
				FieldContext:  FieldContextUnspecified,
				FieldDataType: FieldDataTypeUnspecified,
			},
		},
		{
			name: "Normalize span field",
			input: TelemetryFieldKey{
				Name: "span.kind:string",
			},
			expected: TelemetryFieldKey{
				Name:          "kind",
				FieldContext:  FieldContextSpan,
				FieldDataType: FieldDataTypeString,
			},
		},
		{
			name: "Normalize attribute field",
			input: TelemetryFieldKey{
				Name: "attribute.http.method",
			},
			expected: TelemetryFieldKey{
				Name:          "http.method",
				FieldContext:  FieldContextAttribute,
				FieldDataType: FieldDataTypeUnspecified,
			},
		},
		{
			name: "Normalize field starting with dot",
			input: TelemetryFieldKey{
				Name: ".http_code:int",
			},
			expected: TelemetryFieldKey{
				Name:          ".http_code",
				FieldContext:  FieldContextUnspecified,
				FieldDataType: FieldDataTypeNumber,
			},
		},
		{
			name: "Normalize array data type field",
			input: TelemetryFieldKey{
				Name: "body.tags:[]string",
			},
			expected: TelemetryFieldKey{
				Name:          "tags",
				FieldContext:  FieldContextBody,
				FieldDataType: FieldDataTypeArrayString,
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			key := tc.input
			key.Normalize()
			if !reflect.DeepEqual(key, tc.expected) {
				t.Errorf("Expected %v, got %v", tc.expected, key)
			}
		})
	}
}
@@ -2,7 +2,6 @@ package variables

import (
	"fmt"
	"sort"
	"strconv"
	"strings"

@@ -36,22 +35,19 @@ func (e *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol
type variableReplacementVisitor struct {
	variables map[string]qbtypes.VariableItem
	errors    []string
	warnings  []string
}

// specialSkipMarker is used to indicate that a condition should be removed
const specialSkipMarker = "__SKIP_CONDITION__"

// ReplaceVariablesInExpression takes a filter expression and returns it with variables replaced
// Also returns any warnings generated during the replacement process.
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, []string, error) {
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, error) {
	input := antlr.NewInputStream(expression)
	lexer := grammar.NewFilterQueryLexer(input)

	visitor := &variableReplacementVisitor{
		variables: variables,
		errors:    []string{},
		warnings:  []string{},
	}

	lexerErrorListener := NewErrorListener()
@@ -67,21 +63,21 @@ func ReplaceVariablesInExpression(expression string, variables map[string]qbtype
	tree := parser.Query()

	if len(parserErrorListener.SyntaxErrors) > 0 {
		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
	}

	result := visitor.Visit(tree).(string)

	if len(visitor.errors) > 0 {
		return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
	}

	// If the entire expression should be skipped, return empty string
	if result == specialSkipMarker {
		return "", visitor.warnings, nil
		return "", nil
	}

	return result, visitor.warnings, nil
	return result, nil
}
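A sketch of a caller after the warnings return value was dropped: an empty replaced expression means the condition referenced an __all__ variable and the filter should be omitted entirely. The import paths below are assumptions for illustration:

package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" // assumed import path
	"github.com/SigNoz/signoz/pkg/variables"                                           // assumed import path
)

func main() {
	vars := map[string]qbtypes.VariableItem{
		"environment": {Type: qbtypes.DynamicVariableType, Value: "__all__"},
	}
	// Mirrors the "embedded variable with __all__ value skips condition" test below.
	replaced, err := variables.ReplaceVariablesInExpression("cluster_name = '$environment-xyz'", vars)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	if replaced == "" {
		fmt.Println("condition skipped: variable resolved to __all__")
		return
	}
	fmt.Println("filter:", replaced)
}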
// Visit dispatches to the specific visit method based on node type
@@ -482,32 +478,11 @@ func (v *variableReplacementVisitor) VisitValue(ctx *grammar.ValueContext) any {
		// Replace variable with its value
		return v.formatVariableValue(varItem.Value)
	}

	// did not find an exact match; check if it's a composed string like "$env-xyz"
	interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
	if hasAllValue {
		return specialSkipMarker
	}
	if interpolated != originalValue {
		return v.formatVariableValue(interpolated)
	}

	// No variables found, return original
	return originalValue
}

	// Check if the quoted text contains embedded variables (not starting with $)
	if ctx.QUOTED_TEXT() != nil && strings.Contains(originalValue, "$") {
		interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
		if hasAllValue {
			return specialSkipMarker
		}
		return v.formatVariableValue(interpolated)
	}

	// Return original value if not a variable or variable not found
	// If it was quoted text and not a variable, return with quotes
	if ctx.QUOTED_TEXT() != nil {
	if ctx.QUOTED_TEXT() != nil && !strings.HasPrefix(originalValue, "$") {
		return ctx.QUOTED_TEXT().GetText()
	}
	return originalValue
@@ -542,82 +517,6 @@ func (v *variableReplacementVisitor) VisitKey(ctx *grammar.KeyContext) any {
	return keyText
}

// interpolateVariablesInString finds and replaces variable references within a string
// by checking against actual variable names in the variables map.
// Returns the interpolated string and a boolean indicating if any variable had __all__ value.
func (v *variableReplacementVisitor) interpolateVariablesInString(s string) (string, bool) {
	result := s

	varNames := make([]string, 0, len(v.variables)*2)
	for name := range v.variables {
		varNames = append(varNames, name)
		if !strings.HasPrefix(name, "$") {
			varNames = append(varNames, "$"+name)
		}
	}

	sort.Slice(varNames, func(i, j int) bool {
		return len(varNames[i]) > len(varNames[j])
	})

	for _, varName := range varNames {
		// ensure we're looking for $varname pattern
		searchPattern := varName
		if !strings.HasPrefix(searchPattern, "$") {
			searchPattern = "$" + varName
		}

		if strings.Contains(result, searchPattern) {
			// direct lookup
			varItem, ok := v.variables[varName]
			if !ok {
				// Try without $ prefix
				varItem, ok = v.variables[strings.TrimPrefix(varName, "$")]
			}

			if ok {
				// special check for __all__ value
				if varItem.Type == qbtypes.DynamicVariableType {
					if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
						return "", true
					}
				}

				// format the replacement value (unquoted for string interpolation)
				replacement := v.formatVariableValueUnquoted(varItem.Value, strings.TrimPrefix(varName, "$"))
				result = strings.ReplaceAll(result, searchPattern, replacement)
			}
		}
	}

	return result, false
}

// formatVariableValueUnquoted returns a string representation of the value for string interpolation.
// For multi-select (array) values, it takes the first value and adds a warning.
func (v *variableReplacementVisitor) formatVariableValueUnquoted(value any, varName string) string {
	switch val := value.(type) {
	case []string:
		if len(val) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, val[0]))
		}
		if len(val) > 0 {
			return val[0]
		}
		return ""
	case []any:
		if len(val) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
		}
		if len(val) > 0 {
			return fmt.Sprintf("%v", val[0])
		}
		return ""
	default:
		return fmt.Sprintf("%v", val)
	}
}
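The descending-length sort in interpolateVariablesInString is what makes overlapping variable names safe. A self-contained demonstration of why: replacing $env before $environment would corrupt the longer reference.

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	vals := map[string]string{"env": "dev", "environment": "production"}
	names := make([]string, 0, len(vals))
	for n := range vals {
		names = append(names, n)
	}
	// Longest name first, so "$environment" is consumed before "$env" can match inside it.
	sort.Slice(names, func(i, j int) bool { return len(names[i]) > len(names[j]) })

	s := "name = '$env-$environment'"
	for _, n := range names {
		s = strings.ReplaceAll(s, "$"+n, vals[n])
	}
	fmt.Println(s) // name = 'dev-production'
}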
// formatVariableValue formats a variable value for inclusion in the expression
func (v *variableReplacementVisitor) formatVariableValue(value any) string {
	switch val := value.(type) {
@@ -421,107 +421,11 @@ func TestReplaceVariablesInExpression(t *testing.T) {
			},
			expected: "message NOT CONTAINS 'debug'",
		},
		{
			name:       "variable composed with suffix in value",
			expression: "cluster_name = '$environment-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			expected: "cluster_name = 'prod-xyz'",
		},
		{
			name:       "variable composed with suffix without quotes",
			expression: "cluster_name = $environment-xyz",
			variables: map[string]qbtypes.VariableItem{
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "staging",
				},
			},
			expected: "cluster_name = 'staging-xyz'",
		},
		{
			name:       "variable in LIKE pattern with suffix",
			expression: "service.name LIKE '$env%'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			expected: "service.name LIKE 'prod%'",
		},
		{
			name:       "variable composed with prefix and suffix",
			expression: "label = 'prefix-$var-suffix'",
			variables: map[string]qbtypes.VariableItem{
				"var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "middle",
				},
			},
			expected: "label = 'prefix-middle-suffix'",
		},
		{
			name:       "multiple variables in one string",
			expression: "path = '$region-$env-cluster'",
			variables: map[string]qbtypes.VariableItem{
				"region": {
					Type:  qbtypes.DynamicVariableType,
					Value: "us-west",
				},
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "prod",
				},
			},
			expected: "path = 'us-west-prod-cluster'",
		},
		{
			name:       "embedded variable with __all__ value skips condition",
			expression: "cluster_name = '$environment-xyz'",
			variables: map[string]qbtypes.VariableItem{
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			expected: "",
		},
		{
			name:       "variable with underscore composed with suffix",
			expression: "name = '$my_var-test'",
			variables: map[string]qbtypes.VariableItem{
				"my_var": {
					Type:  qbtypes.DynamicVariableType,
					Value: "hello",
				},
			},
			expected: "name = 'hello-test'",
		},
		{
			name:       "similar variable names - longer matches first",
			expression: "name = '$env-$environment'",
			variables: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "dev",
				},
				"environment": {
					Type:  qbtypes.DynamicVariableType,
					Value: "production",
				},
			},
			expected: "name = 'dev-production'",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, _, err := ReplaceVariablesInExpression(tt.expression, tt.variables)
			result, err := ReplaceVariablesInExpression(tt.expression, tt.variables)

			if tt.wantErr {
				assert.Error(t, err)
@@ -377,7 +377,7 @@ def idp_login(driver: webdriver.Chrome) -> Callable[[str, str], None]:
    # Fill the email in username field
    username_field = wait.until(EC.element_to_be_clickable((By.ID, "username")))
    username_field.send_keys(email)

    # Fill the password in password field
    password_field = wait.until(EC.element_to_be_clickable((By.ID, "password")))
    password_field.send_keys(password)

@@ -456,32 +456,3 @@ def assert_scalar_column_order(
        f"{context}: Column {column_index} order mismatch. "
        f"Expected {expected_values}, got {actual_values}"
    )


def make_substitute_vars_request(
    signoz: "types.SigNoz",
    token: str,
    start_ms: int,
    end_ms: int,
    queries: List[Dict],
    variables: Dict[str, Any],
    timeout: int = QUERY_TIMEOUT,
) -> requests.Response:
    return requests.post(
        signoz.self.host_configs["8080"].get("/api/v5/substitute_vars"),
        timeout=timeout,
        headers={"authorization": f"Bearer {token}"},
        json={
            "schemaVersion": "v1",
            "start": start_ms,
            "end": end_ms,
            "requestType": "time_series",
            "compositeQuery": {"queries": queries},
            "variables": variables,
            "formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
        },
    )


def sum_series_values(series_values: List[Dict]) -> float:
    return sum(point["value"] for point in series_values)
@@ -66,7 +66,7 @@ def signoz(  # pylint: disable=too-many-arguments,too-many-positional-arguments
            "SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
            "SIGNOZ_GATEWAY_URL": gateway.container_configs["8080"].base(),
            "SIGNOZ_TOKENIZER_JWT_SECRET": "secret",
            "SIGNOZ_GLOBAL_INGESTION__URL": "https://ingest.test.signoz.cloud",
            "SIGNOZ_GLOBAL_INGESTION__URL": "https://ingest.test.signoz.cloud"
        }
        | sqlstore.env
        | clickhouse.env
@@ -2,7 +2,6 @@ from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List

import pytest
import requests

from fixtures import types
@@ -12,9 +11,7 @@ from fixtures.querier import (
    assert_minutely_bucket_values,
    find_named_result,
    index_series_by_label,
    make_query_request,
)
from src.querier.util import assert_identical_query_response

def test_logs_list(
@@ -399,122 +396,6 @@ def test_logs_list(
    assert "d-001" in values


@pytest.mark.parametrize(
    "order_by_context,expected_order",
    ####
    # Tests:
    # 1. Query logs ordered by attribute.service.name descending
    # 2. Query logs ordered by resource.service.name descending
    # 3. Query logs ordered by service.name descending
    ###
    [
        pytest.param("attribute", ["log-002", "log-001", "log-004", "log-003"]),
        pytest.param("resource", ["log-003", "log-004", "log-001", "log-002"]),
        pytest.param("", ["log-002", "log-001", "log-003", "log-004"]),
    ],
)
def test_logs_list_with_order_by(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
    order_by_context: str,
    expected_order: List[str],
) -> None:
    """
    Setup:
        Insert 4 logs with service.name in attributes and resources
    """

    attribute_resource_pair = [
        [{"id": "log-001", "service.name": "c"}, {}],
        [{"id": "log-002", "service.name": "d"}, {}],
        [{"id": "log-003"}, {"service.name": "b"}],
        [{"id": "log-004"}, {"service.name": "a"}],
    ]
    insert_logs(
        [
            Logs(
                timestamp=datetime.now(tz=timezone.utc) - timedelta(seconds=3),
                attributes=attribute_resource_pair[i][0],
                resources=attribute_resource_pair[i][1],
                body="Log with DEBUG severity",
                severity_text="DEBUG",
            )
            for i in range(4)
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    query = {
        "type": "builder_query",
        "spec": {
            "name": "A",
            "signal": "logs",
            "order": [
                {
                    "key": {
                        "name": "service.name",
                        "fieldContext": order_by_context,
                    },
                    "direction": "desc",
                }
            ],
        },
    }

    query_with_inline_context = {
        "type": "builder_query",
        "spec": {
            "name": "A",
            "signal": "logs",
            "order": [
                {
                    "key": {
                        "name": f"{order_by_context + '.' if order_by_context else ''}service.name",
                    },
                    "direction": "desc",
                }
            ],
        },
    }

    response = make_query_request(
        signoz,
        token,
        start_ms=int(
            (datetime.now(tz=timezone.utc) - timedelta(minutes=1)).timestamp() * 1000
        ),
        end_ms=int(datetime.now(tz=timezone.utc).timestamp() * 1000),
        request_type="raw",
        queries=[query],
    )

    # Verify that both queries return the same results when the context is
    # specified inline in the key name instead of via fieldContext
    response_with_inline_context = make_query_request(
        signoz,
        token,
        start_ms=int(
            (datetime.now(tz=timezone.utc) - timedelta(minutes=1)).timestamp() * 1000
        ),
        end_ms=int(datetime.now(tz=timezone.utc).timestamp() * 1000),
        request_type="raw",
        queries=[query_with_inline_context],
    )

    assert_identical_query_response(response, response_with_inline_context)

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    results = response.json()["data"]["data"]["results"]
    rows = results[0]["rows"]
    ids = [row["data"]["attributes_string"].get("id", "") for row in rows]

    assert ids == expected_order
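Server-side, the inline spelling exercised above goes through the same key parsing as the explicit fieldContext form, which is why the test expects identical responses. A sketch of that equivalence (import path assumed):

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes" // assumed import path
)

func main() {
	inline := telemetrytypes.GetFieldKeyFromKeyText("resource.service.name")
	explicit := telemetrytypes.TelemetryFieldKey{
		Name:         "service.name",
		FieldContext: telemetrytypes.FieldContextResource,
	}
	// Both spellings resolve to the same (name, context) pair.
	fmt.Println(inline.Name == explicit.Name && inline.FieldContext == explicit.FieldContext) // true
}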
def test_logs_time_series_count(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
@@ -944,44 +825,6 @@ def test_logs_time_series_count(
        },
    )

    response_with_inline_context = make_query_request(
        signoz,
        token,
        start_ms=int(
            (
                datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
                - timedelta(minutes=5)
            ).timestamp()
            * 1000
        ),
        end_ms=int(
            datetime.now(tz=timezone.utc).replace(second=0, microsecond=0).timestamp()
            * 1000
        ),
        request_type="time_series",
        queries=[
            {
                "type": "builder_query",
                "spec": {
                    "name": "A",
                    "signal": "logs",
                    "stepInterval": 60,
                    "disabled": False,
                    "groupBy": [
                        {
                            "name": "resource.host.name:string",
                        }
                    ],
                    "order": [{"key": {"name": "host.name"}, "direction": "desc"}],
                    "having": {"expression": ""},
                    "aggregations": [{"expression": "count()"}],
                },
            }
        ],
    )

    assert_identical_query_response(response, response_with_inline_context)

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

@@ -2,7 +2,6 @@ from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, Dict, List

import pytest
import requests

from fixtures import types
@@ -11,10 +10,8 @@ from fixtures.querier import (
    assert_minutely_bucket_values,
    find_named_result,
    index_series_by_label,
    make_query_request,
)
from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCode
from src.querier.util import assert_identical_query_response

def test_traces_list(
@@ -218,59 +215,6 @@ def test_traces_list(
        },
    )

    # Query results with context appended to key names
    response_with_inline_context = make_query_request(
        signoz,
        token,
        start_ms=int(
            (datetime.now(tz=timezone.utc) - timedelta(minutes=5)).timestamp() * 1000
        ),
        end_ms=int(datetime.now(tz=timezone.utc).timestamp() * 1000),
        request_type="raw",
        queries=[
            {
                "type": "builder_query",
                "spec": {
                    "name": "A",
                    "signal": "traces",
                    "disabled": False,
                    "limit": 10,
                    "offset": 0,
                    "order": [
                        {"key": {"name": "timestamp"}, "direction": "desc"},
                    ],
                    "selectFields": [
                        {
                            "name": "resource.service.name",
                            "fieldDataType": "string",
                            "signal": "traces",
                        },
                        {
                            "name": "span.name:string",
                            "signal": "traces",
                        },
                        {
                            "name": "span.duration_nano",
                            "signal": "traces",
                        },
                        {
                            "name": "span.http_method",
                            "signal": "traces",
                        },
                        {
                            "name": "span.response_status_code",
                            "signal": "traces",
                        },
                    ],
                    "having": {"expression": ""},
                    "aggregations": [{"expression": "count()"}],
                },
            }
        ],
    )
    assert_identical_query_response(response, response_with_inline_context)

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

@@ -476,236 +420,6 @@ def test_traces_list(
    assert set(values) == set(["topic-service", "http-service"])

@pytest.mark.parametrize(
    "order_by,aggregation_alias,expected_status",
    [
        # Case 1a: count by count()
        pytest.param({"name": "count()"}, "count_", HTTPStatus.OK),
        # Case 1b: count by count() with alias span.count_
        pytest.param({"name": "count()"}, "span.count_", HTTPStatus.OK),
        # Case 2a: count by count() with context specified in the key
        pytest.param(
            {"name": "count()", "fieldContext": "span"}, "count_", HTTPStatus.OK
        ),
        # Case 2b: count by count() with context specified in the key with alias span.count_
        pytest.param(
            {"name": "count()", "fieldContext": "span"}, "span.count_", HTTPStatus.OK
        ),
        # Case 3a: count by span.count() with context also specified in fieldContext [BAD REQUEST]
        pytest.param(
            {"name": "span.count()", "fieldContext": "span"},
            "count_",
            HTTPStatus.BAD_REQUEST,
        ),
        # Case 3b: count by span.count() with context also specified in fieldContext and alias span.count_ [BAD REQUEST]
        pytest.param(
            {"name": "span.count()", "fieldContext": "span"},
            "span.count_",
            HTTPStatus.BAD_REQUEST,
        ),
        # Case 4a: count by span.count() with context specified only in the key name
        pytest.param(
            {"name": "span.count()", "fieldContext": ""}, "count_", HTTPStatus.OK
        ),
        # Case 4b: count by span.count() with context specified only in the key name and alias span.count_
        pytest.param(
            {"name": "span.count()", "fieldContext": ""}, "span.count_", HTTPStatus.OK
        ),
        # Case 5a: count by count_
        pytest.param({"name": "count_"}, "count_", HTTPStatus.OK),
        # Case 5b: count by count_ with alias count_
        pytest.param({"name": "count_"}, "count_", HTTPStatus.OK),
        # Case 6a: count by span.count_
        pytest.param({"name": "span.count_"}, "count_", HTTPStatus.OK),
        # Case 6b: count by span.count_ with alias span.count_
        pytest.param(
            {"name": "span.count_"}, "span.count_", HTTPStatus.BAD_REQUEST
        ),  # THIS SHOULD BE OK BUT FAILS DUE TO LIMITATION IN CURRENT IMPLEMENTATION
        # Case 7a: count by span.count_ and context specified in the key [BAD REQUEST]
        pytest.param(
            {"name": "span.count_", "fieldContext": "span"},
            "count_",
            HTTPStatus.BAD_REQUEST,
        ),
        # Case 7b: count by span.count_ and context specified in the key with alias span.count_
        pytest.param(
            {"name": "span.count_", "fieldContext": "span"},
            "span.count_",
            HTTPStatus.OK,
        ),
    ],
)
def test_traces_aggregate_order_by_count(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_traces: Callable[[List[Traces]], None],
    order_by: Dict[str, str],
    aggregation_alias: str,
    expected_status: HTTPStatus,
) -> None:
    """
    Setup:
        Insert 4 traces with different attributes.
        http-service: POST /integration -> SELECT, HTTP PATCH
        topic-service: topic publish

    Tests:
        1. Query the traces count ordered by count(), exercising different
           order-by key spellings and aggregation aliases
    """
    http_service_trace_id = TraceIdGenerator.trace_id()
    http_service_span_id = TraceIdGenerator.span_id()
    http_service_db_span_id = TraceIdGenerator.span_id()
    http_service_patch_span_id = TraceIdGenerator.span_id()
    topic_service_trace_id = TraceIdGenerator.trace_id()
    topic_service_span_id = TraceIdGenerator.span_id()

    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    insert_traces(
        [
            Traces(
                timestamp=now - timedelta(seconds=4),
                duration=timedelta(seconds=3),
                trace_id=http_service_trace_id,
                span_id=http_service_span_id,
                parent_span_id="",
                name="POST /integration",
                kind=TracesKind.SPAN_KIND_SERVER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "http-service",
                    "os.type": "linux",
                    "host.name": "linux-000",
                    "cloud.provider": "integration",
                    "cloud.account.id": "000",
                },
                attributes={
                    "net.transport": "IP.TCP",
                    "http.scheme": "http",
                    "http.user_agent": "Integration Test",
                    "http.request.method": "POST",
                    "http.response.status_code": "200",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=3.5),
                duration=timedelta(seconds=0.5),
                trace_id=http_service_trace_id,
                span_id=http_service_db_span_id,
                parent_span_id=http_service_span_id,
                name="SELECT",
                kind=TracesKind.SPAN_KIND_CLIENT,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "http-service",
                    "os.type": "linux",
                    "host.name": "linux-000",
                    "cloud.provider": "integration",
                    "cloud.account.id": "000",
                },
                attributes={
                    "db.name": "integration",
                    "db.operation": "SELECT",
                    "db.statement": "SELECT * FROM integration",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=3),
                duration=timedelta(seconds=1),
                trace_id=http_service_trace_id,
                span_id=http_service_patch_span_id,
                parent_span_id=http_service_span_id,
                name="HTTP PATCH",
                kind=TracesKind.SPAN_KIND_CLIENT,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "http-service",
                    "os.type": "linux",
                    "host.name": "linux-000",
                    "cloud.provider": "integration",
                    "cloud.account.id": "000",
                },
                attributes={
                    "http.request.method": "PATCH",
                    "http.status_code": "404",
                },
            ),
            Traces(
                timestamp=now - timedelta(seconds=1),
                duration=timedelta(seconds=4),
                trace_id=topic_service_trace_id,
                span_id=topic_service_span_id,
                parent_span_id="",
                name="topic publish",
                kind=TracesKind.SPAN_KIND_PRODUCER,
                status_code=TracesStatusCode.STATUS_CODE_OK,
                status_message="",
                resources={
                    "deployment.environment": "production",
                    "service.name": "topic-service",
                    "os.type": "linux",
                    "host.name": "linux-001",
                    "cloud.provider": "integration",
                    "cloud.account.id": "001",
                },
                attributes={
                    "message.type": "SENT",
                    "messaging.operation": "publish",
                    "messaging.message.id": "001",
                },
            ),
        ]
    )

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    query = {
        "type": "builder_query",
        "spec": {
            "name": "A",
            "signal": "traces",
            "disabled": False,
            "order": [{"key": {"name": "count()"}, "direction": "desc"}],
            "aggregations": [{"expression": "count()", "alias": "count_"}],
        },
    }

    # Query traces count for spans

    query["spec"]["order"][0]["key"] = order_by
    query["spec"]["aggregations"][0]["alias"] = aggregation_alias
    response = make_query_request(
        signoz,
        token,
        start_ms=int(
            (datetime.now(tz=timezone.utc) - timedelta(minutes=5)).timestamp() * 1000
        ),
        end_ms=int(datetime.now(tz=timezone.utc).timestamp() * 1000),
        request_type="time_series",
        queries=[query],
    )

    assert response.status_code == expected_status
    if expected_status != HTTPStatus.OK:
        return
    assert response.json()["status"] == "success"

    results = response.json()["data"]["data"]["results"]
    assert len(results) == 1
    aggregations = results[0]["aggregations"]
    assert len(aggregations) == 1
    series = aggregations[0]["series"]
    assert len(series) == 1
    assert series[0]["values"][0]["value"] == 4
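One plausible reading of case 6b's limitation, assuming the order-by key goes through GetFieldKeyFromKeyText: the "span." prefix is consumed as the field context, so the parsed key name no longer string-matches the alias "span.count_". A sketch (import path assumed):

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes" // assumed import path
)

func main() {
	key := telemetrytypes.GetFieldKeyFromKeyText("span.count_")
	// Parsed name is "count_" with span context, while the aggregation
	// alias in case 6b is the literal string "span.count_".
	fmt.Println(key.Name, key.FieldContext.StringValue()) // count_ span
}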
def test_traces_fill_gaps(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument

@@ -2,6 +2,8 @@ from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, Dict, List, Optional, Tuple

import requests

from fixtures import querier, types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
@@ -22,8 +24,6 @@ metric_values_for_test = {
    "service-d": 10.0,
}

SCALAR_FORMAT_OPTIONS = {"formatTableResultForUI": True, "fillGaps": False}


def generate_logs_with_counts(
    now: datetime,
@@ -85,6 +85,30 @@ def generate_metrics_with_values(
    return metrics


def make_scalar_query_request(
    signoz: types.SigNoz,
    token: str,
    now: datetime,
    queries: List[Dict],
    lookback_minutes: int = 5,
) -> requests.Response:
    return requests.post(
        signoz.self.host_configs["8080"].get("/api/v5/query_range"),
        timeout=5,
        headers={"authorization": f"Bearer {token}"},
        json={
            "schemaVersion": "v1",
            "start": int(
                (now - timedelta(minutes=lookback_minutes)).timestamp() * 1000
            ),
            "end": int(now.timestamp() * 1000),
            "requestType": "scalar",
            "compositeQuery": {"queries": queries},
            "formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
        },
    )

def build_logs_query(
|
||||
name: str = "A",
|
||||
aggregations: Optional[List[str]] = None,
|
||||
@@ -194,16 +218,11 @@ def test_logs_scalar_group_by_single_agg_no_order(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_logs_query(group_by=["service.name"])],
|
||||
request_type="scalar",
|
||||
format_options={"formatTableResultForUI": True, "fillGaps": False},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
@@ -227,16 +246,11 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc(
|
||||
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
|
||||
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
response = querier.make_query_request(
|
||||
response = make_scalar_query_request(
|
||||
signoz,
|
||||
token,
|
||||
start_ms,
|
||||
end_ms,
|
||||
now,
|
||||
[build_logs_query(group_by=["service.name"], order_by=[("count()", "asc")])],
|
||||
request_type="scalar",
|
||||
format_options=SCALAR_FORMAT_OPTIONS,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
@@ -260,16 +274,11 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[build_logs_query(group_by=["service.name"], order_by=[("count()", "desc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -293,20 +302,15 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "asc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -330,20 +334,15 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "desc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -367,13 +366,10 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"],
@@ -381,8 +377,6 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
order_by=[("count()", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -404,13 +398,10 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"],
@@ -418,8 +409,6 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
order_by=[("count_distinct(body)", "desc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -442,20 +431,15 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -479,20 +463,15 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -516,20 +495,15 @@ def test_logs_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -553,16 +527,11 @@ def test_traces_scalar_group_by_single_agg_no_order(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[build_traces_query(group_by=["service.name"])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -586,16 +555,11 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[build_traces_query(group_by=["service.name"], order_by=[("count()", "asc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -619,16 +583,11 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[build_traces_query(group_by=["service.name"], order_by=[("count()", "desc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -652,20 +611,15 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "asc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -689,20 +643,15 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_desc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "desc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -726,13 +675,10 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_traces_query(
group_by=["service.name"],
@@ -740,8 +686,6 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
order_by=[("count()", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -763,20 +707,15 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_traces_query(
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -800,20 +739,15 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_traces_query(
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -837,20 +771,15 @@ def test_traces_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -874,16 +803,11 @@ def test_metrics_scalar_group_by_single_agg_no_order(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[build_metrics_query(group_by=["service.name"])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -912,21 +836,16 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_metrics_query(
group_by=["service.name"],
order_by=[("sum(test.metric)", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -955,21 +874,16 @@ def test_metrics_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_metrics_query(
group_by=["service.name"],
order_by=[("service.name", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -998,13 +912,10 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_metrics_query(
group_by=["service.name"],
@@ -1012,8 +923,6 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
limit=2,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -1037,13 +946,10 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_metrics_query(
group_by=["service.name"],
@@ -1051,8 +957,6 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
limit=3,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
@@ -1076,13 +980,10 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
response = make_scalar_query_request(
signoz,
token,
start_ms,
end_ms,
now,
[
build_metrics_query(
group_by=["service.name"],
@@ -1090,8 +991,6 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
limit=2,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)

assert response.status_code == HTTPStatus.OK
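All of the hunks above feed query specs built by build_logs_query, build_traces_query, or build_metrics_query. Those builders are fixtures this diff never shows; from the keyword arguments at the call sites (group_by, order_by as (key, direction) pairs, limit), a plausible single-signal sketch (every payload key name below is an assumption) could be:

# Hypothetical sketch of a query-builder fixture, inferred from call sites
# such as build_logs_query(group_by=["service.name"],
# order_by=[("count()", "desc")], limit=3). The real fixtures may differ.
from typing import Any, Dict, List, Optional, Tuple


def build_signal_query(
    signal: str,  # "logs", "traces", or "metrics"
    group_by: Optional[List[str]] = None,
    order_by: Optional[List[Tuple[str, str]]] = None,
    limit: Optional[int] = None,
) -> Dict[str, Any]:
    spec: Dict[str, Any] = {
        "signal": signal,
        "aggregations": [{"expression": "count()"}],
    }
    if group_by:
        spec["groupBy"] = [{"name": name} for name in group_by]
    if order_by:
        # Each entry pairs an aggregation expression or grouping key with a
        # direction, e.g. ("count()", "desc") or ("service.name", "asc").
        spec["order"] = [
            {"key": {"name": key}, "direction": direction}
            for key, direction in order_by
        ]
    if limit is not None:
        spec["limit"] = limit
    return {"type": "builder_query", "spec": spec}

The per-signal builders used in the tests would then presumably be thin wrappers that fix the signal argument.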
@@ -1,356 +0,0 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.querier import build_scalar_query, make_substitute_vars_request

LOGS_COUNT_AGG = [{"expression": "count()"}]


def test_substitute_vars_standalone_variable(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "query", "value": "auth-service"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name = 'auth-service'"


def test_substitute_vars_variable_in_string(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "custom", "value": "prod"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "cluster_name = 'prod-xyz'"


def test_substitute_vars_multiple_variables(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service AND env = $environment",
)
],
variables={
"service": {"type": "text", "value": "auth-service"},
"environment": {"type": "query", "value": "production"},
},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name = 'auth-service' AND env = 'production'"


def test_substitute_vars_array_variable(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name IN $services",
)
],
variables={
"services": {"type": "query", "value": ["auth-service", "api-service"]}
},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name IN ['auth-service', 'api-service']"


def test_substitute_vars_like_pattern(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name LIKE '$env%'",
)
],
variables={"env": {"type": "text", "value": "prod"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "service.name LIKE 'prod%'"


def test_substitute_vars_variable_without_quotes(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = $environment-xyz",
)
],
variables={"environment": {"type": "dynamic", "value": "staging"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "cluster_name = 'staging-xyz'"


def test_substitute_vars_all_value_standalone(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "dynamic", "value": "__all__"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

# expression should be empty when __all__ is used
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == ""


def test_substitute_vars_all_value_in_composed_string(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "dynamic", "value": "__all__"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

# expression should be empty when __all__ is used
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == ""


def test_substitute_vars_all_value_partial(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
) -> None:
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

now = datetime.now(tz=timezone.utc)
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_substitute_vars_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service AND env = $env",
)
],
variables={
"service": {"type": "dynamic", "value": "__all__"},
"env": {"type": "dynamic", "value": "production"},
},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

data = response.json()["data"]
queries = data["compositeQuery"]["queries"]
assert len(queries) == 1

# only env condition should remain
filter_expr = queries[0]["spec"]["filter"]["expression"]
assert filter_expr == "env = 'production'"
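The file deleted above pinned down the contract for the __all__ sentinel: any condition referencing a variable whose value is __all__ is dropped from the filter expression instead of being substituted, while the remaining conditions are substituted normally. A toy re-implementation of that contract (purely illustrative: the real server rewrites the parsed expression tree, and this sketch only handles flat "A AND B" expressions and the quoting cases exercised above) could be:

import re
from typing import Any, Dict, List, Union

ALL_SENTINEL = "__all__"


def _render(value: Union[str, List[str]]) -> str:
    # Arrays render as ['a', 'b']; scalar values as 'a'.
    if isinstance(value, list):
        return "[" + ", ".join(f"'{v}'" for v in value) + "]"
    return f"'{value}'"


def substitute(expression: str, variables: Dict[str, Dict[str, Any]]) -> str:
    kept = []
    for cond in expression.split(" AND "):
        names = re.findall(r"\$(\w+)", cond)
        if any(variables.get(n, {}).get("value") == ALL_SENTINEL for n in names):
            continue  # drop the whole condition when a variable is __all__
        for n in names:
            value = variables[n]["value"]
            if re.search(rf"'[^']*\${n}[^']*'", cond):
                # variable composed inside an existing quoted string:
                # '$environment-xyz' -> 'prod-xyz'
                cond = cond.replace(f"${n}", str(value))
            else:
                # standalone variable gains quoting: $service -> 'auth-service'
                cond = cond.replace(f"${n}", _render(value))
        kept.append(cond)
    return " AND ".join(kept)


# Mirrors two of the deleted assertions:
assert substitute(
    "service.name = $service", {"service": {"value": "auth-service"}}
) == "service.name = 'auth-service'"
assert substitute(
    "service.name = $service AND env = $env",
    {"service": {"value": ALL_SENTINEL}, "env": {"value": "production"}},
) == "env = 'production'"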
@@ -1,803 +0,0 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
from fixtures.querier import (
build_scalar_query,
get_all_series,
get_series_values,
make_query_request,
sum_series_values,
)

LOGS_COUNT_AGG = [{"expression": "count()"}]


def test_query_range_with_standalone_variable(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web service log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "query", "value": "auth-service"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1  # auth-service log


def test_query_range_with_variable_in_array(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web service log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name IN $services",
)
],
variables={
"services": {"type": "custom", "value": ["auth-service", "api-service"]}
},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2  # auth-service and api-service logs


def test_query_range_with_variable_composed_in_string(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"cluster_name": "prod-xyz"},
body="Prod cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"cluster_name": "staging-xyz"},
body="Staging cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service3"},
attributes={"cluster_name": "dev-xyz"},
body="Dev cluster log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "dynamic", "value": "prod"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1  # prod-xyz log


def test_query_range_with_variable_in_like_pattern(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "prod-auth"},
attributes={"env": "production"},
body="Prod auth log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "prod-api"},
attributes={"env": "production"},
body="Prod API log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "staging-api"},
attributes={"env": "staging"},
body="Staging API log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name LIKE '$env%'",
)
],
variables={"env": {"type": "text", "value": "prod"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2  # prod-auth and prod-api logs


def test_query_range_with_multiple_variables_in_string(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"path": "us-west-prod-cluster"},
body="US West Prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"path": "us-east-prod-cluster"},
body="US East Prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service3"},
attributes={"path": "eu-west-staging-cluster"},
body="EU West Staging log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="path = '$region-$env-cluster'",
)
],
variables={
"region": {"type": "query", "value": "us-west"},
"env": {"type": "custom", "value": "prod"},
},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1  # us-west-prod-cluster log


def test_query_range_with_variable_prefix_and_suffix(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"label": "prefix-middle-suffix"},
body="Middle label log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"label": "prefix-other-suffix"},
body="Other label log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="label = 'prefix-$var-suffix'",
)
],
variables={"var": {"type": "text", "value": "middle"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1  # prefix-middle-suffix log


def test_query_range_with_variable_without_quotes(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"cluster_name": "staging-xyz"},
body="Staging cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"cluster_name": "prod-xyz"},
body="Prod cluster log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = $environment-xyz",
)
],
variables={"environment": {"type": "custom", "value": "staging"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 1  # staging-xyz log


def test_query_range_time_series_with_group_by(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = []

for i in range(3):
logs.append(
Logs(
timestamp=now - timedelta(minutes=i + 1),
resources={"service.name": "auth-service"},
attributes={"cluster_name": "prod-xyz"},
body=f"Auth service log {i}",
severity_text="INFO",
)
)
logs.append(
Logs(
timestamp=now - timedelta(minutes=i + 1),
resources={"service.name": "api-service"},
attributes={"cluster_name": "prod-xyz"},
body=f"API service log {i}",
severity_text="INFO",
)
)
logs.append(
Logs(
timestamp=now - timedelta(minutes=i + 1),
resources={"service.name": "web-service"},
attributes={"cluster_name": "staging-xyz"},
body=f"Web service log {i}",
severity_text="INFO",
)
)

insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=10)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$env-xyz'",
group_by=[
{
"name": "service.name",
"fieldDataType": "string",
"fieldContext": "resource",
}
],
)
],
variables={"env": {"type": "query", "value": "prod"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

all_series = get_all_series(response.json(), "A")
assert len(all_series) == 2  # auth-service and api-service

# 6 (3 auth-service + 3 api-service)
total_count = sum(sum_series_values(s["values"]) for s in all_series)
assert total_count == 6


def test_query_range_with_different_variable_types(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

# all different variable types
for var_type in ["query", "custom", "text", "dynamic"]:
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service",
)
],
variables={"service": {"type": var_type, "value": "auth-service"}},
)

assert (
response.status_code == HTTPStatus.OK
), f"Failed for variable type: {var_type}"
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert (
total_count == 1
), f"Expected 1 log for variable type {var_type}, got {total_count}"


def test_query_range_with_all_value_standalone(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web service log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

# `__all__`, the filter condition should be removed
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
)
],
variables={"service": {"type": "dynamic", "value": "__all__"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 3


def test_query_range_with_all_value_in_composed_string(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service1"},
attributes={"cluster_name": "prod-xyz"},
body="Prod cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service2"},
attributes={"cluster_name": "staging-xyz"},
body="Staging cluster log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "service3"},
attributes={"cluster_name": "dev-xyz"},
body="Dev cluster log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

# `__all__` in composed string, the filter should be removed
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="cluster_name = '$environment-xyz'",
)
],
variables={"environment": {"type": "dynamic", "value": "__all__"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 3  # all logs should be returned


def test_query_range_with_all_value_partial_filter(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API prod log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "web-service"},
attributes={"env": "staging"},
body="Web staging log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

# `__all__` for service, only env filter should apply
response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name = $service AND env = $env",
)
],
variables={
"service": {"type": "dynamic", "value": "__all__"},
"env": {"type": "dynamic", "value": "production"},
},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2  # prod logs (auth + api)


def test_query_range_with_all_value_in_array(
signoz: types.SigNoz,
create_user_admin: None,  # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
logs: List[Logs] = [
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "auth-service"},
attributes={"env": "production"},
body="Auth service log",
severity_text="INFO",
),
Logs(
timestamp=now - timedelta(minutes=1),
resources={"service.name": "api-service"},
attributes={"env": "production"},
body="API service log",
severity_text="INFO",
),
]
insert_logs(logs)

token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)

response = make_query_request(
signoz,
token,
start_ms,
end_ms,
[
build_scalar_query(
"A",
"logs",
LOGS_COUNT_AGG,
filter_expression="service.name IN $services",
)
],
variables={"services": {"type": "dynamic", "value": "__all__"}},
)

assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"

values = get_series_values(response.json(), "A")
total_count = sum_series_values(values)
assert total_count == 2  # all logs
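The deleted query-range tests lean on get_all_series, get_series_values, and sum_series_values from fixtures.querier. Their definitions are not in this diff; going only by the call sites and by the data.data.results path used in the helper file below, a speculative sketch (the queryName, series, and values/value key names are guesses) might be:

# Speculative sketches inferred from call sites such as
# get_series_values(response.json(), "A") and s["values"]; the real fixtures
# may traverse a different response shape.
from typing import Any, Dict, List


def get_all_series(payload: Dict[str, Any], query_name: str) -> List[Dict[str, Any]]:
    # Walk the query-range response down to the series of one named query.
    for result in payload["data"]["data"]["results"]:
        if result.get("queryName") == query_name:
            return result.get("series", [])
    return []


def get_series_values(payload: Dict[str, Any], query_name: str) -> List[Dict[str, Any]]:
    series = get_all_series(payload, query_name)
    return series[0]["values"] if series else []


def sum_series_values(values: List[Dict[str, Any]]) -> float:
    # Each point presumably carries a numeric "value" field.
    return sum(float(point["value"]) for point in values)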
@@ -1,20 +0,0 @@
from http import HTTPStatus

import requests


def assert_identical_query_response(
response1: requests.Response, response2: requests.Response
) -> None:
"""
Assert that two query responses are identical in status and data.
"""
assert response1.status_code == response2.status_code, "Status codes do not match"
if response1.status_code == HTTPStatus.OK:
assert (
response1.json()["status"] == response2.json()["status"]
), "Response statuses do not match"
assert (
response1.json()["data"]["data"]["results"]
== response2.json()["data"]["data"]["results"]
), "Response data do not match"
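As a usage sketch, a helper like this would presumably compare two runs of an equivalent query, for instance one with inline literals against one with variables (the surrounding names are the fixtures from the files deleted above, so this snippet is illustrative rather than taken verbatim from the repository):

# Hypothetical usage of the deleted helper, reusing make_query_request and
# build_scalar_query from the deleted test files.
response_literal = make_query_request(
    signoz,
    token,
    start_ms,
    end_ms,
    [build_scalar_query("A", "logs", LOGS_COUNT_AGG,
                        filter_expression="service.name = 'auth-service'")],
)
response_variable = make_query_request(
    signoz,
    token,
    start_ms,
    end_ms,
    [build_scalar_query("A", "logs", LOGS_COUNT_AGG,
                        filter_expression="service.name = $service")],
    variables={"service": {"type": "query", "value": "auth-service"}},
)
assert_identical_query_response(response_literal, response_variable)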