Compare commits

...

9 Commits

Author SHA1 Message Date
Srikanth Chekuri
585caa84fe Merge branch 'main' into issue-10008 2026-01-23 20:11:48 +05:30
srikanthccv
814fd40a1d chore: update tests 2026-01-23 05:43:26 +05:30
srikanthccv
0978bdfa7f chore: fix warn 2026-01-23 04:33:07 +05:30
srikanthccv
4f99261743 chore: resolve conflicts 2026-01-23 04:25:39 +05:30
Srikanth Chekuri
836988273f Merge branch 'main' into issue-10008 2026-01-20 16:48:54 +05:30
srikanthccv
a622d65226 chore: lint 2026-01-20 06:30:00 +05:30
srikanthccv
2410e3d411 chore: add integration tests 2026-01-20 06:14:13 +05:30
Srikanth Chekuri
654e2e4b7e Merge branch 'main' into issue-10008 2026-01-20 05:02:15 +05:30
srikanthccv
03ad7a85fa feat(querybuilder): support using variables inside value
prior to this, variables worked only as standalone RHS values (e.g. field = $var / field IN $var). this change adds support for using variables inside a value, for pattern matching or substitution, e.g. "$env-suffix" or LIKE "%$pattern-var-name%"
2026-01-19 01:41:42 +05:30
12 changed files with 2387 additions and 95 deletions

View File

@@ -235,28 +235,31 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
switch spec := item.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
if spec.Filter != nil && spec.Filter.Expression != "" {
replaced, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
replaced, warnings, err := variables.ReplaceVariablesInExpression(spec.Filter.Expression, queryRangeRequest.Variables)
if err != nil {
errs = append(errs, err)
}
a.set.Logger.WarnContext(req.Context(), "variable replace warnings", "warnings", warnings)
spec.Filter.Expression = replaced
}
queryRangeRequest.CompositeQuery.Queries[idx].Spec = spec

View File

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"log/slog"
"sort"
"strconv"
"strings"
@@ -164,6 +165,7 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64
return nil, combinedErrors.WithAdditional(visitor.errors...).WithUrl(url)
}
// if there is nothing to filter, return true
if cond == "" {
cond = "true"
}
@@ -221,7 +223,6 @@ func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
}
// VisitQuery is the visitor entry point: it descends directly into the
// top-level expression node of the parsed filter query.
func (v *filterExpressionVisitor) VisitQuery(ctx *grammar.QueryContext) any {
return v.Visit(ctx.Expression())
}
@@ -503,8 +504,16 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
// Get all values for operations that need them
values := ctx.AllValue()
if len(values) > 0 {
// there should only be one value for the following operators, even if there is more than one
// we just take the first value
value := v.Visit(values[0])
// Check if the value is a skip marker (embedded variable with __all__ value)
if strVal, ok := value.(string); ok && strVal == specialSkipConditionMarker {
v.logger.Info("skipping condition due to __all__ variable", "keys", keys, "value", value) //nolint:sloglint
return ""
}
if var_, ok := value.(string); ok {
// check if this is a variable
var ok bool
@@ -516,6 +525,13 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
if ok {
if varItem.Type == qbtypes.DynamicVariableType {
if all_, ok := varItem.Value.(string); ok && all_ == "__all__" {
// this is likely overlooked by user, we treat it as if it was IN instead of =
v.logger.Warn("received unexpected __all__ value for single select dynamic variable", "variable", var_, "keys", keys, "value", value) //nolint:sloglint
return ""
}
}
switch varValues := varItem.Value.(type) {
case []any:
if len(varValues) == 0 {
@@ -781,7 +797,12 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value
return trimQuotes(txt)
value := trimQuotes(txt)
// Check if the string contains embedded variables
if strings.Contains(value, "$") {
return v.interpolateVariablesInString(value)
}
return value
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {
@@ -798,7 +819,12 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
// When the user writes an expression like `service.name=redis`
// The `redis` part is a VALUE context but parsed as a KEY token
// so we return the text as is
return ctx.KEY().GetText()
keyText := ctx.KEY().GetText()
// Check if this is a composed variable like $environment-xyz
if strings.HasPrefix(keyText, "$") {
return v.interpolateVariablesInString(keyText)
}
return keyText
}
return "" // Should not happen with valid input
@@ -830,7 +856,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// 1. either user meant key ( this is already handled above in fieldKeysForName )
// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'
// Note:
// Note:
// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
// If user only wants to search `key`, then they have to use `key`
// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -928,3 +954,103 @@ func trimQuotes(txt string) string {
txt = strings.ReplaceAll(txt, `\'`, `'`)
return txt
}
// specialSkipConditionMarker is used to indicate that the entire condition should be removed
const specialSkipConditionMarker = "__signoz_skip_condition__"
// interpolateVariablesInString finds and replaces variable references embedded
// within a string (e.g. "$env-suffix") by checking against the variable names
// present in the visitor's variables map.
//
// Pure variable references (the whole string is exactly a known "$varname")
// are returned as-is so the existing standalone-variable handling code can
// process them.
//
// Returns specialSkipConditionMarker if any referenced dynamic variable has
// the special "__all__" value, signalling that the entire condition should be
// removed.
func (v *filterExpressionVisitor) interpolateVariablesInString(s string) string {
	// A complete variable reference ("$varname" and nothing else) is handled
	// by the caller's standalone-variable path; return it untouched.
	if strings.HasPrefix(s, "$") {
		if _, ok := v.variables[s]; ok {
			return s
		}
		if _, ok := v.variables[s[1:]]; ok {
			return s
		}
	}

	// Normalize every variable name to a single "$name" search pattern,
	// remembering which map key it came from. (Map keys may be stored with or
	// without the leading "$"; the original code added both forms and scanned
	// each variable twice — dedupe instead.)
	patterns := make([]string, 0, len(v.variables))
	patternToKey := make(map[string]string, len(v.variables))
	for name := range v.variables {
		p := name
		if !strings.HasPrefix(p, "$") {
			p = "$" + name
		}
		if _, seen := patternToKey[p]; !seen {
			patterns = append(patterns, p)
		}
		patternToKey[p] = name
	}

	// Replace longer names first so that "$environment" is matched before a
	// shorter prefix like "$env".
	sort.Slice(patterns, func(i, j int) bool {
		return len(patterns[i]) > len(patterns[j])
	})

	result := s
	for _, p := range patterns {
		if !strings.Contains(result, p) {
			continue
		}
		varItem := v.variables[patternToKey[p]]
		// special check for __all__ value - skip the entire condition
		if varItem.Type == qbtypes.DynamicVariableType {
			if allVal, ok := varItem.Value.(string); ok && allVal == "__all__" {
				return specialSkipConditionMarker
			}
		}
		replacement := v.formatVariableValueForInterpolation(varItem.Value, strings.TrimPrefix(p, "$"))
		result = strings.ReplaceAll(result, p, replacement)
	}
	return result
}
// formatVariableValueForInterpolation renders a variable's value as a plain
// (unquoted) string suitable for splicing into a larger string value.
// Multi-valued variables collapse to their first element and record a warning
// on the visitor; scalars are stringified.
func (v *filterExpressionVisitor) formatVariableValueForInterpolation(value any, varName string) string {
	switch typed := value.(type) {
	case string:
		return typed
	case bool:
		return strconv.FormatBool(typed)
	case []string:
		if len(typed) == 0 {
			return ""
		}
		if len(typed) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, typed[0]))
		}
		return typed[0]
	case []any:
		if len(typed) == 0 {
			return ""
		}
		if len(typed) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
		}
		// Recurse so nested scalar types get the same formatting rules.
		return v.formatVariableValueForInterpolation(typed[0], varName)
	default:
		// Covers all numeric types (int, int32, int64, float32, float64, ...)
		// and anything else not matched above.
		return fmt.Sprintf("%v", typed)
	}
}

View File

@@ -10,8 +10,113 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
sqlbuilder "github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
)
// TestInterpolateVariablesInString covers embedded-variable interpolation in
// string values (GitHub issue #10008): composing variables with prefixes and
// suffixes, overlapping variable names, unknown variables, multi-select
// values, and the special "__all__" skip marker.
func TestInterpolateVariablesInString(t *testing.T) {
	testCases := []struct {
		desc string
		in   string
		vars map[string]qbtypes.VariableItem
		want string
	}{
		{
			desc: "pure variable reference - not interpolated",
			in:   "$service",
			vars: map[string]qbtypes.VariableItem{
				"service": {Value: "auth-service"},
			},
			// Standalone references are left for the regular variable path.
			want: "$service",
		},
		{
			desc: "variable composed with suffix",
			in:   "$environment-xyz",
			vars: map[string]qbtypes.VariableItem{
				"environment": {Value: "prod"},
			},
			want: "prod-xyz",
		},
		{
			desc: "variable in quoted string with suffix",
			in:   "$env-cluster",
			vars: map[string]qbtypes.VariableItem{
				"env": {Value: "staging"},
			},
			want: "staging-cluster",
		},
		{
			desc: "variable with prefix and suffix",
			in:   "prefix-$var-suffix",
			vars: map[string]qbtypes.VariableItem{
				"var": {Value: "middle"},
			},
			want: "prefix-middle-suffix",
		},
		{
			desc: "multiple variables in one string",
			in:   "$region-$env-cluster",
			vars: map[string]qbtypes.VariableItem{
				"region": {Value: "us-west"},
				"env":    {Value: "prod"},
			},
			want: "us-west-prod-cluster",
		},
		{
			desc: "similar variable names - longer matches first",
			in:   "$env-$environment",
			vars: map[string]qbtypes.VariableItem{
				"env":         {Value: "dev"},
				"environment": {Value: "production"},
			},
			want: "dev-production",
		},
		{
			desc: "unknown variable - preserved as-is",
			in:   "$unknown-suffix",
			vars: map[string]qbtypes.VariableItem{},
			want: "$unknown-suffix",
		},
		{
			desc: "variable with underscore",
			in:   "$my_var-test",
			vars: map[string]qbtypes.VariableItem{
				"my_var": {Value: "hello"},
			},
			want: "hello-test",
		},
		{
			desc: "__all__ value returns skip marker",
			in:   "$env-suffix",
			vars: map[string]qbtypes.VariableItem{
				"env": {
					Type:  qbtypes.DynamicVariableType,
					Value: "__all__",
				},
			},
			want: specialSkipConditionMarker,
		},
		{
			desc: "multi-select takes first value",
			in:   "$env-suffix",
			vars: map[string]qbtypes.VariableItem{
				"env": {Value: []any{"prod", "staging", "dev"}},
			},
			want: "prod-suffix",
		},
	}
	for _, tc := range testCases {
		t.Run(tc.desc, func(t *testing.T) {
			v := &filterExpressionVisitor{variables: tc.vars}
			assert.Equal(t, tc.want, v.interpolateVariablesInString(tc.in))
		})
	}
}
// TestPrepareWhereClause_EmptyVariableList ensures PrepareWhereClause errors when a variable has an empty list value
func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
tests := []struct {
@@ -42,7 +147,7 @@ func TestPrepareWhereClause_EmptyVariableList(t *testing.T) {
}
keys := map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -154,7 +259,7 @@ func TestVisitKey(t *testing.T) {
name: "Key not found",
keyText: "unknown_key",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -335,7 +440,7 @@ func TestVisitKey(t *testing.T) {
name: "Unknown key with ignoreNotFoundKeys=false",
keyText: "unknown_key",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -355,7 +460,7 @@ func TestVisitKey(t *testing.T) {
name: "Unknown key with ignoreNotFoundKeys=true",
keyText: "unknown_key",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
"service": []*telemetrytypes.TelemetryFieldKey{
"service": {
{
Name: "service",
Signal: telemetrytypes.SignalLogs,
@@ -467,7 +572,7 @@ func TestVisitKey(t *testing.T) {
expectedWarnings: nil,
expectedMainWrnURL: "",
},
{
{
name: "only attribute.custom_field is selected",
keyText: "attribute.attribute.custom_field",
fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{

View File

@@ -0,0 +1,191 @@
package telemetrylogs
import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
// TestFilterExprEmbeddedVariables verifies that variables embedded inside
// quoted string values (e.g. '$env-xyz') are interpolated before the logs
// WHERE clause is built: plain suffix/prefix composition, LIKE/ILIKE
// patterns, multiple variables in one value, the "__all__" skip behavior, and
// the multi-select first-value fallback. See GitHub issue #10008.
func TestFilterExprEmbeddedVariables(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm, nil)
keys := buildCompleteFieldKeyMap()
// Each case runs a filter expression plus a variables map through
// PrepareWhereClause and checks the generated ClickHouse SQL and args.
testCases := []struct {
name string
query string
variables map[string]qbtypes.VariableItem
shouldPass bool
expectedQuery string
expectedArgs []any
}{
{
name: "variable composed with suffix in quoted string",
query: "version = '$env-xyz'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
expectedArgs: []any{"prod-xyz", true},
},
{
name: "variable in LIKE pattern with suffix",
query: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"prod%"},
},
{
name: "variable with prefix and suffix",
query: "path = 'prefix-$var-suffix'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "middle",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
expectedArgs: []any{"prefix-middle-suffix", true},
},
{
name: "multiple variables in one string",
query: "path = '$region-$env-cluster'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
expectedArgs: []any{"us-west-prod-cluster", true},
},
{
// "$environment" must be replaced before its prefix "$env".
name: "similar variable names - longer matches first",
query: "path = '$env-$environment'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "dev",
},
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "production",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)",
expectedArgs: []any{"dev-production", true},
},
{
name: "pure variable reference - still works",
query: "service.name = $service",
variables: map[string]qbtypes.VariableItem{
"service": {
Type: qbtypes.DynamicVariableType,
Value: "auth-service",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"auth-service"},
},
{
name: "variable with underscore composed with suffix",
query: "version = '$my_var-test'",
variables: map[string]qbtypes.VariableItem{
"my_var": {
Type: qbtypes.DynamicVariableType,
Value: "hello",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
expectedArgs: []any{"hello-test", true},
},
{
name: "variable in ILIKE pattern",
query: "message ILIKE '%$pattern%'",
variables: map[string]qbtypes.VariableItem{
"pattern": {
Type: qbtypes.DynamicVariableType,
Value: "error",
},
},
shouldPass: true,
expectedQuery: "WHERE (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)",
expectedArgs: []any{"%error%", true},
},
{
// A dynamic variable set to "__all__" drops the whole condition.
name: "__all__ value skips condition",
query: "version = '$env-xyz'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
shouldPass: true,
expectedQuery: "WHERE true",
expectedArgs: nil,
},
{
name: "multi-select takes first value",
query: "version = '$env-xyz'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: []any{"prod", "staging", "dev"},
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['version'] = ? AND mapContains(attributes_string, 'version') = ?)",
expectedArgs: []any{"prod-xyz", true},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: DefaultFullTextColumn,
JsonKeyToKey: GetBodyJSONKey,
Variables: tc.variables,
}
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
require.NoError(t, err)
require.NotNil(t, clause)
sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err)
}
})
}
}

View File

@@ -0,0 +1,235 @@
package telemetrymetrics
import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
// buildMetricsFieldKeyMap returns a minimal field-key map for the metrics
// filter-expression tests. Every key is a string-typed metrics field;
// "cluster" lives in the attribute context, the rest in the resource context.
func buildMetricsFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
	contexts := map[string]telemetrytypes.FieldContext{
		"service.name": telemetrytypes.FieldContextResource,
		"host.name":    telemetrytypes.FieldContextResource,
		"environment":  telemetrytypes.FieldContextResource,
		"region":       telemetrytypes.FieldContextResource,
		"cluster":      telemetrytypes.FieldContextAttribute,
	}
	keys := make(map[string][]*telemetrytypes.TelemetryFieldKey, len(contexts))
	for name, fieldCtx := range contexts {
		keys[name] = []*telemetrytypes.TelemetryFieldKey{
			{
				Name:          name,
				Signal:        telemetrytypes.SignalMetrics,
				FieldContext:  fieldCtx,
				FieldDataType: telemetrytypes.FieldDataTypeString,
			},
		}
	}
	return keys
}
// TestFilterExprEmbeddedVariables verifies embedded-variable interpolation for
// the metrics WHERE clause builder: suffix/prefix composition inside quoted
// strings, LIKE/ILIKE patterns, multiple variables in one value, the
// "__all__" skip behavior, and multi-select first-value fallback.
// See GitHub issue #10008.
func TestFilterExprEmbeddedVariables(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
keys := buildMetricsFieldKeyMap()
// Each case runs a filter expression plus a variables map through
// PrepareWhereClause and checks the generated ClickHouse SQL and args.
testCases := []struct {
name string
query string
variables map[string]qbtypes.VariableItem
shouldPass bool
expectedQuery string
expectedArgs []any
}{
{
name: "variable composed with suffix in quoted string",
query: "host.name = '$env-server'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
expectedArgs: []any{"prod-server"},
},
{
name: "variable in LIKE pattern with suffix",
query: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'service.name') LIKE ?",
expectedArgs: []any{"prod%"},
},
{
name: "variable with prefix and suffix",
query: "cluster = 'prefix-$var-suffix'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "middle",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
expectedArgs: []any{"prefix-middle-suffix"},
},
{
name: "multiple variables in one string",
query: "cluster = '$region-$env-cluster'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
expectedArgs: []any{"us-west-prod-cluster"},
},
{
// "$environment" must be replaced before its prefix "$env".
name: "similar variable names - longer matches first",
query: "cluster = '$env-$environment'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "dev",
},
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "production",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'cluster') = ?",
expectedArgs: []any{"dev-production"},
},
{
name: "pure variable reference - still works",
query: "service.name = $service",
variables: map[string]qbtypes.VariableItem{
"service": {
Type: qbtypes.DynamicVariableType,
Value: "auth-service",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'service.name') = ?",
expectedArgs: []any{"auth-service"},
},
{
name: "variable with underscore composed with suffix",
query: "host.name = '$my_var-test'",
variables: map[string]qbtypes.VariableItem{
"my_var": {
Type: qbtypes.DynamicVariableType,
Value: "hello",
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
expectedArgs: []any{"hello-test"},
},
{
name: "variable in ILIKE pattern",
query: "environment ILIKE '%$pattern%'",
variables: map[string]qbtypes.VariableItem{
"pattern": {
Type: qbtypes.DynamicVariableType,
Value: "staging",
},
},
shouldPass: true,
expectedQuery: "WHERE LOWER(JSONExtractString(labels, 'environment')) LIKE LOWER(?)",
expectedArgs: []any{"%staging%"},
},
{
// A dynamic variable set to "__all__" drops the whole condition.
name: "__all__ value skips condition",
query: "host.name = '$env-server'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
shouldPass: true,
expectedQuery: "WHERE true",
expectedArgs: nil,
},
{
name: "multi-select takes first value",
query: "host.name = '$env-server'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: []any{"prod", "staging", "dev"},
},
},
shouldPass: true,
expectedQuery: "WHERE JSONExtractString(labels, 'host.name') = ?",
expectedArgs: []any{"prod-server"},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
Variables: tc.variables,
}
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
require.NoError(t, err)
require.NotNil(t, clause)
sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err)
}
})
}
}

View File

@@ -0,0 +1,146 @@
package telemetrytraces
import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
// TestFilterExprEmbeddedVariables verifies embedded-variable interpolation for
// the traces WHERE clause builder: suffix/prefix composition inside quoted
// strings, LIKE patterns, multiple variables in one value, the "__all__" skip
// behavior, and multi-select first-value fallback. See GitHub issue #10008.
func TestFilterExprEmbeddedVariables(t *testing.T) {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
keys := buildCompleteFieldKeyMap()
// Each case runs a filter expression plus a variables map through
// PrepareWhereClause and checks the generated ClickHouse SQL and args.
testCases := []struct {
name string
query string
variables map[string]qbtypes.VariableItem
shouldPass bool
expectedQuery string
expectedArgs []any
}{
{
name: "variable composed with suffix in quoted string",
query: "http.method = '$method-request'",
variables: map[string]qbtypes.VariableItem{
"method": {
Type: qbtypes.DynamicVariableType,
Value: "GET",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
expectedArgs: []any{"GET-request", true},
},
{
name: "variable in LIKE pattern with suffix",
query: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) LIKE ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"prod%"},
},
{
name: "variable with prefix and suffix",
query: "user.id = 'user-$var-id'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "123",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"user-123-id", true},
},
{
name: "multiple variables in one string",
query: "user.id = '$region-$env-user'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['user.id'] = ? AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"us-west-prod-user", true},
},
{
name: "pure variable reference - still works",
query: "service.name = $service",
variables: map[string]qbtypes.VariableItem{
"service": {
Type: qbtypes.DynamicVariableType,
Value: "auth-service",
},
},
shouldPass: true,
expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)",
expectedArgs: []any{"auth-service"},
},
{
// A dynamic variable set to "__all__" drops the whole condition.
name: "__all__ value skips condition",
query: "http.method = '$method-request'",
variables: map[string]qbtypes.VariableItem{
"method": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
shouldPass: true,
expectedQuery: "WHERE true",
expectedArgs: nil,
},
{
name: "multi-select takes first value",
query: "http.method = '$method-request'",
variables: map[string]qbtypes.VariableItem{
"method": {
Type: qbtypes.DynamicVariableType,
Value: []any{"GET", "POST", "PUT"},
},
},
shouldPass: true,
expectedQuery: "WHERE (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)",
expectedArgs: []any{"GET-request", true},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
Variables: tc.variables,
}
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
require.NoError(t, err)
require.NotNil(t, clause)
sql, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err)
}
})
}
}

View File

@@ -2,6 +2,7 @@ package variables
import (
"fmt"
"sort"
"strconv"
"strings"
@@ -35,19 +36,22 @@ func (e *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol
type variableReplacementVisitor struct {
variables map[string]qbtypes.VariableItem
errors []string
warnings []string
}
// specialSkipMarker is used to indicate that a condition should be removed
const specialSkipMarker = "__SKIP_CONDITION__"
// ReplaceVariablesInExpression takes a filter expression and returns it with variables replaced
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, error) {
// Also returns any warnings generated during the replacement process.
func ReplaceVariablesInExpression(expression string, variables map[string]qbtypes.VariableItem) (string, []string, error) {
input := antlr.NewInputStream(expression)
lexer := grammar.NewFilterQueryLexer(input)
visitor := &variableReplacementVisitor{
variables: variables,
errors: []string{},
warnings: []string{},
}
lexerErrorListener := NewErrorListener()
@@ -63,21 +67,21 @@ func ReplaceVariablesInExpression(expression string, variables map[string]qbtype
tree := parser.Query()
if len(parserErrorListener.SyntaxErrors) > 0 {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "syntax errors in expression: %v", parserErrorListener.SyntaxErrors)
}
result := visitor.Visit(tree).(string)
if len(visitor.errors) > 0 {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "errors processing expression: %v", visitor.errors)
}
// If the entire expression should be skipped, return empty string
if result == specialSkipMarker {
return "", nil
return "", visitor.warnings, nil
}
return result, nil
return result, visitor.warnings, nil
}
// Visit dispatches to the specific visit method based on node type
@@ -478,11 +482,32 @@ func (v *variableReplacementVisitor) VisitValue(ctx *grammar.ValueContext) any {
// Replace variable with its value
return v.formatVariableValue(varItem.Value)
}
// did not find as exact match; check if it's a composed string like "$env-xyz"
interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
if hasAllValue {
return specialSkipMarker
}
if interpolated != originalValue {
return v.formatVariableValue(interpolated)
}
// No variables found, return original
return originalValue
}
// Check if the quoted text contains embedded variables (not starting with $)
if ctx.QUOTED_TEXT() != nil && strings.Contains(originalValue, "$") {
interpolated, hasAllValue := v.interpolateVariablesInString(originalValue)
if hasAllValue {
return specialSkipMarker
}
return v.formatVariableValue(interpolated)
}
// Return original value if not a variable or variable not found
// If it was quoted text and not a variable, return with quotes
if ctx.QUOTED_TEXT() != nil && !strings.HasPrefix(originalValue, "$") {
if ctx.QUOTED_TEXT() != nil {
return ctx.QUOTED_TEXT().GetText()
}
return originalValue
@@ -517,6 +542,82 @@ func (v *variableReplacementVisitor) VisitKey(ctx *grammar.KeyContext) any {
return keyText
}
// interpolateVariablesInString finds and replaces variable references within a string
// by checking against actual variable names in the variables map.
// Returns the interpolated string and a boolean indicating if any variable had __all__ value.
func (v *variableReplacementVisitor) interpolateVariablesInString(s string) (string, bool) {
	result := s

	// Collect each variable's canonical (un-prefixed) name exactly once.
	// Previously both "name" and "$name" were appended for un-prefixed keys,
	// so every such variable was scanned and looked up twice with the same
	// "$name" search pattern; deduplicating here removes that redundant work
	// without changing the replacement outcome.
	varNames := make([]string, 0, len(v.variables))
	seen := make(map[string]struct{}, len(v.variables))
	for name := range v.variables {
		canonical := strings.TrimPrefix(name, "$")
		if _, dup := seen[canonical]; dup {
			continue
		}
		seen[canonical] = struct{}{}
		varNames = append(varNames, canonical)
	}

	// Longest names first so "$environment" is replaced before "$env" when
	// both variables exist and one is a prefix of the other.
	sort.Slice(varNames, func(i, j int) bool {
		return len(varNames[i]) > len(varNames[j])
	})

	for _, varName := range varNames {
		searchPattern := "$" + varName
		if !strings.Contains(result, searchPattern) {
			continue
		}
		// The variables map may key an entry with or without the "$" prefix;
		// try the canonical name first, then the prefixed form.
		varItem, ok := v.variables[varName]
		if !ok {
			varItem, ok = v.variables[searchPattern]
		}
		if !ok {
			continue
		}
		// Special check: a dynamic variable set to __all__ means the whole
		// condition should be dropped, so signal the caller to skip it.
		if varItem.Type == qbtypes.DynamicVariableType {
			if allVal, isStr := varItem.Value.(string); isStr && allVal == "__all__" {
				return "", true
			}
		}
		// Format the replacement value (unquoted for string interpolation).
		replacement := v.formatVariableValueUnquoted(varItem.Value, varName)
		result = strings.ReplaceAll(result, searchPattern, replacement)
	}
	return result, false
}
// formatVariableValueUnquoted returns a string representation of the value for string interpolation.
// For multi-select (array) values, it takes the first value and adds a warning.
func (v *variableReplacementVisitor) formatVariableValueUnquoted(value any, varName string) string {
	switch vals := value.(type) {
	case []string:
		// Multi-select: interpolation can only use a single value, so take
		// the first and record a warning when others are dropped.
		if len(vals) == 0 {
			return ""
		}
		if len(vals) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value `%s` for string interpolation", varName, vals[0]))
		}
		return vals[0]
	case []any:
		if len(vals) == 0 {
			return ""
		}
		if len(vals) > 1 {
			v.warnings = append(v.warnings, fmt.Sprintf("variable `%s` has multiple values, using first value for string interpolation", varName))
		}
		return fmt.Sprintf("%v", vals[0])
	default:
		// Scalar (string, number, bool, ...): plain %v rendering.
		return fmt.Sprintf("%v", vals)
	}
}
// formatVariableValue formats a variable value for inclusion in the expression
func (v *variableReplacementVisitor) formatVariableValue(value any) string {
switch val := value.(type) {

View File

@@ -421,11 +421,107 @@ func TestReplaceVariablesInExpression(t *testing.T) {
},
expected: "message NOT CONTAINS 'debug'",
},
{
name: "variable composed with suffix in value",
expression: "cluster_name = '$environment-xyz'",
variables: map[string]qbtypes.VariableItem{
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
expected: "cluster_name = 'prod-xyz'",
},
{
name: "variable composed with suffix without quotes",
expression: "cluster_name = $environment-xyz",
variables: map[string]qbtypes.VariableItem{
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "staging",
},
},
expected: "cluster_name = 'staging-xyz'",
},
{
name: "variable in LIKE pattern with suffix",
expression: "service.name LIKE '$env%'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
expected: "service.name LIKE 'prod%'",
},
{
name: "variable composed with prefix and suffix",
expression: "label = 'prefix-$var-suffix'",
variables: map[string]qbtypes.VariableItem{
"var": {
Type: qbtypes.DynamicVariableType,
Value: "middle",
},
},
expected: "label = 'prefix-middle-suffix'",
},
{
name: "multiple variables in one string",
expression: "path = '$region-$env-cluster'",
variables: map[string]qbtypes.VariableItem{
"region": {
Type: qbtypes.DynamicVariableType,
Value: "us-west",
},
"env": {
Type: qbtypes.DynamicVariableType,
Value: "prod",
},
},
expected: "path = 'us-west-prod-cluster'",
},
{
name: "embedded variable with __all__ value skips condition",
expression: "cluster_name = '$environment-xyz'",
variables: map[string]qbtypes.VariableItem{
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "__all__",
},
},
expected: "",
},
{
name: "variable with underscore composed with suffix",
expression: "name = '$my_var-test'",
variables: map[string]qbtypes.VariableItem{
"my_var": {
Type: qbtypes.DynamicVariableType,
Value: "hello",
},
},
expected: "name = 'hello-test'",
},
{
name: "similar variable names - longer matches first",
expression: "name = '$env-$environment'",
variables: map[string]qbtypes.VariableItem{
"env": {
Type: qbtypes.DynamicVariableType,
Value: "dev",
},
"environment": {
Type: qbtypes.DynamicVariableType,
Value: "production",
},
},
expected: "name = 'dev-production'",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := ReplaceVariablesInExpression(tt.expression, tt.variables)
result, _, err := ReplaceVariablesInExpression(tt.expression, tt.variables)
if tt.wantErr {
assert.Error(t, err)

View File

@@ -456,3 +456,32 @@ def assert_scalar_column_order(
f"{context}: Column {column_index} order mismatch. "
f"Expected {expected_values}, got {actual_values}"
)
def make_substitute_vars_request(
    signoz: "types.SigNoz",
    token: str,
    start_ms: int,
    end_ms: int,
    queries: List[Dict],
    variables: Dict[str, Any],
    timeout: int = QUERY_TIMEOUT,
) -> requests.Response:
    """POST a /api/v5/substitute_vars request and return the raw HTTP response.

    The payload mirrors a v5 time_series query-range body, with the supplied
    queries and dashboard variables attached for server-side substitution.
    """
    body = {
        "schemaVersion": "v1",
        "start": start_ms,
        "end": end_ms,
        "requestType": "time_series",
        "compositeQuery": {"queries": queries},
        "variables": variables,
        "formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
    }
    endpoint = signoz.self.host_configs["8080"].get("/api/v5/substitute_vars")
    return requests.post(
        endpoint,
        timeout=timeout,
        headers={"authorization": f"Bearer {token}"},
        json=body,
    )
def sum_series_values(series_values: List[Dict]) -> float:
    """Total of the "value" field across every point in a series."""
    values = [point["value"] for point in series_values]
    return sum(values)

View File

@@ -2,8 +2,6 @@ from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, Dict, List, Optional, Tuple
import requests
from fixtures import querier, types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
@@ -24,6 +22,8 @@ metric_values_for_test = {
"service-d": 10.0,
}
SCALAR_FORMAT_OPTIONS = {"formatTableResultForUI": True, "fillGaps": False}
def generate_logs_with_counts(
now: datetime,
@@ -85,30 +85,6 @@ def generate_metrics_with_values(
return metrics
def make_scalar_query_request(
signoz: types.SigNoz,
token: str,
now: datetime,
queries: List[Dict],
lookback_minutes: int = 5,
) -> requests.Response:
return requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=5,
headers={"authorization": f"Bearer {token}"},
json={
"schemaVersion": "v1",
"start": int(
(now - timedelta(minutes=lookback_minutes)).timestamp() * 1000
),
"end": int(now.timestamp() * 1000),
"requestType": "scalar",
"compositeQuery": {"queries": queries},
"formatOptions": {"formatTableResultForUI": True, "fillGaps": False},
},
)
def build_logs_query(
name: str = "A",
aggregations: Optional[List[str]] = None,
@@ -218,11 +194,16 @@ def test_logs_scalar_group_by_single_agg_no_order(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_logs_query(group_by=["service.name"])],
request_type="scalar",
format_options={"formatTableResultForUI": True, "fillGaps": False},
)
assert response.status_code == HTTPStatus.OK
@@ -246,11 +227,16 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_logs_query(group_by=["service.name"], order_by=[("count()", "asc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -274,11 +260,16 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_logs_query(group_by=["service.name"], order_by=[("count()", "desc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -302,15 +293,20 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "asc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -334,15 +330,20 @@ def test_logs_scalar_group_by_single_agg_order_by_grouping_key_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "desc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -366,10 +367,13 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"],
@@ -377,6 +381,8 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
order_by=[("count()", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -398,10 +404,13 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"],
@@ -409,6 +418,8 @@ def test_logs_scalar_group_by_multiple_aggs_order_by_second_agg_desc(
order_by=[("count_distinct(body)", "desc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -431,15 +442,20 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -463,15 +479,20 @@ def test_logs_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -495,15 +516,20 @@ def test_logs_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_logs(generate_logs_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_logs_query(
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -527,11 +553,16 @@ def test_traces_scalar_group_by_single_agg_no_order(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_traces_query(group_by=["service.name"])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -555,11 +586,16 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_traces_query(group_by=["service.name"], order_by=[("count()", "asc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -583,11 +619,16 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_traces_query(group_by=["service.name"], order_by=[("count()", "desc")])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -611,15 +652,20 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "asc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -643,15 +689,20 @@ def test_traces_scalar_group_by_single_agg_order_by_grouping_key_desc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "desc")]
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -675,10 +726,13 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"],
@@ -686,6 +740,8 @@ def test_traces_scalar_group_by_multiple_aggs_order_by_first_agg_asc(
order_by=[("count()", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -707,15 +763,20 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("count()", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -739,15 +800,20 @@ def test_traces_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("count()", "desc")], limit=3
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -771,15 +837,20 @@ def test_traces_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_traces(generate_traces_with_counts(now, log_or_trace_service_counts))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_traces_query(
group_by=["service.name"], order_by=[("service.name", "asc")], limit=2
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -803,11 +874,16 @@ def test_metrics_scalar_group_by_single_agg_no_order(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[build_metrics_query(group_by=["service.name"])],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -836,16 +912,21 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
order_by=[("sum(test.metric)", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -874,16 +955,21 @@ def test_metrics_scalar_group_by_single_agg_order_by_grouping_key_asc(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
order_by=[("service.name", "asc")],
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -912,10 +998,13 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
@@ -923,6 +1012,8 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_asc_limit_2(
limit=2,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -946,10 +1037,13 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
@@ -957,6 +1051,8 @@ def test_metrics_scalar_group_by_single_agg_order_by_agg_desc_limit_3(
limit=3,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK
@@ -980,10 +1076,13 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
insert_metrics(generate_metrics_with_values(now, metric_values_for_test))
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = make_scalar_query_request(
start_ms = int((now - timedelta(minutes=5)).timestamp() * 1000)
end_ms = int(now.timestamp() * 1000)
response = querier.make_query_request(
signoz,
token,
now,
start_ms,
end_ms,
[
build_metrics_query(
group_by=["service.name"],
@@ -991,6 +1090,8 @@ def test_metrics_scalar_group_by_order_by_grouping_key_asc_limit_2(
limit=2,
)
],
request_type="scalar",
format_options=SCALAR_FORMAT_OPTIONS,
)
assert response.status_code == HTTPStatus.OK

View File

@@ -0,0 +1,356 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.querier import build_scalar_query, make_substitute_vars_request
LOGS_COUNT_AGG = [{"expression": "count()"}]
def test_substitute_vars_standalone_variable(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """A standalone $service RHS is replaced with the quoted variable value."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "query", "value": "auth-service"}},
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    assert substituted[0]["spec"]["filter"]["expression"] == "service.name = 'auth-service'"
def test_substitute_vars_variable_in_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """A variable embedded inside a quoted value ('$environment-xyz') is interpolated."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "custom", "value": "prod"}},
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    assert substituted[0]["spec"]["filter"]["expression"] == "cluster_name = 'prod-xyz'"
def test_substitute_vars_multiple_variables(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """Two standalone variables in one expression are both substituted."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name = $service AND env = $environment",
            )
        ],
        variables={
            "service": {"type": "text", "value": "auth-service"},
            "environment": {"type": "query", "value": "production"},
        },
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    assert (
        substituted[0]["spec"]["filter"]["expression"]
        == "service.name = 'auth-service' AND env = 'production'"
    )
def test_substitute_vars_array_variable(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """A multi-value variable used with IN expands to a bracketed list."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name IN $services",
            )
        ],
        variables={
            "services": {"type": "query", "value": ["auth-service", "api-service"]}
        },
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    assert (
        substituted[0]["spec"]["filter"]["expression"]
        == "service.name IN ['auth-service', 'api-service']"
    )
def test_substitute_vars_like_pattern(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """A variable inside a LIKE pattern ('$env%') is interpolated in place."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name LIKE '$env%'",
            )
        ],
        variables={"env": {"type": "text", "value": "prod"}},
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    assert substituted[0]["spec"]["filter"]["expression"] == "service.name LIKE 'prod%'"
def test_substitute_vars_variable_without_quotes(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """An unquoted composed value ($environment-xyz) is interpolated and quoted."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = $environment-xyz",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "staging"}},
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    assert substituted[0]["spec"]["filter"]["expression"] == "cluster_name = 'staging-xyz'"
def test_substitute_vars_all_value_standalone(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """A dynamic variable set to __all__ empties the whole filter expression."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "dynamic", "value": "__all__"}},
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    # expression should be empty when __all__ is used
    assert substituted[0]["spec"]["filter"]["expression"] == ""
def test_substitute_vars_all_value_in_composed_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """__all__ inside a composed string ('$environment-xyz') also drops the condition."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "__all__"}},
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    substituted = payload["data"]["compositeQuery"]["queries"]
    assert len(substituted) == 1
    # expression should be empty when __all__ is used
    assert substituted[0]["spec"]["filter"]["expression"] == ""
def test_substitute_vars_all_value_partial(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
) -> None:
    """With one variable set to __all__, only the other condition survives."""
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_end = datetime.now(tz=timezone.utc)
    window_start = window_end - timedelta(minutes=5)
    resp = make_substitute_vars_request(
        signoz,
        auth_token,
        int(window_start.timestamp() * 1000),
        int(window_end.timestamp() * 1000),
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name = $service AND env = $env",
            )
        ],
        variables={
            "service": {"type": "dynamic", "value": "__all__"},
            "env": {"type": "dynamic", "value": "production"},
        },
    )
    assert resp.status_code == HTTPStatus.OK
    payload = resp.json()
    assert payload["status"] == "success"
    queries = payload["data"]["compositeQuery"]["queries"]
    assert len(queries) == 1
    # only env condition should remain
    assert queries[0]["spec"]["filter"]["expression"] == "env = 'production'"

View File

@@ -0,0 +1,803 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logs import Logs
from fixtures.querier import (
build_scalar_query,
get_all_series,
get_series_values,
make_query_request,
sum_series_values,
)
LOGS_COUNT_AGG = [{"expression": "count()"}]
def test_query_range_with_standalone_variable(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """A bare $service on the RHS should match exactly one service's logs."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("auth-service", "production", "Auth service log"),
        ("api-service", "production", "API service log"),
        ("web-service", "staging", "Web service log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"env": env},
                body=text,
                severity_text="INFO",
            )
            for svc, env, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "query", "value": "auth-service"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # auth-service log
    assert sum_series_values(get_series_values(resp.json(), "A")) == 1
def test_query_range_with_variable_in_array(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """An array-valued variable used with IN matches each listed service."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("auth-service", "production", "Auth service log"),
        ("api-service", "production", "API service log"),
        ("web-service", "staging", "Web service log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"env": env},
                body=text,
                severity_text="INFO",
            )
            for svc, env, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name IN $services",
            )
        ],
        variables={
            "services": {"type": "custom", "value": ["auth-service", "api-service"]}
        },
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # auth-service and api-service logs
    assert sum_series_values(get_series_values(resp.json(), "A")) == 2
def test_query_range_with_variable_composed_in_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """A variable embedded in a quoted string ('$environment-xyz') substitutes in place."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("service1", "prod-xyz", "Prod cluster log"),
        ("service2", "staging-xyz", "Staging cluster log"),
        ("service3", "dev-xyz", "Dev cluster log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"cluster_name": cluster},
                body=text,
                severity_text="INFO",
            )
            for svc, cluster, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "prod"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # prod-xyz log
    assert sum_series_values(get_series_values(resp.json(), "A")) == 1
def test_query_range_with_variable_in_like_pattern(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """A variable inside a LIKE pattern ('$env%') acts as a prefix match."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("prod-auth", "production", "Prod auth log"),
        ("prod-api", "production", "Prod API log"),
        ("staging-api", "staging", "Staging API log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"env": env},
                body=text,
                severity_text="INFO",
            )
            for svc, env, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name LIKE '$env%'",
            )
        ],
        variables={"env": {"type": "text", "value": "prod"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # prod-auth and prod-api logs
    assert sum_series_values(get_series_values(resp.json(), "A")) == 2
def test_query_range_with_multiple_variables_in_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """Two variables composed into one value ('$region-$env-cluster') both substitute."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("service1", "us-west-prod-cluster", "US West Prod log"),
        ("service2", "us-east-prod-cluster", "US East Prod log"),
        ("service3", "eu-west-staging-cluster", "EU West Staging log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"path": path},
                body=text,
                severity_text="INFO",
            )
            for svc, path, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="path = '$region-$env-cluster'",
            )
        ],
        variables={
            "region": {"type": "query", "value": "us-west"},
            "env": {"type": "custom", "value": "prod"},
        },
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # us-west-prod-cluster log
    assert sum_series_values(get_series_values(resp.json(), "A")) == 1
def test_query_range_with_variable_prefix_and_suffix(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """A variable sandwiched between literal text ('prefix-$var-suffix') substitutes."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("service1", "prefix-middle-suffix", "Middle label log"),
        ("service2", "prefix-other-suffix", "Other label log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"label": label},
                body=text,
                severity_text="INFO",
            )
            for svc, label, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="label = 'prefix-$var-suffix'",
            )
        ],
        variables={"var": {"type": "text", "value": "middle"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # prefix-middle-suffix log
    assert sum_series_values(get_series_values(resp.json(), "A")) == 1
def test_query_range_with_variable_without_quotes(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """An unquoted composed value ($environment-xyz) still substitutes correctly."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("service1", "staging-xyz", "Staging cluster log"),
        ("service2", "prod-xyz", "Prod cluster log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"cluster_name": cluster},
                body=text,
                severity_text="INFO",
            )
            for svc, cluster, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = $environment-xyz",
            )
        ],
        variables={"environment": {"type": "custom", "value": "staging"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # staging-xyz log
    assert sum_series_values(get_series_values(resp.json(), "A")) == 1
def test_query_range_time_series_with_group_by(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """Variable substitution composes with a resource-attribute group by."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    logs: List[Logs] = []
    # three minutes of data, one log per service per minute
    for i in range(3):
        ts = base - timedelta(minutes=i + 1)
        for svc, cluster, prefix in (
            ("auth-service", "prod-xyz", "Auth service log"),
            ("api-service", "prod-xyz", "API service log"),
            ("web-service", "staging-xyz", "Web service log"),
        ):
            logs.append(
                Logs(
                    timestamp=ts,
                    resources={"service.name": svc},
                    attributes={"cluster_name": cluster},
                    body=f"{prefix} {i}",
                    severity_text="INFO",
                )
            )
    insert_logs(logs)
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=10)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$env-xyz'",
                group_by=[
                    {
                        "name": "service.name",
                        "fieldDataType": "string",
                        "fieldContext": "resource",
                    }
                ],
            )
        ],
        variables={"env": {"type": "query", "value": "prod"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    all_series = get_all_series(resp.json(), "A")
    # auth-service and api-service
    assert len(all_series) == 2
    # 6 (3 auth-service + 3 api-service)
    assert sum(sum_series_values(s["values"]) for s in all_series) == 6
def test_query_range_with_different_variable_types(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """Every supported variable type substitutes identically for a standalone value."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": "auth-service"},
                attributes={"env": "production"},
                body="Auth service log",
                severity_text="INFO",
            )
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    # all different variable types
    for var_type in ("query", "custom", "text", "dynamic"):
        resp = make_query_request(
            signoz,
            auth_token,
            window_start,
            window_end,
            [
                build_scalar_query(
                    "A",
                    "logs",
                    LOGS_COUNT_AGG,
                    filter_expression="service.name = $service",
                )
            ],
            variables={"service": {"type": var_type, "value": "auth-service"}},
        )
        assert (
            resp.status_code == HTTPStatus.OK
        ), f"Failed for variable type: {var_type}"
        assert resp.json()["status"] == "success"
        total_count = sum_series_values(get_series_values(resp.json(), "A"))
        assert (
            total_count == 1
        ), f"Expected 1 log for variable type {var_type}, got {total_count}"
def test_query_range_with_all_value_standalone(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """__all__ for a standalone variable removes the condition, matching every log."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("auth-service", "production", "Auth service log"),
        ("api-service", "production", "API service log"),
        ("web-service", "staging", "Web service log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"env": env},
                body=text,
                severity_text="INFO",
            )
            for svc, env, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    # `__all__`, the filter condition should be removed
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A", "logs", LOGS_COUNT_AGG, filter_expression="service.name = $service"
            )
        ],
        variables={"service": {"type": "dynamic", "value": "__all__"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    assert sum_series_values(get_series_values(resp.json(), "A")) == 3
def test_query_range_with_all_value_in_composed_string(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """__all__ inside a composed string removes the condition, matching every log."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("service1", "prod-xyz", "Prod cluster log"),
        ("service2", "staging-xyz", "Staging cluster log"),
        ("service3", "dev-xyz", "Dev cluster log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"cluster_name": cluster},
                body=text,
                severity_text="INFO",
            )
            for svc, cluster, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    # `__all__` in composed string, the filter should be removed
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="cluster_name = '$environment-xyz'",
            )
        ],
        variables={"environment": {"type": "dynamic", "value": "__all__"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # all logs should be returned
    assert sum_series_values(get_series_values(resp.json(), "A")) == 3
def test_query_range_with_all_value_partial_filter(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """With $service as __all__, only the env condition filters the results."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("auth-service", "production", "Auth prod log"),
        ("api-service", "production", "API prod log"),
        ("web-service", "staging", "Web staging log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"env": env},
                body=text,
                severity_text="INFO",
            )
            for svc, env, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    # `__all__` for service, only env filter should apply
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name = $service AND env = $env",
            )
        ],
        variables={
            "service": {"type": "dynamic", "value": "__all__"},
            "env": {"type": "dynamic", "value": "production"},
        },
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # prod logs (auth + api)
    assert sum_series_values(get_series_values(resp.json(), "A")) == 2
def test_query_range_with_all_value_in_array(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[List[Logs]], None],
) -> None:
    """__all__ used with an IN clause removes the condition, matching every log."""
    base = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    fixtures = [
        ("auth-service", "production", "Auth service log"),
        ("api-service", "production", "API service log"),
    ]
    insert_logs(
        [
            Logs(
                timestamp=base - timedelta(minutes=1),
                resources={"service.name": svc},
                attributes={"env": env},
                body=text,
                severity_text="INFO",
            )
            for svc, env, text in fixtures
        ]
    )
    auth_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    window_start = int((base - timedelta(minutes=5)).timestamp() * 1000)
    window_end = int(base.timestamp() * 1000)
    resp = make_query_request(
        signoz,
        auth_token,
        window_start,
        window_end,
        [
            build_scalar_query(
                "A",
                "logs",
                LOGS_COUNT_AGG,
                filter_expression="service.name IN $services",
            )
        ],
        variables={"services": {"type": "dynamic", "value": "__all__"}},
    )
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["status"] == "success"
    # all logs
    assert sum_series_values(get_series_values(resp.json(), "A")) == 2