mirror of
https://github.com/SigNoz/signoz.git
synced 2026-05-15 14:40:30 +01:00
Compare commits
4 Commits
issue_4522
...
fts-logs
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
952f5d6e91 | ||
|
|
2154ea30a6 | ||
|
|
6e3857b840 | ||
|
|
0766ab31c0 |
@@ -18,16 +18,19 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration/implcloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
@@ -109,6 +112,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
func(_ licensing.Licensing) factory.NamedMap[factory.ProviderFactory[auditor.Auditor, auditor.Config]] {
|
||||
return signoz.NewAuditorProviderFactories()
|
||||
},
|
||||
func(_ context.Context, _ factory.ProviderSettings, _ flagger.Flagger, _ licensing.Licensing, _ telemetrystore.TelemetryStore, _ retention.Getter, _ organization.Getter, _ zeus.Zeus) (factory.NamedMap[factory.ProviderFactory[meterreporter.Reporter, meterreporter.Config]], string) {
|
||||
return signoz.NewMeterReporterProviderFactories(), "noop"
|
||||
},
|
||||
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
|
||||
return querier.NewHandler(ps, q, a)
|
||||
},
|
||||
|
||||
55
cmd/enterprise/meter.go
Normal file
55
cmd/enterprise/meter.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/metercollector"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrytraces"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
)
|
||||
|
||||
// meterConfigs declares the meters reported to Zeus: one static
// "platform active" heartbeat, plus one telemetry-backed usage meter per
// signal (logs, traces, metrics), each sized from its signal's local
// ClickHouse table and bucketed by that signal's default retention.
var meterConfigs = []metercollector.Config{
	{
		Provider: metercollector.ProviderStatic,
		Static: metercollector.StaticConfig{
			Name:        zeustypes.MeterPlatformActive,
			Unit:        zeustypes.MeterUnitCount,
			Aggregation: zeustypes.MeterAggregationMax,
			// Constant heartbeat value emitted once per window.
			Value: 1,
		},
	},
	{
		Provider: metercollector.ProviderTelemetry,
		Telemetry: metercollector.TelemetryConfig{
			Name:                 zeustypes.MeterLogSize,
			Unit:                 zeustypes.MeterUnitBytes,
			Aggregation:          zeustypes.MeterAggregationSum,
			DBName:               telemetrylogs.DBName,
			TableName:            telemetrylogs.LogsV2LocalTableName,
			DefaultRetentionDays: retentiontypes.DefaultLogsRetentionDays,
		},
	},
	{
		Provider: metercollector.ProviderTelemetry,
		Telemetry: metercollector.TelemetryConfig{
			Name:                 zeustypes.MeterSpanSize,
			Unit:                 zeustypes.MeterUnitBytes,
			Aggregation:          zeustypes.MeterAggregationSum,
			DBName:               telemetrytraces.DBName,
			TableName:            telemetrytraces.SpanIndexV3LocalTableName,
			DefaultRetentionDays: retentiontypes.DefaultTracesRetentionDays,
		},
	},
	{
		Provider: metercollector.ProviderTelemetry,
		Telemetry: metercollector.TelemetryConfig{
			Name:                 zeustypes.MeterDatapointCount,
			Unit:                 zeustypes.MeterUnitCount,
			Aggregation:          zeustypes.MeterAggregationSum,
			DBName:               telemetrymetrics.DBName,
			TableName:            telemetrymetrics.SamplesV4LocalTableName,
			DefaultRetentionDays: retentiontypes.DefaultMetricsRetentionDays,
		},
	},
}
|
||||
@@ -18,6 +18,9 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/metercollector/staticmetercollector"
|
||||
"github.com/SigNoz/signoz/ee/metercollector/telemetrymetercollector"
|
||||
"github.com/SigNoz/signoz/ee/meterreporter/httpmeterreporter"
|
||||
"github.com/SigNoz/signoz/ee/modules/cloudintegration/implcloudintegration"
|
||||
"github.com/SigNoz/signoz/ee/modules/cloudintegration/implcloudintegration/implcloudprovider"
|
||||
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
|
||||
@@ -36,14 +39,17 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
pkgflagger "github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
pkgcloudintegration "github.com/SigNoz/signoz/pkg/modules/cloudintegration/implcloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
@@ -161,6 +167,20 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
}
|
||||
return factories
|
||||
},
|
||||
func(ctx context.Context, providerSettings factory.ProviderSettings, flagger pkgflagger.Flagger, licensing licensing.Licensing, telemetryStore telemetrystore.TelemetryStore, retentionGetter retention.Getter, orgGetter organization.Getter, zeus zeus.Zeus) (factory.NamedMap[factory.ProviderFactory[meterreporter.Reporter, meterreporter.Config]], string) {
|
||||
factories := signoz.NewMeterReporterProviderFactories()
|
||||
|
||||
collectorFactories := factory.MustNewNamedMap(
|
||||
staticmetercollector.NewFactory(),
|
||||
telemetrymetercollector.NewFactory(telemetryStore, retentionGetter),
|
||||
)
|
||||
|
||||
if err := factories.Add(httpmeterreporter.NewFactory(collectorFactories, meterConfigs, flagger, licensing, orgGetter, zeus)); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return factories, "http"
|
||||
},
|
||||
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
|
||||
communityHandler := querier.NewHandler(ps, q, a)
|
||||
return eequerier.NewHandler(ps, q, communityHandler)
|
||||
|
||||
@@ -429,3 +429,10 @@ authz:
|
||||
openfga:
|
||||
# maximum tuples allowed per openfga write operation.
|
||||
max_tuples_per_write: 300
|
||||
|
||||
##################### Meter Reporter #####################
|
||||
meterreporter:
|
||||
# The interval between collection ticks. Minimum 5m.
|
||||
interval: 6h
|
||||
# Whether to backfill sealed days from the license creation day.
|
||||
backfill: true
|
||||
|
||||
61
ee/metercollector/staticmetercollector/provider.go
Normal file
61
ee/metercollector/staticmetercollector/provider.go
Normal file
@@ -0,0 +1,61 @@
|
||||
// Package staticmetercollector emits a fixed-value meter reading per org per window.
|
||||
package staticmetercollector
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/metercollector"
|
||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// Compile-time check that Provider satisfies metercollector.MeterCollector.
var _ metercollector.MeterCollector = (*Provider)(nil)

// Provider emits a fixed-value meter reading per org per window, driven
// entirely by its StaticConfig.
type Provider struct {
	settings factory.ScopedProviderSettings
	config   metercollector.StaticConfig
}
|
||||
|
||||
// NewFactory registers the static meter collector under the
// metercollector.ProviderStatic name; the returned factory builds a
// Provider from the Static section of the collector config.
func NewFactory() factory.ProviderFactory[metercollector.MeterCollector, metercollector.Config] {
	return factory.NewProviderFactory(factory.MustNewName(metercollector.ProviderStatic), func(ctx context.Context, providerSettings factory.ProviderSettings, config metercollector.Config) (metercollector.MeterCollector, error) {
		return newProvider(providerSettings, config.Static), nil
	},
	)
}
|
||||
|
||||
func newProvider(providerSettings factory.ProviderSettings, config metercollector.StaticConfig) *Provider {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/metercollector/staticmetercollector")
|
||||
|
||||
return &Provider{
|
||||
settings: settings,
|
||||
config: config,
|
||||
}
|
||||
}
|
||||
|
||||
// Name returns the configured meter name.
func (provider *Provider) Name() zeustypes.MeterName { return provider.config.Name }

// Unit returns the configured meter unit.
func (provider *Provider) Unit() zeustypes.MeterUnit { return provider.config.Unit }

// Aggregation returns the configured aggregation kind.
func (provider *Provider) Aggregation() zeustypes.MeterAggregation {
	return provider.config.Aggregation
}
|
||||
|
||||
// Origin returns the UTC start-of-day of the license's creation time — the
// earliest day this static meter can be reported for. It returns the zero
// time when there is no license or no creation timestamp.
func (provider *Provider) Origin(_ context.Context, _ valuer.UUID, license *licensetypes.License, _ time.Time) (time.Time, error) {
	if license == nil || license.CreatedAt.IsZero() {
		return time.Time{}, nil
	}

	createdAt := license.CreatedAt.UTC()
	return time.Date(createdAt.Year(), createdAt.Month(), createdAt.Day(), 0, 0, 0, 0, time.UTC), nil
}
|
||||
|
||||
// Collect emits a single meter carrying the configured constant value for the
// given org and window. Orgs without a keyed license produce no readings and
// no error.
func (provider *Provider) Collect(_ context.Context, orgID valuer.UUID, license *licensetypes.License, window zeustypes.MeterWindow) ([]zeustypes.Meter, error) {
	if license == nil || license.Key == "" {
		return nil, nil
	}

	return []zeustypes.Meter{
		zeustypes.NewMeter(provider.config.Name, provider.config.Value, provider.config.Unit, provider.config.Aggregation, window, zeustypes.NewDimensions(zeustypes.OrganizationID.String(orgID.StringValue()))),
	}, nil
}
|
||||
247
ee/metercollector/telemetrymetercollector/provider.go
Normal file
247
ee/metercollector/telemetrymetercollector/provider.go
Normal file
@@ -0,0 +1,247 @@
|
||||
// Package telemetrymetercollector collects telemetry meters (logs, traces, metrics)
|
||||
// by retention. One Provider materializes per TelemetryConfig.
|
||||
package telemetrymetercollector
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/metercollector"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
	// labelKeyPattern and labelValuePattern allow-list the characters
	// permitted in retention-filter keys and values before they are
	// interpolated into SQL (see buildRuleConditionSQL); anything outside
	// the pattern is rejected with an invalid-input error.
	labelKeyPattern   = regexp.MustCompile(`^[A-Za-z0-9_.\-]+$`)
	labelValuePattern = regexp.MustCompile(`^[A-Za-z0-9_.\-:]+$`)
)

// Compile-time check that Provider satisfies metercollector.MeterCollector.
var _ metercollector.MeterCollector = (*Provider)(nil)

// Provider collects one telemetry-backed meter (logs, traces, or metrics)
// from the meter samples table, grouped by the retention that applied to
// each row.
type Provider struct {
	settings        factory.ScopedProviderSettings
	config          metercollector.TelemetryConfig
	telemetryStore  telemetrystore.TelemetryStore
	retentionGetter retention.Getter
}
|
||||
|
||||
// NewFactory registers the telemetry meter collector under the
// metercollector.ProviderTelemetry name. One Provider is built per
// TelemetryConfig, all sharing the given ClickHouse store and retention
// getter.
func NewFactory(telemetryStore telemetrystore.TelemetryStore, retentionGetter retention.Getter) factory.ProviderFactory[metercollector.MeterCollector, metercollector.Config] {
	return factory.NewProviderFactory(factory.MustNewName(metercollector.ProviderTelemetry), func(ctx context.Context, providerSettings factory.ProviderSettings, config metercollector.Config) (metercollector.MeterCollector, error) {
		return newProvider(providerSettings, config.Telemetry, telemetryStore, retentionGetter), nil
	},
	)
}
|
||||
|
||||
// newProvider builds a telemetry collector whose settings are scoped to this
// package's import path.
func newProvider(
	providerSettings factory.ProviderSettings,
	config metercollector.TelemetryConfig,
	telemetryStore telemetrystore.TelemetryStore,
	retentionGetter retention.Getter,
) *Provider {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/metercollector/telemetrymetercollector")

	return &Provider{
		settings:        settings,
		config:          config,
		telemetryStore:  telemetryStore,
		retentionGetter: retentionGetter,
	}
}
|
||||
|
||||
// Name returns the configured meter name.
func (provider *Provider) Name() zeustypes.MeterName { return provider.config.Name }

// Unit returns the configured meter unit.
func (provider *Provider) Unit() zeustypes.MeterUnit { return provider.config.Unit }

// Aggregation returns the configured aggregation kind.
func (provider *Provider) Aggregation() zeustypes.MeterAggregation {
	return provider.config.Aggregation
}
|
||||
|
||||
// Origin returns the UTC start-of-day of the earliest sample recorded for
// this meter in ClickHouse. When no samples exist (the min comes back 0),
// collection starts at todayStart instead.
func (provider *Provider) Origin(ctx context.Context, _ valuer.UUID, _ *licensetypes.License, todayStart time.Time) (time.Time, error) {
	query, args := buildOriginQuery(provider.config.Name.String())

	var minMs int64
	if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, query, args...).Scan(&minMs); err != nil {
		return time.Time{}, err
	}
	if minMs == 0 {
		return todayStart, nil
	}

	minDay := time.UnixMilli(minMs).UTC()
	return time.Date(minDay.Year(), minDay.Month(), minDay.Day(), 0, 0, 0, 0, time.UTC), nil
}
|
||||
|
||||
// Collect sums this meter's samples over the window, split by the effective
// retention (in days) governing each slice of the window, and returns one
// Meter per distinct retention bucket. An empty window with at least one
// segment still emits a zero-valued sentinel so checkpoints can advance.
func (provider *Provider) Collect(
	ctx context.Context,
	orgID valuer.UUID,
	_ *licensetypes.License,
	window zeustypes.MeterWindow,
) ([]zeustypes.Meter, error) {
	meterName := provider.config.Name.String()

	// Split the window into segments, each governed by one retention policy
	// version for this org/table.
	segments, err := provider.retentionGetter.GetRetentionPolicySegments(
		ctx,
		orgID,
		provider.config.DBName,
		provider.config.TableName,
		provider.config.DefaultRetentionDays,
		window.StartUnixMilli,
		window.EndUnixMilli,
	)
	if err != nil {
		return nil, err
	}

	// Accumulate segment sums keyed by the retention days each row resolved to.
	valuesByRetentionDays := make(map[int]int64)

	for _, segment := range segments {
		query, args, err := buildQuery(meterName, segment)
		if err != nil {
			return nil, err
		}

		rows, err := provider.telemetryStore.ClickhouseDB().Query(ctx, query, args...)
		if err != nil {
			return nil, err
		}

		// Scan inside a closure so rows.Close runs per segment rather than
		// piling up until function exit.
		if err := func() error {
			defer rows.Close()
			for rows.Next() {
				var retentionDays int32
				var value int64

				if err := rows.Scan(&retentionDays, &value); err != nil {
					return err
				}

				valuesByRetentionDays[int(retentionDays)] += value
			}
			if err := rows.Err(); err != nil {
				return err
			}
			return nil
		}(); err != nil {
			return nil, err
		}
	}

	meters := make([]zeustypes.Meter, 0, len(valuesByRetentionDays))
	for retentionDays, value := range valuesByRetentionDays {
		meters = append(meters, zeustypes.NewMeter(provider.config.Name, value, provider.config.Unit, provider.config.Aggregation, window, buildDimensions(orgID, retentionDays)))
	}

	// Empty windows still emit a sentinel so checkpoints can advance.
	if len(meters) == 0 && len(segments) > 0 {
		meters = append(meters, zeustypes.NewMeter(provider.config.Name, 0, provider.config.Unit, provider.config.Aggregation, window, buildDimensions(orgID, segments[len(segments)-1].DefaultDays)))
	}

	return meters, nil
}
|
||||
|
||||
// buildOriginQuery builds the ClickHouse query that returns the earliest
// unix-milli sample timestamp for meterName, or 0 when no samples exist.
// The meter name is bound as a query argument, not interpolated.
func buildOriginQuery(meterName string) (string, []any) {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("toInt64(ifNull(min(unix_milli), 0))")
	sb.From(telemetrymeter.DBName + "." + telemetrymeter.SamplesTableName)
	sb.Where(sb.Equal("metric_name", meterName))
	return sb.BuildWithFlavor(sqlbuilder.ClickHouse)
}
|
||||
|
||||
// buildQuery builds the per-segment ClickHouse query: the sum of sample
// values over [segment.StartMs, segment.EndMs), grouped by the retention
// days each row's labels resolve to via the multiIf expression from
// buildRetentionMultiIfSQL.
func buildQuery(meterName string, segment *retentiontypes.RetentionPolicySegment) (string, []any, error) {
	retentionExpr, err := buildRetentionMultiIfSQL(segment.Rules, segment.DefaultDays)
	if err != nil {
		return "", nil, err
	}

	selects := []string{
		retentionExpr + " AS retention_days",
		"toInt64(ifNull(sum(value), 0)) AS value",
	}

	sb := sqlbuilder.NewSelectBuilder()
	sb.Select(selects...)
	sb.From(telemetrymeter.DBName + "." + telemetrymeter.SamplesTableName)
	sb.Where(
		sb.Equal("metric_name", meterName),
		sb.GTE("unix_milli", segment.StartMs),
		sb.LT("unix_milli", segment.EndMs),
	)
	sb.GroupBy("retention_days")
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return query, args, nil
}
|
||||
|
||||
// buildRetentionMultiIfSQL renders the custom retention rules as a ClickHouse
// multiIf() expression mapping each row to its TTL in days, with defaultDays
// as the final fallback arm. Rules are evaluated in order; the first matching
// condition wins. Non-positive TTLs or default produce an invalid-input error.
func buildRetentionMultiIfSQL(rules []retentiontypes.CustomRetentionRule, defaultDays int) (string, error) {
	if defaultDays <= 0 {
		return "", errors.Newf(errors.TypeInvalidInput, metercollector.ErrCodeMeterCollectorInvalidCustomRetentionRule, "non-positive default retention %d", defaultDays)
	}

	// With no rules there is nothing to branch on: a bare constant suffices.
	if len(rules) == 0 {
		return "toInt32(" + strconv.Itoa(defaultDays) + ")", nil
	}

	// multiIf takes (cond1, val1, cond2, val2, ..., default): two arms per
	// rule plus the trailing default.
	arms := make([]string, 0, 2*len(rules)+1)
	for ruleIndex, rule := range rules {
		if rule.TTLDays <= 0 {
			return "", errors.Newf(errors.TypeInvalidInput, metercollector.ErrCodeMeterCollectorInvalidCustomRetentionRule, "rule %d has non-positive ttl_days %d", ruleIndex, rule.TTLDays)
		}
		conditionExpr, err := buildRuleConditionSQL(ruleIndex, rule)
		if err != nil {
			return "", err
		}

		arms = append(arms, conditionExpr)
		arms = append(arms, strconv.Itoa(rule.TTLDays))
	}
	arms = append(arms, strconv.Itoa(defaultDays))

	return "toInt32(multiIf(" + strings.Join(arms, ", ") + "))", nil
}
|
||||
|
||||
// buildRuleConditionSQL renders one retention rule's filters as an AND-joined
// SQL condition of the form `JSONExtractString(labels, '<key>') IN (...)`.
// Keys and values are interpolated directly into the SQL string, so both are
// first validated against the strict allow-list patterns (labelKeyPattern,
// labelValuePattern) — rejecting quotes and other metacharacters — which is
// what keeps the interpolation injection-safe.
func buildRuleConditionSQL(ruleIndex int, rule retentiontypes.CustomRetentionRule) (string, error) {
	if len(rule.Filters) == 0 {
		return "", errors.Newf(errors.TypeInvalidInput, metercollector.ErrCodeMeterCollectorInvalidCustomRetentionRule, "rule %d has no filters", ruleIndex)
	}

	filterExprs := make([]string, 0, len(rule.Filters))
	for filterIndex, filter := range rule.Filters {
		if !labelKeyPattern.MatchString(filter.Key) {
			return "", errors.Newf(errors.TypeInvalidInput, metercollector.ErrCodeMeterCollectorInvalidCustomRetentionRule, "rule %d filter %d has invalid key %q", ruleIndex, filterIndex, filter.Key)
		}
		if len(filter.Values) == 0 {
			return "", errors.Newf(errors.TypeInvalidInput, metercollector.ErrCodeMeterCollectorInvalidCustomRetentionRule, "rule %d filter %d has no values", ruleIndex, filterIndex)
		}

		quoted := make([]string, len(filter.Values))
		for valueIndex, value := range filter.Values {
			if !labelValuePattern.MatchString(value) {
				return "", errors.Newf(errors.TypeInvalidInput, metercollector.ErrCodeMeterCollectorInvalidCustomRetentionRule, "rule %d filter %d value %d is invalid %q", ruleIndex, filterIndex, valueIndex, value)
			}
			quoted[valueIndex] = "'" + value + "'"
		}

		filterExprs = append(filterExprs, fmt.Sprintf("JSONExtractString(labels, '%s') IN (%s)", filter.Key, strings.Join(quoted, ", ")))
	}

	return strings.Join(filterExprs, " AND "), nil
}
|
||||
|
||||
func buildDimensions(orgID valuer.UUID, retentionDays int) map[string]string {
|
||||
retentionDurationSeconds := int64(retentionDays) * 24 * 60 * 60 // seconds
|
||||
|
||||
return zeustypes.NewDimensions(
|
||||
zeustypes.OrganizationID.String(orgID.StringValue()),
|
||||
zeustypes.RetentionDuration.String(strconv.FormatInt(retentionDurationSeconds, 10)),
|
||||
)
|
||||
}
|
||||
318
ee/meterreporter/httpmeterreporter/provider.go
Normal file
318
ee/meterreporter/httpmeterreporter/provider.go
Normal file
@@ -0,0 +1,318 @@
|
||||
package httpmeterreporter
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/metercollector"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/metric"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
)
|
||||
|
||||
// Compile-time check that Provider implements factory.ServiceWithHealthy.
var _ factory.ServiceWithHealthy = (*Provider)(nil)

// Provider periodically collects meters from the configured collectors and
// ships them to Zeus over HTTP, optionally backfilling sealed days back to
// the license creation day.
type Provider struct {
	settings         factory.ScopedProviderSettings
	config           meterreporter.Config
	collectorsByName map[zeustypes.MeterName]metercollector.MeterCollector // one collector per meter name
	flagger          flagger.Flagger
	licensing        licensing.Licensing
	orgGetter        organization.Getter
	zeus             zeus.Zeus
	healthyC         chan struct{} // closed once Start runs; exposed via Healthy
	stopC            chan struct{} // closed by Stop to end the ticker loop
	metrics          *reporterMetrics
}
|
||||
|
||||
// NewFactory registers the HTTP meter reporter under the "http" provider
// name, capturing the collector factories/configs and the dependencies the
// reporter needs at runtime.
func NewFactory(collectorFactories factory.NamedMap[factory.ProviderFactory[metercollector.MeterCollector, metercollector.Config]], collectorConfigs []metercollector.Config, flagger flagger.Flagger, licensing licensing.Licensing, orgGetter organization.Getter, zeus zeus.Zeus) factory.ProviderFactory[meterreporter.Reporter, meterreporter.Config] {
	return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config meterreporter.Config) (meterreporter.Reporter, error) {
		return newProvider(ctx, providerSettings, config, collectorFactories, collectorConfigs, flagger, licensing, orgGetter, zeus)
	},
	)
}
||||
|
||||
// newProvider instantiates one collector per config entry (rejecting
// duplicate meter names), wires up the reporter's self-observability
// counters, and returns the assembled Provider. Start must be called to
// begin the reporting loop.
func newProvider(
	ctx context.Context,
	providerSettings factory.ProviderSettings,
	config meterreporter.Config,
	collectorFactories factory.NamedMap[factory.ProviderFactory[metercollector.MeterCollector, metercollector.Config]],
	collectorConfigs []metercollector.Config,
	flagger flagger.Flagger,
	licensing licensing.Licensing,
	orgGetter organization.Getter,
	zeus zeus.Zeus,
) (*Provider, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/meterreporter/httpmeterreporter")

	// Materialize collectors keyed by meter name; each name must be unique.
	collectorsByName := map[zeustypes.MeterName]metercollector.MeterCollector{}
	for _, collectorConfig := range collectorConfigs {
		collector, err := factory.NewProviderFromNamedMap(ctx, providerSettings, collectorConfig, collectorFactories, collectorConfig.Provider)
		if err != nil {
			return nil, err
		}
		if _, exists := collectorsByName[collector.Name()]; exists {
			return nil, errors.Newf(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "duplicate meter collector %q", collector.Name())
		}
		collectorsByName[collector.Name()] = collector
	}

	metrics, err := newReporterMetrics(settings.Meter())
	if err != nil {
		return nil, err
	}

	return &Provider{
		settings:         settings,
		config:           config,
		collectorsByName: collectorsByName,
		flagger:          flagger,
		licensing:        licensing,
		orgGetter:        orgGetter,
		zeus:             zeus,
		healthyC:         make(chan struct{}),
		stopC:            make(chan struct{}),
		metrics:          metrics,
	}, nil
}
|
||||
|
||||
// Start marks the provider healthy, runs one immediate collection sweep, and
// then collects on every config.Interval tick until Stop is called.
//
// NOTE(review): the loop exits only via stopC and does not select on
// ctx.Done() — confirm the service lifecycle always calls Stop on shutdown.
func (provider *Provider) Start(ctx context.Context) error {
	close(provider.healthyC)

	provider.collect(ctx)

	ticker := time.NewTicker(provider.config.Interval)
	defer ticker.Stop()

	for {
		select {
		case <-provider.stopC:
			return nil
		case <-ticker.C:
			provider.collect(ctx)
		}
	}
}
|
||||
|
||||
// collect runs one reporting sweep across all owned orgs under a trace span.
// Orgs with the reporter feature disabled, or with no active license, are
// skipped. Errors are recorded on the span and logged; collect never returns
// an error because the next tick retries.
func (provider *Provider) collect(ctx context.Context) {
	ctx, span := provider.settings.Tracer().Start(ctx, "meterreporter.Collect", trace.WithAttributes(attribute.String("meterreporter.provider", "http")))
	defer span.End()

	orgs, err := provider.orgGetter.ListByOwnedKeyRange(ctx)
	if err != nil {
		span.RecordError(err)
		provider.settings.Logger().ErrorContext(ctx, "failed to get orgs data", errors.Attr(err))
		return
	}

	for _, org := range orgs {
		evalCtx := featuretypes.NewFlaggerEvaluationContext(org.ID)
		if !provider.flagger.BooleanOrEmpty(ctx, flagger.FeatureUseMeterReporter, evalCtx) {
			provider.settings.Logger().DebugContext(ctx, "meter reporter disabled for org, skipping reporting", slog.String("org_id", org.ID.StringValue()))
			continue
		}

		license, err := provider.licensing.GetActive(ctx, org.ID)
		if err != nil {
			if errors.Ast(err, errors.TypeNotFound) {
				provider.settings.Logger().DebugContext(ctx, "no active license found for org, skipping reporting", slog.String("org_id", org.ID.StringValue()))
				continue
			}

			span.RecordError(err)
			provider.settings.Logger().ErrorContext(ctx, "failed to fetch active license for org", errors.Attr(err), slog.String("org_id", org.ID.StringValue()))
			// NOTE(review): this aborts the whole sweep, leaving later orgs
			// unprocessed until the next tick — confirm that is intended
			// rather than `continue`.
			return
		}

		if err := provider.collectOrg(ctx, org, license); err != nil {
			span.RecordError(err)
			provider.settings.Logger().ErrorContext(ctx, "failed to collect meters", errors.Attr(err), slog.String("org_id", org.ID.StringValue()))
		}
	}
}
|
||||
|
||||
// Stop signals the Start loop to exit.
//
// NOTE(review): Stop is not idempotent — a second call panics on the double
// close of stopC. Confirm the lifecycle invokes it exactly once.
func (provider *Provider) Stop(ctx context.Context) error {
	close(provider.stopC)
	return nil
}
|
||||
|
||||
// Healthy returns a channel that is closed once Start has begun running.
func (provider *Provider) Healthy() <-chan struct{} {
	return provider.healthyC
}
|
||||
|
||||
// collectOrg reports meters for one org: when backfill is enabled it first
// replays any sealed days (from license creation / last checkpoint up to
// yesterday), then reports today's partial window with every collector.
func (provider *Provider) collectOrg(ctx context.Context, org *types.Organization, license *licensetypes.License) error {
	now := time.Now().UTC()
	// Use one timestamp so a tick cannot straddle midnight.
	todayStart := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.UTC)

	if provider.config.Backfill {
		checkpointsByMeter, err := provider.checkpoints(ctx, license.Key)
		if err != nil {
			return err
		}

		// Per-collector first day still needing a report.
		nextByCollector := provider.nextDays(license, todayStart, checkpointsByMeter)

		start, end, ok := backfillRange(nextByCollector, todayStart)
		if ok {
			// Walk each sealed day, reporting only the collectors whose
			// next reportable day has been reached.
			for day := start; !day.After(end); day = day.AddDate(0, 0, 1) {
				eligible := eligibleCollectors(provider.collectorsByName, nextByCollector, day)
				if len(eligible) == 0 {
					continue
				}

				// Sealed window: [day, day+1).
				window, err := zeustypes.NewMeterWindow(day.UnixMilli(), day.AddDate(0, 0, 1).UnixMilli(), true)
				if err != nil {
					return err
				}

				if err := provider.report(ctx, org.ID, license, window, eligible); err != nil {
					provider.settings.Logger().WarnContext(ctx, "failed to backfill for day", errors.Attr(err), slog.String("date", day.Format("2006-01-02")))
					return err
				}
			}
		}
	}

	// Today's partial window: every collector is always eligible (next <= today).
	if now.UnixMilli() > todayStart.UnixMilli() {
		todayWindow, err := zeustypes.NewMeterWindow(todayStart.UnixMilli(), now.UnixMilli(), false)
		if err != nil {
			return err
		}

		return provider.report(ctx, org.ID, license, todayWindow, provider.collectorsByName)
	}

	return nil
}
|
||||
|
||||
// checkpoints fetches the per-meter reporting checkpoints from Zeus for the
// given license key and returns them keyed by meter name, with start dates
// normalized to UTC. Every attempt (success or failure) increments the
// checkpoints counter, tagged with the error type on failure.
func (provider *Provider) checkpoints(ctx context.Context, licenseKey string) (map[string]time.Time, error) {
	list, err := provider.zeus.ListMeterCheckpoints(ctx, licenseKey)
	if err != nil {
		provider.metrics.checkpoints.Add(ctx, 1, metric.WithAttributes(errors.TypeAttr(err)))
		return nil, err
	}

	provider.metrics.checkpoints.Add(ctx, 1)

	checkpointsByMeter := make(map[string]time.Time, len(list))
	for _, checkpoint := range list {
		checkpointsByMeter[checkpoint.Name] = checkpoint.StartDate.UTC()
	}

	return checkpointsByMeter, nil
}
|
||||
|
||||
// nextDays computes, for every collector, the first day that still needs
// reporting: the license-creation day (UTC, truncated to midnight) advanced
// past any existing checkpoint — see nextReportableDay.
func (provider *Provider) nextDays(license *licensetypes.License, todayStart time.Time, checkpointsByMeter map[string]time.Time) map[zeustypes.MeterName]time.Time {
	nextByCollector := make(map[zeustypes.MeterName]time.Time, len(provider.collectorsByName))
	licenseCreatedAt := license.CreatedAt.UTC()
	licenseCreatedAtDay := time.Date(licenseCreatedAt.Year(), licenseCreatedAt.Month(), licenseCreatedAt.Day(), 0, 0, 0, 0, time.UTC)

	for _, collector := range provider.collectorsByName {
		checkpoint, hasCheckpoint := checkpointsByMeter[collector.Name().String()]
		nextByCollector[collector.Name()] = nextReportableDay(licenseCreatedAtDay, todayStart, checkpoint, hasCheckpoint)
	}

	return nextByCollector
}
|
||||
|
||||
func nextReportableDay(licenseCreatedAtDay time.Time, todayStart time.Time, checkpoint time.Time, hasCheckpoint bool) time.Time {
|
||||
next := licenseCreatedAtDay
|
||||
if next.IsZero() {
|
||||
next = todayStart
|
||||
}
|
||||
|
||||
if hasCheckpoint {
|
||||
checkpointNext := checkpoint.AddDate(0, 0, 1)
|
||||
if checkpointNext.After(next) {
|
||||
next = checkpointNext
|
||||
}
|
||||
}
|
||||
|
||||
return next
|
||||
}
|
||||
|
||||
// report collects readings from the given collectors for one window and
// ships them to Zeus as a single JSON batch. Per-collector failures are
// counted and skipped; marshalling or upload failures abort the batch.
// Reporting nothing (no readings at all) is a successful no-op.
//
// NOTE(review): the idempotency key encodes only the window's start date —
// confirm Zeus additionally scopes de-duplication by license key.
func (provider *Provider) report(ctx context.Context, orgID valuer.UUID, license *licensetypes.License, window zeustypes.MeterWindow, collectors map[zeustypes.MeterName]metercollector.MeterCollector) error {
	date := time.UnixMilli(window.StartUnixMilli).UTC().Format("2006-01-02")

	meters := make([]zeustypes.Meter, 0, len(collectors))
	for _, collector := range collectors {
		meterAttr := attribute.String("signoz.meter.name", collector.Name().String())
		collectedReadings, err := collector.Collect(ctx, orgID, license, window)
		if err != nil {
			// A failing collector doesn't block the others this window.
			provider.metrics.collections.Add(ctx, 1, metric.WithAttributes(meterAttr, errors.TypeAttr(err)))
			continue
		}

		provider.metrics.collections.Add(ctx, 1, metric.WithAttributes(meterAttr))
		meters = append(meters, collectedReadings...)
	}

	if len(meters) == 0 {
		return nil
	}

	idempotencyKey := fmt.Sprintf("meterreporter:%s", date)

	body, err := json.Marshal(meters)
	if err != nil {
		provider.metrics.reports.Add(ctx, 1, metric.WithAttributes(errors.TypeAttr(err)))
		return err
	}

	if err := provider.zeus.PutMetersV3(ctx, license.Key, idempotencyKey, body); err != nil {
		provider.metrics.reports.Add(ctx, 1, metric.WithAttributes(errors.TypeAttr(err)))
		return err
	}

	provider.metrics.reports.Add(ctx, 1)
	provider.metrics.meters.Add(ctx, int64(len(meters)))
	return nil
}
|
||||
|
||||
// backfillRange returns the inclusive sealed-day range ending at yesterday.
|
||||
func backfillRange(nextByCollector map[zeustypes.MeterName]time.Time, todayStart time.Time) (start, end time.Time, ok bool) {
|
||||
yesterday := todayStart.AddDate(0, 0, -1)
|
||||
|
||||
for _, next := range nextByCollector {
|
||||
if !next.Before(todayStart) {
|
||||
continue
|
||||
}
|
||||
if start.IsZero() || next.Before(start) {
|
||||
start = next
|
||||
}
|
||||
}
|
||||
|
||||
if start.IsZero() || start.After(yesterday) {
|
||||
return time.Time{}, time.Time{}, false
|
||||
}
|
||||
|
||||
return start, yesterday, true
|
||||
}
|
||||
|
||||
func eligibleCollectors(collectors map[zeustypes.MeterName]metercollector.MeterCollector, nextByCollector map[zeustypes.MeterName]time.Time, day time.Time) map[zeustypes.MeterName]metercollector.MeterCollector {
|
||||
eligible := make(map[zeustypes.MeterName]metercollector.MeterCollector, len(collectors))
|
||||
for name, collector := range collectors {
|
||||
if !nextByCollector[name].After(day) {
|
||||
eligible[name] = collector
|
||||
}
|
||||
}
|
||||
|
||||
return eligible
|
||||
}
|
||||
48
ee/meterreporter/httpmeterreporter/telemetry.go
Normal file
48
ee/meterreporter/httpmeterreporter/telemetry.go
Normal file
@@ -0,0 +1,48 @@
|
||||
package httpmeterreporter
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"go.opentelemetry.io/otel/metric"
|
||||
)
|
||||
|
||||
type reporterMetrics struct {
|
||||
checkpoints metric.Int64Counter
|
||||
reports metric.Int64Counter
|
||||
collections metric.Int64Counter
|
||||
meters metric.Int64Counter
|
||||
}
|
||||
|
||||
func newReporterMetrics(meter metric.Meter) (*reporterMetrics, error) {
|
||||
var errs error
|
||||
|
||||
checkpoints, err := meter.Int64Counter("signoz.meterreporter.checkpoints", metric.WithDescription("Zeus meter checkpoint fetches."), metric.WithUnit("{checkpoint}"))
|
||||
if err != nil {
|
||||
errs = errors.Join(errs, err)
|
||||
}
|
||||
|
||||
reports, err := meter.Int64Counter("signoz.meterreporter.reports", metric.WithDescription("Meter reports shipped to Zeus."), metric.WithUnit("{report}"))
|
||||
if err != nil {
|
||||
errs = errors.Join(errs, err)
|
||||
}
|
||||
|
||||
collections, err := meter.Int64Counter("signoz.meterreporter.collections", metric.WithDescription("Per-meter collect calls."), metric.WithUnit("{collection}"))
|
||||
if err != nil {
|
||||
errs = errors.Join(errs, err)
|
||||
}
|
||||
|
||||
meters, err := meter.Int64Counter("signoz.meterreporter.meters", metric.WithDescription("Meter readings shipped to Zeus."), metric.WithUnit("{meter}"))
|
||||
if err != nil {
|
||||
errs = errors.Join(errs, err)
|
||||
}
|
||||
|
||||
if errs != nil {
|
||||
return nil, errs
|
||||
}
|
||||
|
||||
return &reporterMetrics{
|
||||
checkpoints: checkpoints,
|
||||
reports: reports,
|
||||
collections: collections,
|
||||
meters: meters,
|
||||
}, nil
|
||||
}
|
||||
@@ -150,6 +150,72 @@ func (provider *Provider) PutMetersV2(ctx context.Context, key string, data []by
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *Provider) PutMetersV3(ctx context.Context, key string, idempotencyKey string, data []byte) error {
|
||||
headers := http.Header{}
|
||||
if idempotencyKey != "" {
|
||||
headers.Set("X-Idempotency-Key", idempotencyKey)
|
||||
}
|
||||
|
||||
_, err := provider.doWithHeaders(
|
||||
ctx,
|
||||
provider.config.URL.JoinPath("/v2/meters"),
|
||||
http.MethodPost,
|
||||
key,
|
||||
data,
|
||||
headers,
|
||||
)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *Provider) ListMeterCheckpoints(ctx context.Context, key string) ([]zeustypes.MeterCheckpoint, error) {
|
||||
response, err := provider.do(
|
||||
ctx,
|
||||
provider.config.URL.JoinPath("/v2/meters/checkpoints"),
|
||||
http.MethodGet,
|
||||
key,
|
||||
nil,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
checkpointValues := gjson.GetBytes(response, "data")
|
||||
if !checkpointValues.Exists() || checkpointValues.Type == gjson.Null {
|
||||
return nil, errors.Newf(errors.TypeInternal, zeus.ErrCodeResponseMalformed, "meter checkpoints are required")
|
||||
}
|
||||
|
||||
if !checkpointValues.IsArray() {
|
||||
return nil, errors.Newf(errors.TypeInternal, zeus.ErrCodeResponseMalformed, "meter checkpoints must be an array")
|
||||
}
|
||||
|
||||
checkpointResults := checkpointValues.Array()
|
||||
checkpoints := make([]zeustypes.MeterCheckpoint, 0, len(checkpointResults))
|
||||
for _, checkpointValue := range checkpointResults {
|
||||
name := checkpointValue.Get("name").String()
|
||||
if name == "" {
|
||||
return nil, errors.Newf(errors.TypeInternal, zeus.ErrCodeResponseMalformed, "meter checkpoint name is required")
|
||||
}
|
||||
|
||||
startDateString := checkpointValue.Get("start_date").String()
|
||||
if startDateString == "" {
|
||||
return nil, errors.Newf(errors.TypeInternal, zeus.ErrCodeResponseMalformed, "meter checkpoint start_date is required for %q", name)
|
||||
}
|
||||
|
||||
startDate, err := time.Parse("2006-01-02", startDateString)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, zeus.ErrCodeResponseMalformed, "parse meter checkpoint start_date %q for %q", startDateString, name)
|
||||
}
|
||||
|
||||
checkpoints = append(checkpoints, zeustypes.MeterCheckpoint{
|
||||
Name: name,
|
||||
StartDate: startDate,
|
||||
})
|
||||
}
|
||||
|
||||
return checkpoints, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
|
||||
body, err := json.Marshal(profile)
|
||||
if err != nil {
|
||||
@@ -185,12 +251,21 @@ func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustyp
|
||||
}
|
||||
|
||||
// do issues an authenticated request to Zeus with no extra headers. It is a
// thin convenience wrapper around doWithHeaders.
func (provider *Provider) do(ctx context.Context, url *url.URL, method string, key string, requestBody []byte) ([]byte, error) {
	return provider.doWithHeaders(ctx, url, method, key, requestBody, nil)
}
func (provider *Provider) doWithHeaders(ctx context.Context, url *url.URL, method string, key string, requestBody []byte, extraHeaders http.Header) ([]byte, error) {
|
||||
request, err := http.NewRequestWithContext(ctx, method, url.String(), bytes.NewBuffer(requestBody))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
request.Header.Set("X-Signoz-Cloud-Api-Key", key)
|
||||
request.Header.Set("Content-Type", "application/json")
|
||||
for k, vs := range extraHeaders {
|
||||
for _, v := range vs {
|
||||
request.Header.Add(k, v)
|
||||
}
|
||||
}
|
||||
|
||||
response, err := provider.httpClient.Do(request)
|
||||
if err != nil {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -24,12 +24,13 @@ HASTOKEN=23
|
||||
HAS=24
|
||||
HASANY=25
|
||||
HASALL=26
|
||||
BOOL=27
|
||||
NUMBER=28
|
||||
QUOTED_TEXT=29
|
||||
KEY=30
|
||||
WS=31
|
||||
FREETEXT=32
|
||||
SEARCH=27
|
||||
BOOL=28
|
||||
NUMBER=29
|
||||
QUOTED_TEXT=30
|
||||
KEY=31
|
||||
WS=32
|
||||
FREETEXT=33
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -24,12 +24,13 @@ HASTOKEN=23
|
||||
HAS=24
|
||||
HASANY=25
|
||||
HASALL=26
|
||||
BOOL=27
|
||||
NUMBER=28
|
||||
QUOTED_TEXT=29
|
||||
KEY=30
|
||||
WS=31
|
||||
FREETEXT=32
|
||||
SEARCH=27
|
||||
BOOL=28
|
||||
NUMBER=29
|
||||
QUOTED_TEXT=30
|
||||
KEY=31
|
||||
WS=32
|
||||
FREETEXT=33
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
// Generated from grammar/FilterQuery.g4 by ANTLR 4.13.2
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
import {
|
||||
ATN,
|
||||
@@ -38,12 +38,13 @@ export default class FilterQueryLexer extends Lexer {
|
||||
public static readonly HAS = 24;
|
||||
public static readonly HASANY = 25;
|
||||
public static readonly HASALL = 26;
|
||||
public static readonly BOOL = 27;
|
||||
public static readonly NUMBER = 28;
|
||||
public static readonly QUOTED_TEXT = 29;
|
||||
public static readonly KEY = 30;
|
||||
public static readonly WS = 31;
|
||||
public static readonly FREETEXT = 32;
|
||||
public static readonly SEARCH = 27;
|
||||
public static readonly BOOL = 28;
|
||||
public static readonly NUMBER = 29;
|
||||
public static readonly QUOTED_TEXT = 30;
|
||||
public static readonly KEY = 31;
|
||||
public static readonly WS = 32;
|
||||
public static readonly FREETEXT = 33;
|
||||
public static readonly EOF = Token.EOF;
|
||||
|
||||
public static readonly channelNames: string[] = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ];
|
||||
@@ -68,8 +69,9 @@ export default class FilterQueryLexer extends Lexer {
|
||||
"AND", "OR",
|
||||
"HASTOKEN",
|
||||
"HAS", "HASANY",
|
||||
"HASALL", "BOOL",
|
||||
"NUMBER", "QUOTED_TEXT",
|
||||
"HASALL", "SEARCH",
|
||||
"BOOL", "NUMBER",
|
||||
"QUOTED_TEXT",
|
||||
"KEY", "WS",
|
||||
"FREETEXT" ];
|
||||
public static readonly modeNames: string[] = [ "DEFAULT_MODE", ];
|
||||
@@ -78,8 +80,8 @@ export default class FilterQueryLexer extends Lexer {
|
||||
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "ILIKE", "BETWEEN", "EXISTS", "REGEXP",
|
||||
"CONTAINS", "IN", "NOT", "AND", "OR", "HASTOKEN", "HAS", "HASANY", "HASALL",
|
||||
"BOOL", "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS",
|
||||
"KEY", "WS", "DIGIT", "FREETEXT",
|
||||
"SEARCH", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS",
|
||||
"OLD_JSON_BRACKS", "KEY", "WS", "DIGIT", "FREETEXT",
|
||||
];
|
||||
|
||||
|
||||
@@ -100,119 +102,122 @@ export default class FilterQueryLexer extends Lexer {
|
||||
|
||||
public get modeNames(): string[] { return FilterQueryLexer.modeNames; }
|
||||
|
||||
public static readonly _serializedATN: number[] = [4,0,32,320,6,-1,2,0,
|
||||
public static readonly _serializedATN: number[] = [4,0,33,329,6,-1,2,0,
|
||||
7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9,
|
||||
7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7,
|
||||
16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,
|
||||
2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,
|
||||
31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,1,0,1,0,1,1,1,
|
||||
1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,89,8,5,1,6,1,6,1,6,1,7,1,7,1,
|
||||
7,1,8,1,8,1,9,1,9,1,9,1,10,1,10,1,11,1,11,1,11,1,12,1,12,1,12,1,12,1,12,
|
||||
1,13,1,13,1,13,1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,
|
||||
15,1,15,1,15,1,15,1,15,1,15,3,15,132,8,15,1,16,1,16,1,16,1,16,1,16,1,16,
|
||||
1,16,1,17,1,17,1,17,1,17,1,17,1,17,1,17,1,17,3,17,149,8,17,1,18,1,18,1,
|
||||
18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,21,1,21,1,21,1,22,1,22,1,22,
|
||||
1,22,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24,1,24,1,24,1,
|
||||
24,1,24,1,24,1,25,1,25,1,25,1,25,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,26,
|
||||
1,26,1,26,1,26,1,26,3,26,201,8,26,1,27,1,27,1,28,3,28,206,8,28,1,28,4,28,
|
||||
209,8,28,11,28,12,28,210,1,28,1,28,5,28,215,8,28,10,28,12,28,218,9,28,3,
|
||||
28,220,8,28,1,28,1,28,3,28,224,8,28,1,28,4,28,227,8,28,11,28,12,28,228,
|
||||
3,28,231,8,28,1,28,3,28,234,8,28,1,28,1,28,4,28,238,8,28,11,28,12,28,239,
|
||||
1,28,1,28,3,28,244,8,28,1,28,4,28,247,8,28,11,28,12,28,248,3,28,251,8,28,
|
||||
3,28,253,8,28,1,29,1,29,1,29,1,29,5,29,259,8,29,10,29,12,29,262,9,29,1,
|
||||
29,1,29,1,29,1,29,1,29,5,29,269,8,29,10,29,12,29,272,9,29,1,29,3,29,275,
|
||||
8,29,1,30,1,30,5,30,279,8,30,10,30,12,30,282,9,30,1,31,1,31,1,31,1,32,1,
|
||||
32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,298,8,33,11,33,12,
|
||||
33,299,5,33,302,8,33,10,33,12,33,305,9,33,1,34,4,34,308,8,34,11,34,12,34,
|
||||
309,1,34,1,34,1,35,1,35,1,36,4,36,317,8,36,11,36,12,36,318,0,0,37,1,1,3,
|
||||
2,5,3,7,4,9,5,11,6,13,7,15,8,17,9,19,10,21,11,23,12,25,13,27,14,29,15,31,
|
||||
16,33,17,35,18,37,19,39,20,41,21,43,22,45,23,47,24,49,25,51,26,53,27,55,
|
||||
0,57,28,59,29,61,0,63,0,65,0,67,30,69,31,71,0,73,32,1,0,29,2,0,76,76,108,
|
||||
108,2,0,73,73,105,105,2,0,75,75,107,107,2,0,69,69,101,101,2,0,66,66,98,
|
||||
98,2,0,84,84,116,116,2,0,87,87,119,119,2,0,78,78,110,110,2,0,88,88,120,
|
||||
120,2,0,83,83,115,115,2,0,82,82,114,114,2,0,71,71,103,103,2,0,80,80,112,
|
||||
112,2,0,67,67,99,99,2,0,79,79,111,111,2,0,65,65,97,97,2,0,68,68,100,100,
|
||||
2,0,72,72,104,104,2,0,89,89,121,121,2,0,85,85,117,117,2,0,70,70,102,102,
|
||||
2,0,43,43,45,45,2,0,34,34,92,92,2,0,39,39,92,92,4,0,35,36,64,90,95,95,97,
|
||||
123,7,0,35,36,45,45,47,58,64,90,95,95,97,123,125,125,3,0,9,10,13,13,32,
|
||||
32,1,0,48,57,8,0,9,10,13,13,32,34,39,41,44,44,60,62,91,91,93,93,344,0,1,
|
||||
1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,
|
||||
13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,
|
||||
0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,
|
||||
35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,
|
||||
0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,57,1,0,0,0,0,
|
||||
59,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,73,1,0,0,0,1,75,1,0,0,0,3,77,1,0,
|
||||
0,0,5,79,1,0,0,0,7,81,1,0,0,0,9,83,1,0,0,0,11,88,1,0,0,0,13,90,1,0,0,0,
|
||||
15,93,1,0,0,0,17,96,1,0,0,0,19,98,1,0,0,0,21,101,1,0,0,0,23,103,1,0,0,0,
|
||||
25,106,1,0,0,0,27,111,1,0,0,0,29,117,1,0,0,0,31,125,1,0,0,0,33,133,1,0,
|
||||
0,0,35,140,1,0,0,0,37,150,1,0,0,0,39,153,1,0,0,0,41,157,1,0,0,0,43,161,
|
||||
1,0,0,0,45,164,1,0,0,0,47,173,1,0,0,0,49,177,1,0,0,0,51,184,1,0,0,0,53,
|
||||
200,1,0,0,0,55,202,1,0,0,0,57,252,1,0,0,0,59,274,1,0,0,0,61,276,1,0,0,0,
|
||||
63,283,1,0,0,0,65,286,1,0,0,0,67,290,1,0,0,0,69,307,1,0,0,0,71,313,1,0,
|
||||
0,0,73,316,1,0,0,0,75,76,5,40,0,0,76,2,1,0,0,0,77,78,5,41,0,0,78,4,1,0,
|
||||
0,0,79,80,5,91,0,0,80,6,1,0,0,0,81,82,5,93,0,0,82,8,1,0,0,0,83,84,5,44,
|
||||
0,0,84,10,1,0,0,0,85,89,5,61,0,0,86,87,5,61,0,0,87,89,5,61,0,0,88,85,1,
|
||||
0,0,0,88,86,1,0,0,0,89,12,1,0,0,0,90,91,5,33,0,0,91,92,5,61,0,0,92,14,1,
|
||||
0,0,0,93,94,5,60,0,0,94,95,5,62,0,0,95,16,1,0,0,0,96,97,5,60,0,0,97,18,
|
||||
1,0,0,0,98,99,5,60,0,0,99,100,5,61,0,0,100,20,1,0,0,0,101,102,5,62,0,0,
|
||||
102,22,1,0,0,0,103,104,5,62,0,0,104,105,5,61,0,0,105,24,1,0,0,0,106,107,
|
||||
7,0,0,0,107,108,7,1,0,0,108,109,7,2,0,0,109,110,7,3,0,0,110,26,1,0,0,0,
|
||||
111,112,7,1,0,0,112,113,7,0,0,0,113,114,7,1,0,0,114,115,7,2,0,0,115,116,
|
||||
7,3,0,0,116,28,1,0,0,0,117,118,7,4,0,0,118,119,7,3,0,0,119,120,7,5,0,0,
|
||||
120,121,7,6,0,0,121,122,7,3,0,0,122,123,7,3,0,0,123,124,7,7,0,0,124,30,
|
||||
1,0,0,0,125,126,7,3,0,0,126,127,7,8,0,0,127,128,7,1,0,0,128,129,7,9,0,0,
|
||||
129,131,7,5,0,0,130,132,7,9,0,0,131,130,1,0,0,0,131,132,1,0,0,0,132,32,
|
||||
1,0,0,0,133,134,7,10,0,0,134,135,7,3,0,0,135,136,7,11,0,0,136,137,7,3,0,
|
||||
0,137,138,7,8,0,0,138,139,7,12,0,0,139,34,1,0,0,0,140,141,7,13,0,0,141,
|
||||
142,7,14,0,0,142,143,7,7,0,0,143,144,7,5,0,0,144,145,7,15,0,0,145,146,7,
|
||||
1,0,0,146,148,7,7,0,0,147,149,7,9,0,0,148,147,1,0,0,0,148,149,1,0,0,0,149,
|
||||
36,1,0,0,0,150,151,7,1,0,0,151,152,7,7,0,0,152,38,1,0,0,0,153,154,7,7,0,
|
||||
0,154,155,7,14,0,0,155,156,7,5,0,0,156,40,1,0,0,0,157,158,7,15,0,0,158,
|
||||
159,7,7,0,0,159,160,7,16,0,0,160,42,1,0,0,0,161,162,7,14,0,0,162,163,7,
|
||||
10,0,0,163,44,1,0,0,0,164,165,7,17,0,0,165,166,7,15,0,0,166,167,7,9,0,0,
|
||||
167,168,7,5,0,0,168,169,7,14,0,0,169,170,7,2,0,0,170,171,7,3,0,0,171,172,
|
||||
7,7,0,0,172,46,1,0,0,0,173,174,7,17,0,0,174,175,7,15,0,0,175,176,7,9,0,
|
||||
0,176,48,1,0,0,0,177,178,7,17,0,0,178,179,7,15,0,0,179,180,7,9,0,0,180,
|
||||
181,7,15,0,0,181,182,7,7,0,0,182,183,7,18,0,0,183,50,1,0,0,0,184,185,7,
|
||||
17,0,0,185,186,7,15,0,0,186,187,7,9,0,0,187,188,7,15,0,0,188,189,7,0,0,
|
||||
0,189,190,7,0,0,0,190,52,1,0,0,0,191,192,7,5,0,0,192,193,7,10,0,0,193,194,
|
||||
7,19,0,0,194,201,7,3,0,0,195,196,7,20,0,0,196,197,7,15,0,0,197,198,7,0,
|
||||
0,0,198,199,7,9,0,0,199,201,7,3,0,0,200,191,1,0,0,0,200,195,1,0,0,0,201,
|
||||
54,1,0,0,0,202,203,7,21,0,0,203,56,1,0,0,0,204,206,3,55,27,0,205,204,1,
|
||||
0,0,0,205,206,1,0,0,0,206,208,1,0,0,0,207,209,3,71,35,0,208,207,1,0,0,0,
|
||||
209,210,1,0,0,0,210,208,1,0,0,0,210,211,1,0,0,0,211,219,1,0,0,0,212,216,
|
||||
5,46,0,0,213,215,3,71,35,0,214,213,1,0,0,0,215,218,1,0,0,0,216,214,1,0,
|
||||
0,0,216,217,1,0,0,0,217,220,1,0,0,0,218,216,1,0,0,0,219,212,1,0,0,0,219,
|
||||
220,1,0,0,0,220,230,1,0,0,0,221,223,7,3,0,0,222,224,3,55,27,0,223,222,1,
|
||||
0,0,0,223,224,1,0,0,0,224,226,1,0,0,0,225,227,3,71,35,0,226,225,1,0,0,0,
|
||||
227,228,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,231,1,0,0,0,230,221,
|
||||
1,0,0,0,230,231,1,0,0,0,231,253,1,0,0,0,232,234,3,55,27,0,233,232,1,0,0,
|
||||
0,233,234,1,0,0,0,234,235,1,0,0,0,235,237,5,46,0,0,236,238,3,71,35,0,237,
|
||||
236,1,0,0,0,238,239,1,0,0,0,239,237,1,0,0,0,239,240,1,0,0,0,240,250,1,0,
|
||||
0,0,241,243,7,3,0,0,242,244,3,55,27,0,243,242,1,0,0,0,243,244,1,0,0,0,244,
|
||||
246,1,0,0,0,245,247,3,71,35,0,246,245,1,0,0,0,247,248,1,0,0,0,248,246,1,
|
||||
0,0,0,248,249,1,0,0,0,249,251,1,0,0,0,250,241,1,0,0,0,250,251,1,0,0,0,251,
|
||||
253,1,0,0,0,252,205,1,0,0,0,252,233,1,0,0,0,253,58,1,0,0,0,254,260,5,34,
|
||||
0,0,255,259,8,22,0,0,256,257,5,92,0,0,257,259,9,0,0,0,258,255,1,0,0,0,258,
|
||||
256,1,0,0,0,259,262,1,0,0,0,260,258,1,0,0,0,260,261,1,0,0,0,261,263,1,0,
|
||||
0,0,262,260,1,0,0,0,263,275,5,34,0,0,264,270,5,39,0,0,265,269,8,23,0,0,
|
||||
266,267,5,92,0,0,267,269,9,0,0,0,268,265,1,0,0,0,268,266,1,0,0,0,269,272,
|
||||
1,0,0,0,270,268,1,0,0,0,270,271,1,0,0,0,271,273,1,0,0,0,272,270,1,0,0,0,
|
||||
273,275,5,39,0,0,274,254,1,0,0,0,274,264,1,0,0,0,275,60,1,0,0,0,276,280,
|
||||
7,24,0,0,277,279,7,25,0,0,278,277,1,0,0,0,279,282,1,0,0,0,280,278,1,0,0,
|
||||
0,280,281,1,0,0,0,281,62,1,0,0,0,282,280,1,0,0,0,283,284,5,91,0,0,284,285,
|
||||
5,93,0,0,285,64,1,0,0,0,286,287,5,91,0,0,287,288,5,42,0,0,288,289,5,93,
|
||||
0,0,289,66,1,0,0,0,290,303,3,61,30,0,291,292,5,46,0,0,292,302,3,61,30,0,
|
||||
293,302,3,63,31,0,294,302,3,65,32,0,295,297,5,46,0,0,296,298,3,71,35,0,
|
||||
297,296,1,0,0,0,298,299,1,0,0,0,299,297,1,0,0,0,299,300,1,0,0,0,300,302,
|
||||
1,0,0,0,301,291,1,0,0,0,301,293,1,0,0,0,301,294,1,0,0,0,301,295,1,0,0,0,
|
||||
302,305,1,0,0,0,303,301,1,0,0,0,303,304,1,0,0,0,304,68,1,0,0,0,305,303,
|
||||
1,0,0,0,306,308,7,26,0,0,307,306,1,0,0,0,308,309,1,0,0,0,309,307,1,0,0,
|
||||
0,309,310,1,0,0,0,310,311,1,0,0,0,311,312,6,34,0,0,312,70,1,0,0,0,313,314,
|
||||
7,27,0,0,314,72,1,0,0,0,315,317,8,28,0,0,316,315,1,0,0,0,317,318,1,0,0,
|
||||
0,318,316,1,0,0,0,318,319,1,0,0,0,319,74,1,0,0,0,29,0,88,131,148,200,205,
|
||||
210,216,219,223,228,230,233,239,243,248,250,252,258,260,268,270,274,280,
|
||||
299,301,303,309,318,1,6,0,0];
|
||||
31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,2,37,7,37,1,0,
|
||||
1,0,1,1,1,1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,91,8,5,1,6,1,6,1,6,
|
||||
1,7,1,7,1,7,1,8,1,8,1,9,1,9,1,9,1,10,1,10,1,11,1,11,1,11,1,12,1,12,1,12,
|
||||
1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,
|
||||
14,1,14,1,15,1,15,1,15,1,15,1,15,1,15,3,15,134,8,15,1,16,1,16,1,16,1,16,
|
||||
1,16,1,16,1,16,1,17,1,17,1,17,1,17,1,17,1,17,1,17,1,17,3,17,151,8,17,1,
|
||||
18,1,18,1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,21,1,21,1,21,1,22,
|
||||
1,22,1,22,1,22,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24,1,
|
||||
24,1,24,1,24,1,24,1,24,1,25,1,25,1,25,1,25,1,25,1,25,1,25,1,26,1,26,1,26,
|
||||
1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,3,27,210,
|
||||
8,27,1,28,1,28,1,29,3,29,215,8,29,1,29,4,29,218,8,29,11,29,12,29,219,1,
|
||||
29,1,29,5,29,224,8,29,10,29,12,29,227,9,29,3,29,229,8,29,1,29,1,29,3,29,
|
||||
233,8,29,1,29,4,29,236,8,29,11,29,12,29,237,3,29,240,8,29,1,29,3,29,243,
|
||||
8,29,1,29,1,29,4,29,247,8,29,11,29,12,29,248,1,29,1,29,3,29,253,8,29,1,
|
||||
29,4,29,256,8,29,11,29,12,29,257,3,29,260,8,29,3,29,262,8,29,1,30,1,30,
|
||||
1,30,1,30,5,30,268,8,30,10,30,12,30,271,9,30,1,30,1,30,1,30,1,30,1,30,5,
|
||||
30,278,8,30,10,30,12,30,281,9,30,1,30,3,30,284,8,30,1,31,1,31,5,31,288,
|
||||
8,31,10,31,12,31,291,9,31,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,34,1,34,
|
||||
1,34,1,34,1,34,1,34,1,34,4,34,307,8,34,11,34,12,34,308,5,34,311,8,34,10,
|
||||
34,12,34,314,9,34,1,35,4,35,317,8,35,11,35,12,35,318,1,35,1,35,1,36,1,36,
|
||||
1,37,4,37,326,8,37,11,37,12,37,327,0,0,38,1,1,3,2,5,3,7,4,9,5,11,6,13,7,
|
||||
15,8,17,9,19,10,21,11,23,12,25,13,27,14,29,15,31,16,33,17,35,18,37,19,39,
|
||||
20,41,21,43,22,45,23,47,24,49,25,51,26,53,27,55,28,57,0,59,29,61,30,63,
|
||||
0,65,0,67,0,69,31,71,32,73,0,75,33,1,0,29,2,0,76,76,108,108,2,0,73,73,105,
|
||||
105,2,0,75,75,107,107,2,0,69,69,101,101,2,0,66,66,98,98,2,0,84,84,116,116,
|
||||
2,0,87,87,119,119,2,0,78,78,110,110,2,0,88,88,120,120,2,0,83,83,115,115,
|
||||
2,0,82,82,114,114,2,0,71,71,103,103,2,0,80,80,112,112,2,0,67,67,99,99,2,
|
||||
0,79,79,111,111,2,0,65,65,97,97,2,0,68,68,100,100,2,0,72,72,104,104,2,0,
|
||||
89,89,121,121,2,0,85,85,117,117,2,0,70,70,102,102,2,0,43,43,45,45,2,0,34,
|
||||
34,92,92,2,0,39,39,92,92,4,0,35,36,64,90,95,95,97,123,7,0,35,36,45,45,47,
|
||||
58,64,90,95,95,97,123,125,125,3,0,9,10,13,13,32,32,1,0,48,57,8,0,9,10,13,
|
||||
13,32,34,39,41,44,44,60,62,91,91,93,93,353,0,1,1,0,0,0,0,3,1,0,0,0,0,5,
|
||||
1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,
|
||||
0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,
|
||||
0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,
|
||||
0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,
|
||||
0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0,59,1,0,0,0,0,61,1,0,0,0,
|
||||
0,69,1,0,0,0,0,71,1,0,0,0,0,75,1,0,0,0,1,77,1,0,0,0,3,79,1,0,0,0,5,81,1,
|
||||
0,0,0,7,83,1,0,0,0,9,85,1,0,0,0,11,90,1,0,0,0,13,92,1,0,0,0,15,95,1,0,0,
|
||||
0,17,98,1,0,0,0,19,100,1,0,0,0,21,103,1,0,0,0,23,105,1,0,0,0,25,108,1,0,
|
||||
0,0,27,113,1,0,0,0,29,119,1,0,0,0,31,127,1,0,0,0,33,135,1,0,0,0,35,142,
|
||||
1,0,0,0,37,152,1,0,0,0,39,155,1,0,0,0,41,159,1,0,0,0,43,163,1,0,0,0,45,
|
||||
166,1,0,0,0,47,175,1,0,0,0,49,179,1,0,0,0,51,186,1,0,0,0,53,193,1,0,0,0,
|
||||
55,209,1,0,0,0,57,211,1,0,0,0,59,261,1,0,0,0,61,283,1,0,0,0,63,285,1,0,
|
||||
0,0,65,292,1,0,0,0,67,295,1,0,0,0,69,299,1,0,0,0,71,316,1,0,0,0,73,322,
|
||||
1,0,0,0,75,325,1,0,0,0,77,78,5,40,0,0,78,2,1,0,0,0,79,80,5,41,0,0,80,4,
|
||||
1,0,0,0,81,82,5,91,0,0,82,6,1,0,0,0,83,84,5,93,0,0,84,8,1,0,0,0,85,86,5,
|
||||
44,0,0,86,10,1,0,0,0,87,91,5,61,0,0,88,89,5,61,0,0,89,91,5,61,0,0,90,87,
|
||||
1,0,0,0,90,88,1,0,0,0,91,12,1,0,0,0,92,93,5,33,0,0,93,94,5,61,0,0,94,14,
|
||||
1,0,0,0,95,96,5,60,0,0,96,97,5,62,0,0,97,16,1,0,0,0,98,99,5,60,0,0,99,18,
|
||||
1,0,0,0,100,101,5,60,0,0,101,102,5,61,0,0,102,20,1,0,0,0,103,104,5,62,0,
|
||||
0,104,22,1,0,0,0,105,106,5,62,0,0,106,107,5,61,0,0,107,24,1,0,0,0,108,109,
|
||||
7,0,0,0,109,110,7,1,0,0,110,111,7,2,0,0,111,112,7,3,0,0,112,26,1,0,0,0,
|
||||
113,114,7,1,0,0,114,115,7,0,0,0,115,116,7,1,0,0,116,117,7,2,0,0,117,118,
|
||||
7,3,0,0,118,28,1,0,0,0,119,120,7,4,0,0,120,121,7,3,0,0,121,122,7,5,0,0,
|
||||
122,123,7,6,0,0,123,124,7,3,0,0,124,125,7,3,0,0,125,126,7,7,0,0,126,30,
|
||||
1,0,0,0,127,128,7,3,0,0,128,129,7,8,0,0,129,130,7,1,0,0,130,131,7,9,0,0,
|
||||
131,133,7,5,0,0,132,134,7,9,0,0,133,132,1,0,0,0,133,134,1,0,0,0,134,32,
|
||||
1,0,0,0,135,136,7,10,0,0,136,137,7,3,0,0,137,138,7,11,0,0,138,139,7,3,0,
|
||||
0,139,140,7,8,0,0,140,141,7,12,0,0,141,34,1,0,0,0,142,143,7,13,0,0,143,
|
||||
144,7,14,0,0,144,145,7,7,0,0,145,146,7,5,0,0,146,147,7,15,0,0,147,148,7,
|
||||
1,0,0,148,150,7,7,0,0,149,151,7,9,0,0,150,149,1,0,0,0,150,151,1,0,0,0,151,
|
||||
36,1,0,0,0,152,153,7,1,0,0,153,154,7,7,0,0,154,38,1,0,0,0,155,156,7,7,0,
|
||||
0,156,157,7,14,0,0,157,158,7,5,0,0,158,40,1,0,0,0,159,160,7,15,0,0,160,
|
||||
161,7,7,0,0,161,162,7,16,0,0,162,42,1,0,0,0,163,164,7,14,0,0,164,165,7,
|
||||
10,0,0,165,44,1,0,0,0,166,167,7,17,0,0,167,168,7,15,0,0,168,169,7,9,0,0,
|
||||
169,170,7,5,0,0,170,171,7,14,0,0,171,172,7,2,0,0,172,173,7,3,0,0,173,174,
|
||||
7,7,0,0,174,46,1,0,0,0,175,176,7,17,0,0,176,177,7,15,0,0,177,178,7,9,0,
|
||||
0,178,48,1,0,0,0,179,180,7,17,0,0,180,181,7,15,0,0,181,182,7,9,0,0,182,
|
||||
183,7,15,0,0,183,184,7,7,0,0,184,185,7,18,0,0,185,50,1,0,0,0,186,187,7,
|
||||
17,0,0,187,188,7,15,0,0,188,189,7,9,0,0,189,190,7,15,0,0,190,191,7,0,0,
|
||||
0,191,192,7,0,0,0,192,52,1,0,0,0,193,194,7,9,0,0,194,195,7,3,0,0,195,196,
|
||||
7,15,0,0,196,197,7,10,0,0,197,198,7,13,0,0,198,199,7,17,0,0,199,54,1,0,
|
||||
0,0,200,201,7,5,0,0,201,202,7,10,0,0,202,203,7,19,0,0,203,210,7,3,0,0,204,
|
||||
205,7,20,0,0,205,206,7,15,0,0,206,207,7,0,0,0,207,208,7,9,0,0,208,210,7,
|
||||
3,0,0,209,200,1,0,0,0,209,204,1,0,0,0,210,56,1,0,0,0,211,212,7,21,0,0,212,
|
||||
58,1,0,0,0,213,215,3,57,28,0,214,213,1,0,0,0,214,215,1,0,0,0,215,217,1,
|
||||
0,0,0,216,218,3,73,36,0,217,216,1,0,0,0,218,219,1,0,0,0,219,217,1,0,0,0,
|
||||
219,220,1,0,0,0,220,228,1,0,0,0,221,225,5,46,0,0,222,224,3,73,36,0,223,
|
||||
222,1,0,0,0,224,227,1,0,0,0,225,223,1,0,0,0,225,226,1,0,0,0,226,229,1,0,
|
||||
0,0,227,225,1,0,0,0,228,221,1,0,0,0,228,229,1,0,0,0,229,239,1,0,0,0,230,
|
||||
232,7,3,0,0,231,233,3,57,28,0,232,231,1,0,0,0,232,233,1,0,0,0,233,235,1,
|
||||
0,0,0,234,236,3,73,36,0,235,234,1,0,0,0,236,237,1,0,0,0,237,235,1,0,0,0,
|
||||
237,238,1,0,0,0,238,240,1,0,0,0,239,230,1,0,0,0,239,240,1,0,0,0,240,262,
|
||||
1,0,0,0,241,243,3,57,28,0,242,241,1,0,0,0,242,243,1,0,0,0,243,244,1,0,0,
|
||||
0,244,246,5,46,0,0,245,247,3,73,36,0,246,245,1,0,0,0,247,248,1,0,0,0,248,
|
||||
246,1,0,0,0,248,249,1,0,0,0,249,259,1,0,0,0,250,252,7,3,0,0,251,253,3,57,
|
||||
28,0,252,251,1,0,0,0,252,253,1,0,0,0,253,255,1,0,0,0,254,256,3,73,36,0,
|
||||
255,254,1,0,0,0,256,257,1,0,0,0,257,255,1,0,0,0,257,258,1,0,0,0,258,260,
|
||||
1,0,0,0,259,250,1,0,0,0,259,260,1,0,0,0,260,262,1,0,0,0,261,214,1,0,0,0,
|
||||
261,242,1,0,0,0,262,60,1,0,0,0,263,269,5,34,0,0,264,268,8,22,0,0,265,266,
|
||||
5,92,0,0,266,268,9,0,0,0,267,264,1,0,0,0,267,265,1,0,0,0,268,271,1,0,0,
|
||||
0,269,267,1,0,0,0,269,270,1,0,0,0,270,272,1,0,0,0,271,269,1,0,0,0,272,284,
|
||||
5,34,0,0,273,279,5,39,0,0,274,278,8,23,0,0,275,276,5,92,0,0,276,278,9,0,
|
||||
0,0,277,274,1,0,0,0,277,275,1,0,0,0,278,281,1,0,0,0,279,277,1,0,0,0,279,
|
||||
280,1,0,0,0,280,282,1,0,0,0,281,279,1,0,0,0,282,284,5,39,0,0,283,263,1,
|
||||
0,0,0,283,273,1,0,0,0,284,62,1,0,0,0,285,289,7,24,0,0,286,288,7,25,0,0,
|
||||
287,286,1,0,0,0,288,291,1,0,0,0,289,287,1,0,0,0,289,290,1,0,0,0,290,64,
|
||||
1,0,0,0,291,289,1,0,0,0,292,293,5,91,0,0,293,294,5,93,0,0,294,66,1,0,0,
|
||||
0,295,296,5,91,0,0,296,297,5,42,0,0,297,298,5,93,0,0,298,68,1,0,0,0,299,
|
||||
312,3,63,31,0,300,301,5,46,0,0,301,311,3,63,31,0,302,311,3,65,32,0,303,
|
||||
311,3,67,33,0,304,306,5,46,0,0,305,307,3,73,36,0,306,305,1,0,0,0,307,308,
|
||||
1,0,0,0,308,306,1,0,0,0,308,309,1,0,0,0,309,311,1,0,0,0,310,300,1,0,0,0,
|
||||
310,302,1,0,0,0,310,303,1,0,0,0,310,304,1,0,0,0,311,314,1,0,0,0,312,310,
|
||||
1,0,0,0,312,313,1,0,0,0,313,70,1,0,0,0,314,312,1,0,0,0,315,317,7,26,0,0,
|
||||
316,315,1,0,0,0,317,318,1,0,0,0,318,316,1,0,0,0,318,319,1,0,0,0,319,320,
|
||||
1,0,0,0,320,321,6,35,0,0,321,72,1,0,0,0,322,323,7,27,0,0,323,74,1,0,0,0,
|
||||
324,326,8,28,0,0,325,324,1,0,0,0,326,327,1,0,0,0,327,325,1,0,0,0,327,328,
|
||||
1,0,0,0,328,76,1,0,0,0,29,0,90,133,150,209,214,219,225,228,232,237,239,
|
||||
242,248,252,257,259,261,267,269,277,279,283,289,308,310,312,318,327,1,6,
|
||||
0,0];
|
||||
|
||||
private static __ATN: ATN;
|
||||
public static get _ATN(): ATN {
|
||||
|
||||
@@ -1,25 +1,25 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
// Generated from grammar/FilterQuery.g4 by ANTLR 4.13.2
|
||||
|
||||
import {ParseTreeListener} from "antlr4";
|
||||
|
||||
|
||||
import { QueryContext } from "./FilterQueryParser";
|
||||
import { ExpressionContext } from "./FilterQueryParser";
|
||||
import { OrExpressionContext } from "./FilterQueryParser";
|
||||
import { AndExpressionContext } from "./FilterQueryParser";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser";
|
||||
import { PrimaryContext } from "./FilterQueryParser";
|
||||
import { ComparisonContext } from "./FilterQueryParser";
|
||||
import { InClauseContext } from "./FilterQueryParser";
|
||||
import { NotInClauseContext } from "./FilterQueryParser";
|
||||
import { ValueListContext } from "./FilterQueryParser";
|
||||
import { FullTextContext } from "./FilterQueryParser";
|
||||
import { FunctionCallContext } from "./FilterQueryParser";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser";
|
||||
import { FunctionParamContext } from "./FilterQueryParser";
|
||||
import { ArrayContext } from "./FilterQueryParser";
|
||||
import { ValueContext } from "./FilterQueryParser";
|
||||
import { KeyContext } from "./FilterQueryParser";
|
||||
import { QueryContext } from "./FilterQueryParser.js";
|
||||
import { ExpressionContext } from "./FilterQueryParser.js";
|
||||
import { OrExpressionContext } from "./FilterQueryParser.js";
|
||||
import { AndExpressionContext } from "./FilterQueryParser.js";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser.js";
|
||||
import { PrimaryContext } from "./FilterQueryParser.js";
|
||||
import { ComparisonContext } from "./FilterQueryParser.js";
|
||||
import { InClauseContext } from "./FilterQueryParser.js";
|
||||
import { NotInClauseContext } from "./FilterQueryParser.js";
|
||||
import { ValueListContext } from "./FilterQueryParser.js";
|
||||
import { FullTextContext } from "./FilterQueryParser.js";
|
||||
import { FunctionCallContext } from "./FilterQueryParser.js";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser.js";
|
||||
import { FunctionParamContext } from "./FilterQueryParser.js";
|
||||
import { ArrayContext } from "./FilterQueryParser.js";
|
||||
import { ValueContext } from "./FilterQueryParser.js";
|
||||
import { KeyContext } from "./FilterQueryParser.js";
|
||||
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
// Generated from grammar/FilterQuery.g4 by ANTLR 4.13.2
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
|
||||
import {
|
||||
@@ -45,13 +45,14 @@ export default class FilterQueryParser extends Parser {
|
||||
public static readonly HAS = 24;
|
||||
public static readonly HASANY = 25;
|
||||
public static readonly HASALL = 26;
|
||||
public static readonly BOOL = 27;
|
||||
public static readonly NUMBER = 28;
|
||||
public static readonly QUOTED_TEXT = 29;
|
||||
public static readonly KEY = 30;
|
||||
public static readonly WS = 31;
|
||||
public static readonly FREETEXT = 32;
|
||||
public static readonly EOF = Token.EOF;
|
||||
public static readonly SEARCH = 27;
|
||||
public static readonly BOOL = 28;
|
||||
public static readonly NUMBER = 29;
|
||||
public static readonly QUOTED_TEXT = 30;
|
||||
public static readonly KEY = 31;
|
||||
public static readonly WS = 32;
|
||||
public static readonly FREETEXT = 33;
|
||||
public static override readonly EOF = Token.EOF;
|
||||
public static readonly RULE_query = 0;
|
||||
public static readonly RULE_expression = 1;
|
||||
public static readonly RULE_orExpression = 2;
|
||||
@@ -90,8 +91,9 @@ export default class FilterQueryParser extends Parser {
|
||||
"AND", "OR",
|
||||
"HASTOKEN",
|
||||
"HAS", "HASANY",
|
||||
"HASALL", "BOOL",
|
||||
"NUMBER", "QUOTED_TEXT",
|
||||
"HASALL", "SEARCH",
|
||||
"BOOL", "NUMBER",
|
||||
"QUOTED_TEXT",
|
||||
"KEY", "WS",
|
||||
"FREETEXT" ];
|
||||
// tslint:disable:no-trailing-whitespace
|
||||
@@ -222,7 +224,7 @@ export default class FilterQueryParser extends Parser {
|
||||
this.state = 53;
|
||||
this._errHandler.sync(this);
|
||||
_la = this._input.LA(1);
|
||||
while (((((_la - 1)) & ~0x1F) === 0 && ((1 << (_la - 1)) & 3218604033) !== 0)) {
|
||||
while ((((_la) & ~0x1F) === 0 && ((1 << _la) & 4289724418) !== 0) || _la===33) {
|
||||
{
|
||||
this.state = 51;
|
||||
this._errHandler.sync(this);
|
||||
@@ -245,7 +247,8 @@ export default class FilterQueryParser extends Parser {
|
||||
case 28:
|
||||
case 29:
|
||||
case 30:
|
||||
case 32:
|
||||
case 31:
|
||||
case 33:
|
||||
{
|
||||
this.state = 50;
|
||||
this.unaryExpression();
|
||||
@@ -811,7 +814,7 @@ export default class FilterQueryParser extends Parser {
|
||||
{
|
||||
this.state = 190;
|
||||
_la = this._input.LA(1);
|
||||
if(!(_la===29 || _la===32)) {
|
||||
if(!(_la===30 || _la===33)) {
|
||||
this._errHandler.recoverInline(this);
|
||||
}
|
||||
else {
|
||||
@@ -844,7 +847,7 @@ export default class FilterQueryParser extends Parser {
|
||||
{
|
||||
this.state = 192;
|
||||
_la = this._input.LA(1);
|
||||
if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 125829120) !== 0))) {
|
||||
if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 260046848) !== 0))) {
|
||||
this._errHandler.recoverInline(this);
|
||||
}
|
||||
else {
|
||||
@@ -999,7 +1002,7 @@ export default class FilterQueryParser extends Parser {
|
||||
{
|
||||
this.state = 214;
|
||||
_la = this._input.LA(1);
|
||||
if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 2013265920) !== 0))) {
|
||||
if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 4026531840) !== 0))) {
|
||||
this._errHandler.recoverInline(this);
|
||||
}
|
||||
else {
|
||||
@@ -1048,7 +1051,7 @@ export default class FilterQueryParser extends Parser {
|
||||
return localctx;
|
||||
}
|
||||
|
||||
public static readonly _serializedATN: number[] = [4,1,32,219,2,0,7,0,2,
|
||||
public static readonly _serializedATN: number[] = [4,1,33,219,2,0,7,0,2,
|
||||
1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,
|
||||
10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,1,0,
|
||||
1,0,1,0,1,1,1,1,1,2,1,2,1,2,5,2,43,8,2,10,2,12,2,46,9,2,1,3,1,3,1,3,1,3,
|
||||
@@ -1063,7 +1066,7 @@ export default class FilterQueryParser extends Parser {
|
||||
10,9,12,9,189,9,9,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,12,1,12,1,12,5,12,
|
||||
201,8,12,10,12,12,12,204,9,12,1,13,1,13,1,13,3,13,209,8,13,1,14,1,14,1,
|
||||
14,1,14,1,15,1,15,1,16,1,16,1,16,0,0,17,0,2,4,6,8,10,12,14,16,18,20,22,
|
||||
24,26,28,30,32,0,5,1,0,7,8,1,0,13,14,2,0,29,29,32,32,1,0,23,26,1,0,27,30,
|
||||
24,26,28,30,32,0,5,1,0,7,8,1,0,13,14,2,0,30,30,33,33,1,0,23,27,1,0,28,31,
|
||||
235,0,34,1,0,0,0,2,37,1,0,0,0,4,39,1,0,0,0,6,47,1,0,0,0,8,57,1,0,0,0,10,
|
||||
70,1,0,0,0,12,149,1,0,0,0,14,163,1,0,0,0,16,180,1,0,0,0,18,182,1,0,0,0,
|
||||
20,190,1,0,0,0,22,192,1,0,0,0,24,197,1,0,0,0,26,208,1,0,0,0,28,210,1,0,
|
||||
@@ -1115,7 +1118,7 @@ export default class FilterQueryParser extends Parser {
|
||||
0,0,205,209,3,32,16,0,206,209,3,30,15,0,207,209,3,28,14,0,208,205,1,0,0,
|
||||
0,208,206,1,0,0,0,208,207,1,0,0,0,209,27,1,0,0,0,210,211,5,3,0,0,211,212,
|
||||
3,18,9,0,212,213,5,4,0,0,213,29,1,0,0,0,214,215,7,4,0,0,215,31,1,0,0,0,
|
||||
216,217,5,30,0,0,217,33,1,0,0,0,11,44,51,53,57,70,149,163,180,187,202,208];
|
||||
216,217,5,31,0,0,217,33,1,0,0,0,11,44,51,53,57,70,149,163,180,187,202,208];
|
||||
|
||||
private static __ATN: ATN;
|
||||
public static get _ATN(): ATN {
|
||||
@@ -1662,6 +1665,9 @@ export class FunctionCallContext extends ParserRuleContext {
|
||||
public HASALL(): TerminalNode {
|
||||
return this.getToken(FilterQueryParser.HASALL, 0);
|
||||
}
|
||||
public SEARCH(): TerminalNode {
|
||||
return this.getToken(FilterQueryParser.SEARCH, 0);
|
||||
}
|
||||
public get ruleIndex(): number {
|
||||
return FilterQueryParser.RULE_functionCall;
|
||||
}
|
||||
|
||||
@@ -1,25 +1,25 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
// Generated from grammar/FilterQuery.g4 by ANTLR 4.13.2
|
||||
|
||||
import {ParseTreeVisitor} from 'antlr4';
|
||||
|
||||
|
||||
import { QueryContext } from "./FilterQueryParser";
|
||||
import { ExpressionContext } from "./FilterQueryParser";
|
||||
import { OrExpressionContext } from "./FilterQueryParser";
|
||||
import { AndExpressionContext } from "./FilterQueryParser";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser";
|
||||
import { PrimaryContext } from "./FilterQueryParser";
|
||||
import { ComparisonContext } from "./FilterQueryParser";
|
||||
import { InClauseContext } from "./FilterQueryParser";
|
||||
import { NotInClauseContext } from "./FilterQueryParser";
|
||||
import { ValueListContext } from "./FilterQueryParser";
|
||||
import { FullTextContext } from "./FilterQueryParser";
|
||||
import { FunctionCallContext } from "./FilterQueryParser";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser";
|
||||
import { FunctionParamContext } from "./FilterQueryParser";
|
||||
import { ArrayContext } from "./FilterQueryParser";
|
||||
import { ValueContext } from "./FilterQueryParser";
|
||||
import { KeyContext } from "./FilterQueryParser";
|
||||
import { QueryContext } from "./FilterQueryParser.js";
|
||||
import { ExpressionContext } from "./FilterQueryParser.js";
|
||||
import { OrExpressionContext } from "./FilterQueryParser.js";
|
||||
import { AndExpressionContext } from "./FilterQueryParser.js";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser.js";
|
||||
import { PrimaryContext } from "./FilterQueryParser.js";
|
||||
import { ComparisonContext } from "./FilterQueryParser.js";
|
||||
import { InClauseContext } from "./FilterQueryParser.js";
|
||||
import { NotInClauseContext } from "./FilterQueryParser.js";
|
||||
import { ValueListContext } from "./FilterQueryParser.js";
|
||||
import { FullTextContext } from "./FilterQueryParser.js";
|
||||
import { FunctionCallContext } from "./FilterQueryParser.js";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser.js";
|
||||
import { FunctionParamContext } from "./FilterQueryParser.js";
|
||||
import { ArrayContext } from "./FilterQueryParser.js";
|
||||
import { ValueContext } from "./FilterQueryParser.js";
|
||||
import { KeyContext } from "./FilterQueryParser.js";
|
||||
|
||||
|
||||
/**
|
||||
|
||||
@@ -107,7 +107,7 @@ fullText
|
||||
* ...
|
||||
*/
|
||||
functionCall
|
||||
: (HASTOKEN | HAS | HASANY | HASALL) LPAREN functionParamList RPAREN
|
||||
: (HASTOKEN | HAS | HASANY | HASALL | SEARCH) LPAREN functionParamList RPAREN
|
||||
;
|
||||
|
||||
// Function parameters can be keys, single scalar values, or arrays
|
||||
@@ -184,6 +184,7 @@ HASTOKEN : [Hh][Aa][Ss][Tt][Oo][Kk][Ee][Nn];
|
||||
HAS : [Hh][Aa][Ss] ;
|
||||
HASANY : [Hh][Aa][Ss][Aa][Nn][Yy] ;
|
||||
HASALL : [Hh][Aa][Ss][Aa][Ll][Ll] ;
|
||||
SEARCH : [Ss][Ee][Aa][Rr][Cc][Hh] ;
|
||||
|
||||
// Potential boolean constants
|
||||
BOOL
|
||||
|
||||
@@ -4,6 +4,8 @@ import (
|
||||
"errors" //nolint:depguard
|
||||
"fmt"
|
||||
"log/slog"
|
||||
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
)
|
||||
|
||||
// base is the fundamental struct that implements the error interface.
|
||||
@@ -253,3 +255,10 @@ func NewTimeoutf(code Code, format string, args ...any) *base {
|
||||
func Attr(err error) slog.Attr {
|
||||
return slog.Any("exception", err)
|
||||
}
|
||||
|
||||
// TypeAttr returns an OTel attribute.KeyValue with the "error.type" semconv key
|
||||
// set to the error's type string.
|
||||
func TypeAttr(err error) attribute.KeyValue {
|
||||
t, _, _, _, _, _ := Unwrapb(err)
|
||||
return attribute.String("error.type", t.String())
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ var (
|
||||
FeatureHideRootUser = featuretypes.MustNewName("hide_root_user")
|
||||
FeatureGetMetersFromZeus = featuretypes.MustNewName("get_meters_from_zeus")
|
||||
FeaturePutMetersInZeus = featuretypes.MustNewName("put_meters_in_zeus")
|
||||
FeatureUseMeterReporter = featuretypes.MustNewName("use_meter_reporter")
|
||||
FeatureUseJSONBody = featuretypes.MustNewName("use_json_body")
|
||||
)
|
||||
|
||||
@@ -53,6 +54,14 @@ func MustNewRegistry() featuretypes.Registry {
|
||||
DefaultVariant: featuretypes.MustNewName("disabled"),
|
||||
Variants: featuretypes.NewBooleanVariants(),
|
||||
},
|
||||
&featuretypes.Feature{
|
||||
Name: FeatureUseMeterReporter,
|
||||
Kind: featuretypes.KindBoolean,
|
||||
Stage: featuretypes.StageExperimental,
|
||||
Description: "Controls whether the enterprise meter reporter runs instead of the noop reporter",
|
||||
DefaultVariant: featuretypes.MustNewName("disabled"),
|
||||
Variants: featuretypes.NewBooleanVariants(),
|
||||
},
|
||||
&featuretypes.Feature{
|
||||
Name: FeatureUseJSONBody,
|
||||
Kind: featuretypes.KindBoolean,
|
||||
|
||||
68
pkg/metercollector/config.go
Normal file
68
pkg/metercollector/config.go
Normal file
@@ -0,0 +1,68 @@
|
||||
package metercollector
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
)
|
||||
|
||||
const (
|
||||
ProviderStatic = "static"
|
||||
ProviderTelemetry = "telemetry"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Provider string `mapstructure:"provider"`
|
||||
Telemetry TelemetryConfig `mapstructure:"telemetry"`
|
||||
Static StaticConfig `mapstructure:"static"`
|
||||
}
|
||||
|
||||
func (c Config) Validate() error {
|
||||
switch c.Provider {
|
||||
case ProviderStatic:
|
||||
return c.Static.Validate()
|
||||
case ProviderTelemetry:
|
||||
return c.Telemetry.Validate()
|
||||
default:
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidConfig, "meter collector: unknown provider %q", c.Provider)
|
||||
}
|
||||
}
|
||||
|
||||
type TelemetryConfig struct {
|
||||
Name zeustypes.MeterName
|
||||
Unit zeustypes.MeterUnit
|
||||
Aggregation zeustypes.MeterAggregation
|
||||
DBName string
|
||||
TableName string
|
||||
DefaultRetentionDays int
|
||||
}
|
||||
|
||||
type StaticConfig struct {
|
||||
Name zeustypes.MeterName
|
||||
Unit zeustypes.MeterUnit
|
||||
Aggregation zeustypes.MeterAggregation
|
||||
Value int64
|
||||
}
|
||||
|
||||
func (c StaticConfig) Validate() error {
|
||||
if c.Name.IsZero() {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidConfig, "static meter collector: name must be set")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c TelemetryConfig) Validate() error {
|
||||
if c.Name.IsZero() {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidConfig, "telemetry meter collector: name must be set")
|
||||
}
|
||||
|
||||
if c.DBName == "" || c.TableName == "" {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidConfig, "telemetry meter collector: db_name and table_name are required")
|
||||
}
|
||||
|
||||
if c.DefaultRetentionDays <= 0 {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidConfig, "telemetry meter collector: default_retention_days must be positive")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
25
pkg/metercollector/metercollector.go
Normal file
25
pkg/metercollector/metercollector.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package metercollector
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeMeterCollectorCollectFailed = errors.MustNewCode("meter_collector_collect_failed")
|
||||
ErrCodeMeterCollectorInvalidCustomRetentionRule = errors.MustNewCode("meter_collector_invalid_custom_retention_rule")
|
||||
ErrCodeInvalidConfig = errors.MustNewCode("meter_collector_invalid_config")
|
||||
)
|
||||
|
||||
type MeterCollector interface {
|
||||
Name() zeustypes.MeterName
|
||||
Unit() zeustypes.MeterUnit
|
||||
Aggregation() zeustypes.MeterAggregation
|
||||
Origin(ctx context.Context, orgID valuer.UUID, license *licensetypes.License, todayStart time.Time) (time.Time, error)
|
||||
Collect(ctx context.Context, orgID valuer.UUID, license *licensetypes.License, window zeustypes.MeterWindow) ([]zeustypes.Meter, error)
|
||||
}
|
||||
37
pkg/meterreporter/config.go
Normal file
37
pkg/meterreporter/config.go
Normal file
@@ -0,0 +1,37 @@
|
||||
package meterreporter
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
)
|
||||
|
||||
var _ factory.Config = (*Config)(nil)
|
||||
|
||||
type Config struct {
|
||||
// Interval is how often the reporter collects and ships meters.
|
||||
Interval time.Duration `mapstructure:"interval"`
|
||||
|
||||
// Backfill enables sealed-day catch-up from the license creation day.
|
||||
Backfill bool `mapstructure:"backfill"`
|
||||
}
|
||||
|
||||
func newConfig() factory.Config {
|
||||
return Config{
|
||||
Interval: 6 * time.Hour,
|
||||
Backfill: true,
|
||||
}
|
||||
}
|
||||
|
||||
func NewConfigFactory() factory.ConfigFactory {
|
||||
return factory.NewConfigFactory(factory.MustNewName("meterreporter"), newConfig)
|
||||
}
|
||||
|
||||
func (c Config) Validate() error {
|
||||
if c.Interval < 5*time.Minute || c.Interval > 24*time.Hour {
|
||||
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput, "meterreporter::interval must be between 5m and 24h")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
14
pkg/meterreporter/meterreporter.go
Normal file
14
pkg/meterreporter/meterreporter.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package meterreporter
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrCodeInvalidInput = errors.MustNewCode("meterreporter_invalid_input")
|
||||
)
|
||||
|
||||
type Reporter interface {
|
||||
factory.ServiceWithHealthy
|
||||
}
|
||||
39
pkg/meterreporter/noopmeterreporter/provider.go
Normal file
39
pkg/meterreporter/noopmeterreporter/provider.go
Normal file
@@ -0,0 +1,39 @@
|
||||
package noopmeterreporter
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
healthyC chan struct{}
|
||||
stopC chan struct{}
|
||||
}
|
||||
|
||||
func NewFactory() factory.ProviderFactory[meterreporter.Reporter, meterreporter.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("noop"), New)
|
||||
}
|
||||
|
||||
func New(_ context.Context, _ factory.ProviderSettings, _ meterreporter.Config) (meterreporter.Reporter, error) {
|
||||
return &provider{
|
||||
healthyC: make(chan struct{}),
|
||||
stopC: make(chan struct{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p *provider) Start(_ context.Context) error {
|
||||
close(p.healthyC)
|
||||
<-p.stopC
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *provider) Stop(_ context.Context) error {
|
||||
close(p.stopC)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *provider) Healthy() <-chan struct{} {
|
||||
return p.healthyC
|
||||
}
|
||||
52
pkg/modules/retention/implretention/getter.go
Normal file
52
pkg/modules/retention/implretention/getter.go
Normal file
@@ -0,0 +1,52 @@
|
||||
package implretention
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type getter struct {
|
||||
store retentiontypes.Store
|
||||
}
|
||||
|
||||
// NewGetter creates a retention getter backed by the retention store.
|
||||
func NewGetter(store retentiontypes.Store) retention.Getter {
|
||||
return &getter{
|
||||
store: store,
|
||||
}
|
||||
}
|
||||
|
||||
// GetRetentionPolicySegments loads successful TTL changes and converts them into retention policy segments.
|
||||
func (getter *getter) GetRetentionPolicySegments(
|
||||
ctx context.Context,
|
||||
orgID valuer.UUID,
|
||||
dbName string,
|
||||
tableName string,
|
||||
fallbackDefaultDays int,
|
||||
startMs int64,
|
||||
endMs int64,
|
||||
) ([]*retentiontypes.RetentionPolicySegment, error) {
|
||||
if startMs >= endMs {
|
||||
return nil, nil
|
||||
}
|
||||
if dbName == "" {
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "dbName is empty")
|
||||
}
|
||||
if tableName == "" {
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "tableName is empty")
|
||||
}
|
||||
if fallbackDefaultDays <= 0 {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "non-positive fallbackDefaultDays %d", fallbackDefaultDays)
|
||||
}
|
||||
|
||||
rows, err := getter.store.ListTTLSettingsByTableNameAndBeforeCreatedAt(ctx, orgID, dbName+"."+tableName, endMs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return retentiontypes.BuildRetentionPolicySegmentsFromRows(rows, fallbackDefaultDays, startMs, endMs)
|
||||
}
|
||||
41
pkg/modules/retention/implretention/store.go
Normal file
41
pkg/modules/retention/implretention/store.go
Normal file
@@ -0,0 +1,41 @@
|
||||
package implretention
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type store struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// NewStore creates a SQL-backed retention store.
|
||||
func NewStore(sqlstore sqlstore.SQLStore) retentiontypes.Store {
|
||||
return &store{sqlstore: sqlstore}
|
||||
}
|
||||
|
||||
// ListTTLSettingsByTableNameAndBeforeCreatedAt returns successful TTL settings before the given timestamp.
|
||||
func (store *store) ListTTLSettingsByTableNameAndBeforeCreatedAt(ctx context.Context, orgID valuer.UUID, tableName string, beforeMs int64) ([]*retentiontypes.TTLSetting, error) {
|
||||
rows := []*retentiontypes.TTLSetting{}
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&rows).
|
||||
Where("table_name = ?", tableName).
|
||||
Where("org_id = ?", orgID.StringValue()).
|
||||
Where("status = ?", retentiontypes.TTLSettingStatusSuccess).
|
||||
Where("created_at < ?", time.UnixMilli(beforeMs).UTC()).
|
||||
OrderExpr("created_at ASC").
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "load ttl_setting rows for org %q table %q", orgID.StringValue(), tableName)
|
||||
}
|
||||
|
||||
return rows, nil
|
||||
}
|
||||
14
pkg/modules/retention/retention.go
Normal file
14
pkg/modules/retention/retention.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package retention
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// Getter resolves retention data and expressions for read paths.
|
||||
type Getter interface {
|
||||
// GetRetentionPolicySegments returns retention policy segments active over a half-open meter window.
|
||||
GetRetentionPolicySegments(ctx context.Context, orgID valuer.UUID, dbName string, tableName string, fallbackDefaultDays int, startMs int64, endMs int64) ([]*retentiontypes.RetentionPolicySegment, error)
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -24,12 +24,13 @@ HASTOKEN=23
|
||||
HAS=24
|
||||
HASANY=25
|
||||
HASALL=26
|
||||
BOOL=27
|
||||
NUMBER=28
|
||||
QUOTED_TEXT=29
|
||||
KEY=30
|
||||
WS=31
|
||||
FREETEXT=32
|
||||
SEARCH=27
|
||||
BOOL=28
|
||||
NUMBER=29
|
||||
QUOTED_TEXT=30
|
||||
KEY=31
|
||||
WS=32
|
||||
FREETEXT=33
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -24,12 +24,13 @@ HASTOKEN=23
|
||||
HAS=24
|
||||
HASANY=25
|
||||
HASALL=26
|
||||
BOOL=27
|
||||
NUMBER=28
|
||||
QUOTED_TEXT=29
|
||||
KEY=30
|
||||
WS=31
|
||||
FREETEXT=32
|
||||
SEARCH=27
|
||||
BOOL=28
|
||||
NUMBER=29
|
||||
QUOTED_TEXT=30
|
||||
KEY=31
|
||||
WS=32
|
||||
FREETEXT=33
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
|
||||
@@ -4,10 +4,9 @@ package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/antlr4-go/antlr/v4"
|
||||
"sync"
|
||||
"unicode"
|
||||
|
||||
"github.com/antlr4-go/antlr/v4"
|
||||
)
|
||||
|
||||
// Suppress unused import error
|
||||
@@ -51,170 +50,174 @@ func filterquerylexerLexerInit() {
|
||||
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "ILIKE", "BETWEEN", "EXISTS",
|
||||
"REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR", "HASTOKEN", "HAS", "HASANY",
|
||||
"HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
|
||||
"HASALL", "SEARCH", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "ILIKE", "BETWEEN", "EXISTS",
|
||||
"REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR", "HASTOKEN", "HAS", "HASANY",
|
||||
"HASALL", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS",
|
||||
"OLD_JSON_BRACKS", "KEY", "WS", "DIGIT", "FREETEXT",
|
||||
"HASALL", "SEARCH", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT",
|
||||
"EMPTY_BRACKS", "OLD_JSON_BRACKS", "KEY", "WS", "DIGIT", "FREETEXT",
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 0, 32, 320, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 0, 33, 329, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
|
||||
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
|
||||
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
|
||||
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
|
||||
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
|
||||
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
|
||||
7, 36, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5,
|
||||
1, 5, 1, 5, 3, 5, 89, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1,
|
||||
8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1,
|
||||
12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14,
|
||||
1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1,
|
||||
15, 1, 15, 3, 15, 132, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
|
||||
1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 149,
|
||||
8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1,
|
||||
20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22,
|
||||
1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1,
|
||||
24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25,
|
||||
1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 201,
|
||||
8, 26, 1, 27, 1, 27, 1, 28, 3, 28, 206, 8, 28, 1, 28, 4, 28, 209, 8, 28,
|
||||
11, 28, 12, 28, 210, 1, 28, 1, 28, 5, 28, 215, 8, 28, 10, 28, 12, 28, 218,
|
||||
9, 28, 3, 28, 220, 8, 28, 1, 28, 1, 28, 3, 28, 224, 8, 28, 1, 28, 4, 28,
|
||||
227, 8, 28, 11, 28, 12, 28, 228, 3, 28, 231, 8, 28, 1, 28, 3, 28, 234,
|
||||
8, 28, 1, 28, 1, 28, 4, 28, 238, 8, 28, 11, 28, 12, 28, 239, 1, 28, 1,
|
||||
28, 3, 28, 244, 8, 28, 1, 28, 4, 28, 247, 8, 28, 11, 28, 12, 28, 248, 3,
|
||||
28, 251, 8, 28, 3, 28, 253, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 259,
|
||||
8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5,
|
||||
29, 269, 8, 29, 10, 29, 12, 29, 272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1,
|
||||
30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31,
|
||||
1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1,
|
||||
33, 1, 33, 4, 33, 298, 8, 33, 11, 33, 12, 33, 299, 5, 33, 302, 8, 33, 10,
|
||||
33, 12, 33, 305, 9, 33, 1, 34, 4, 34, 308, 8, 34, 11, 34, 12, 34, 309,
|
||||
1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 317, 8, 36, 11, 36, 12, 36, 318,
|
||||
0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19,
|
||||
7, 36, 2, 37, 7, 37, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
|
||||
4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 91, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7,
|
||||
1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11,
|
||||
1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1,
|
||||
13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15,
|
||||
1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 134, 8, 15, 1, 16, 1, 16, 1, 16, 1,
|
||||
16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17,
|
||||
1, 17, 3, 17, 151, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1,
|
||||
19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22,
|
||||
1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1,
|
||||
24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25,
|
||||
1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1,
|
||||
27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 210,
|
||||
8, 27, 1, 28, 1, 28, 1, 29, 3, 29, 215, 8, 29, 1, 29, 4, 29, 218, 8, 29,
|
||||
11, 29, 12, 29, 219, 1, 29, 1, 29, 5, 29, 224, 8, 29, 10, 29, 12, 29, 227,
|
||||
9, 29, 3, 29, 229, 8, 29, 1, 29, 1, 29, 3, 29, 233, 8, 29, 1, 29, 4, 29,
|
||||
236, 8, 29, 11, 29, 12, 29, 237, 3, 29, 240, 8, 29, 1, 29, 3, 29, 243,
|
||||
8, 29, 1, 29, 1, 29, 4, 29, 247, 8, 29, 11, 29, 12, 29, 248, 1, 29, 1,
|
||||
29, 3, 29, 253, 8, 29, 1, 29, 4, 29, 256, 8, 29, 11, 29, 12, 29, 257, 3,
|
||||
29, 260, 8, 29, 3, 29, 262, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 268,
|
||||
8, 30, 10, 30, 12, 30, 271, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5,
|
||||
30, 278, 8, 30, 10, 30, 12, 30, 281, 9, 30, 1, 30, 3, 30, 284, 8, 30, 1,
|
||||
31, 1, 31, 5, 31, 288, 8, 31, 10, 31, 12, 31, 291, 9, 31, 1, 32, 1, 32,
|
||||
1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1,
|
||||
34, 1, 34, 4, 34, 307, 8, 34, 11, 34, 12, 34, 308, 5, 34, 311, 8, 34, 10,
|
||||
34, 12, 34, 314, 9, 34, 1, 35, 4, 35, 317, 8, 35, 11, 35, 12, 35, 318,
|
||||
1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 4, 37, 326, 8, 37, 11, 37, 12, 37, 327,
|
||||
0, 0, 38, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19,
|
||||
10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37,
|
||||
19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55,
|
||||
0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32,
|
||||
1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75,
|
||||
107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116,
|
||||
116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120,
|
||||
120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103,
|
||||
103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111,
|
||||
2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2,
|
||||
0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2,
|
||||
0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 35,
|
||||
36, 64, 90, 95, 95, 97, 123, 7, 0, 35, 36, 45, 45, 47, 58, 64, 90, 95,
|
||||
95, 97, 123, 125, 125, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0,
|
||||
9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 344, 0,
|
||||
1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0,
|
||||
9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0,
|
||||
0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0,
|
||||
0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0,
|
||||
0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1,
|
||||
0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47,
|
||||
1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0,
|
||||
57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0,
|
||||
0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0,
|
||||
0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1,
|
||||
0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21,
|
||||
101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1,
|
||||
0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0,
|
||||
35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157,
|
||||
1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0,
|
||||
0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202,
|
||||
1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0,
|
||||
0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 307,
|
||||
1, 0, 0, 0, 71, 313, 1, 0, 0, 0, 73, 316, 1, 0, 0, 0, 75, 76, 5, 40, 0,
|
||||
0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, 79, 80, 5,
|
||||
91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83,
|
||||
84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, 86, 87, 5, 61,
|
||||
0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89,
|
||||
12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0,
|
||||
0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96,
|
||||
97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61,
|
||||
0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0,
|
||||
103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106,
|
||||
107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110,
|
||||
7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0,
|
||||
0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0,
|
||||
116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120,
|
||||
7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3,
|
||||
0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 125, 126, 7, 3, 0, 0,
|
||||
126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 129, 7, 9, 0, 0, 129,
|
||||
131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132,
|
||||
1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3,
|
||||
0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0,
|
||||
138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141,
|
||||
142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145,
|
||||
7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9,
|
||||
0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0,
|
||||
150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154,
|
||||
7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0,
|
||||
0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0,
|
||||
0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163,
|
||||
44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, 0, 0, 166, 167,
|
||||
7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, 169, 170, 7, 2,
|
||||
0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0,
|
||||
173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176,
|
||||
48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180,
|
||||
7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18,
|
||||
0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0,
|
||||
186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189,
|
||||
190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7,
|
||||
10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20,
|
||||
0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0,
|
||||
199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201,
|
||||
54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3,
|
||||
55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0,
|
||||
0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0,
|
||||
0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212,
|
||||
216, 5, 46, 0, 0, 213, 215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218,
|
||||
1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0,
|
||||
0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0,
|
||||
220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223,
|
||||
222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227,
|
||||
3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1,
|
||||
0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0,
|
||||
0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0,
|
||||
233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235,
|
||||
237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239,
|
||||
1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0,
|
||||
0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, 243, 242, 1, 0, 0,
|
||||
0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0,
|
||||
246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248,
|
||||
249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251,
|
||||
1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0,
|
||||
0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0,
|
||||
256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258,
|
||||
256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261,
|
||||
1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34,
|
||||
0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0,
|
||||
0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269,
|
||||
272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273,
|
||||
1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0,
|
||||
0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0,
|
||||
277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280,
|
||||
278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1,
|
||||
0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0,
|
||||
0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0,
|
||||
289, 66, 1, 0, 0, 0, 290, 303, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292,
|
||||
302, 3, 61, 30, 0, 293, 302, 3, 63, 31, 0, 294, 302, 3, 65, 32, 0, 295,
|
||||
297, 5, 46, 0, 0, 296, 298, 3, 71, 35, 0, 297, 296, 1, 0, 0, 0, 298, 299,
|
||||
1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 302, 1, 0,
|
||||
0, 0, 301, 291, 1, 0, 0, 0, 301, 293, 1, 0, 0, 0, 301, 294, 1, 0, 0, 0,
|
||||
301, 295, 1, 0, 0, 0, 302, 305, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303,
|
||||
304, 1, 0, 0, 0, 304, 68, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 306, 308, 7,
|
||||
26, 0, 0, 307, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 307, 1, 0, 0,
|
||||
0, 309, 310, 1, 0, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 6, 34, 0, 0, 312,
|
||||
70, 1, 0, 0, 0, 313, 314, 7, 27, 0, 0, 314, 72, 1, 0, 0, 0, 315, 317, 8,
|
||||
28, 0, 0, 316, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 316, 1, 0, 0,
|
||||
0, 318, 319, 1, 0, 0, 0, 319, 74, 1, 0, 0, 0, 29, 0, 88, 131, 148, 200,
|
||||
205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260,
|
||||
268, 270, 274, 280, 299, 301, 303, 309, 318, 1, 6, 0, 0,
|
||||
28, 57, 0, 59, 29, 61, 30, 63, 0, 65, 0, 67, 0, 69, 31, 71, 32, 73, 0,
|
||||
75, 33, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0,
|
||||
75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84,
|
||||
84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88,
|
||||
88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71,
|
||||
71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79,
|
||||
111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104,
|
||||
104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102,
|
||||
102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92,
|
||||
4, 0, 35, 36, 64, 90, 95, 95, 97, 123, 7, 0, 35, 36, 45, 45, 47, 58, 64,
|
||||
90, 95, 95, 97, 123, 125, 125, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
|
||||
8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 353,
|
||||
0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0,
|
||||
0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0,
|
||||
0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0,
|
||||
0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1,
|
||||
0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39,
|
||||
1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0,
|
||||
47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0,
|
||||
0, 55, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0,
|
||||
0, 0, 71, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0,
|
||||
0, 0, 5, 81, 1, 0, 0, 0, 7, 83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90,
|
||||
1, 0, 0, 0, 13, 92, 1, 0, 0, 0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0,
|
||||
19, 100, 1, 0, 0, 0, 21, 103, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108,
|
||||
1, 0, 0, 0, 27, 113, 1, 0, 0, 0, 29, 119, 1, 0, 0, 0, 31, 127, 1, 0, 0,
|
||||
0, 33, 135, 1, 0, 0, 0, 35, 142, 1, 0, 0, 0, 37, 152, 1, 0, 0, 0, 39, 155,
|
||||
1, 0, 0, 0, 41, 159, 1, 0, 0, 0, 43, 163, 1, 0, 0, 0, 45, 166, 1, 0, 0,
|
||||
0, 47, 175, 1, 0, 0, 0, 49, 179, 1, 0, 0, 0, 51, 186, 1, 0, 0, 0, 53, 193,
|
||||
1, 0, 0, 0, 55, 209, 1, 0, 0, 0, 57, 211, 1, 0, 0, 0, 59, 261, 1, 0, 0,
|
||||
0, 61, 283, 1, 0, 0, 0, 63, 285, 1, 0, 0, 0, 65, 292, 1, 0, 0, 0, 67, 295,
|
||||
1, 0, 0, 0, 69, 299, 1, 0, 0, 0, 71, 316, 1, 0, 0, 0, 73, 322, 1, 0, 0,
|
||||
0, 75, 325, 1, 0, 0, 0, 77, 78, 5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80,
|
||||
5, 41, 0, 0, 80, 4, 1, 0, 0, 0, 81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0,
|
||||
83, 84, 5, 93, 0, 0, 84, 8, 1, 0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1,
|
||||
0, 0, 0, 87, 91, 5, 61, 0, 0, 88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0,
|
||||
90, 87, 1, 0, 0, 0, 90, 88, 1, 0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5,
|
||||
33, 0, 0, 93, 94, 5, 61, 0, 0, 94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0,
|
||||
96, 97, 5, 62, 0, 0, 97, 16, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1,
|
||||
0, 0, 0, 100, 101, 5, 60, 0, 0, 101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0,
|
||||
0, 103, 104, 5, 62, 0, 0, 104, 22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106,
|
||||
107, 5, 61, 0, 0, 107, 24, 1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110,
|
||||
7, 1, 0, 0, 110, 111, 7, 2, 0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0,
|
||||
0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 0, 0, 0, 115, 116, 7, 1, 0, 0,
|
||||
116, 117, 7, 2, 0, 0, 117, 118, 7, 3, 0, 0, 118, 28, 1, 0, 0, 0, 119, 120,
|
||||
7, 4, 0, 0, 120, 121, 7, 3, 0, 0, 121, 122, 7, 5, 0, 0, 122, 123, 7, 6,
|
||||
0, 0, 123, 124, 7, 3, 0, 0, 124, 125, 7, 3, 0, 0, 125, 126, 7, 7, 0, 0,
|
||||
126, 30, 1, 0, 0, 0, 127, 128, 7, 3, 0, 0, 128, 129, 7, 8, 0, 0, 129, 130,
|
||||
7, 1, 0, 0, 130, 131, 7, 9, 0, 0, 131, 133, 7, 5, 0, 0, 132, 134, 7, 9,
|
||||
0, 0, 133, 132, 1, 0, 0, 0, 133, 134, 1, 0, 0, 0, 134, 32, 1, 0, 0, 0,
|
||||
135, 136, 7, 10, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 11, 0, 0, 138,
|
||||
139, 7, 3, 0, 0, 139, 140, 7, 8, 0, 0, 140, 141, 7, 12, 0, 0, 141, 34,
|
||||
1, 0, 0, 0, 142, 143, 7, 13, 0, 0, 143, 144, 7, 14, 0, 0, 144, 145, 7,
|
||||
7, 0, 0, 145, 146, 7, 5, 0, 0, 146, 147, 7, 15, 0, 0, 147, 148, 7, 1, 0,
|
||||
0, 148, 150, 7, 7, 0, 0, 149, 151, 7, 9, 0, 0, 150, 149, 1, 0, 0, 0, 150,
|
||||
151, 1, 0, 0, 0, 151, 36, 1, 0, 0, 0, 152, 153, 7, 1, 0, 0, 153, 154, 7,
|
||||
7, 0, 0, 154, 38, 1, 0, 0, 0, 155, 156, 7, 7, 0, 0, 156, 157, 7, 14, 0,
|
||||
0, 157, 158, 7, 5, 0, 0, 158, 40, 1, 0, 0, 0, 159, 160, 7, 15, 0, 0, 160,
|
||||
161, 7, 7, 0, 0, 161, 162, 7, 16, 0, 0, 162, 42, 1, 0, 0, 0, 163, 164,
|
||||
7, 14, 0, 0, 164, 165, 7, 10, 0, 0, 165, 44, 1, 0, 0, 0, 166, 167, 7, 17,
|
||||
0, 0, 167, 168, 7, 15, 0, 0, 168, 169, 7, 9, 0, 0, 169, 170, 7, 5, 0, 0,
|
||||
170, 171, 7, 14, 0, 0, 171, 172, 7, 2, 0, 0, 172, 173, 7, 3, 0, 0, 173,
|
||||
174, 7, 7, 0, 0, 174, 46, 1, 0, 0, 0, 175, 176, 7, 17, 0, 0, 176, 177,
|
||||
7, 15, 0, 0, 177, 178, 7, 9, 0, 0, 178, 48, 1, 0, 0, 0, 179, 180, 7, 17,
|
||||
0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 9, 0, 0, 182, 183, 7, 15, 0,
|
||||
0, 183, 184, 7, 7, 0, 0, 184, 185, 7, 18, 0, 0, 185, 50, 1, 0, 0, 0, 186,
|
||||
187, 7, 17, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 9, 0, 0, 189, 190,
|
||||
7, 15, 0, 0, 190, 191, 7, 0, 0, 0, 191, 192, 7, 0, 0, 0, 192, 52, 1, 0,
|
||||
0, 0, 193, 194, 7, 9, 0, 0, 194, 195, 7, 3, 0, 0, 195, 196, 7, 15, 0, 0,
|
||||
196, 197, 7, 10, 0, 0, 197, 198, 7, 13, 0, 0, 198, 199, 7, 17, 0, 0, 199,
|
||||
54, 1, 0, 0, 0, 200, 201, 7, 5, 0, 0, 201, 202, 7, 10, 0, 0, 202, 203,
|
||||
7, 19, 0, 0, 203, 210, 7, 3, 0, 0, 204, 205, 7, 20, 0, 0, 205, 206, 7,
|
||||
15, 0, 0, 206, 207, 7, 0, 0, 0, 207, 208, 7, 9, 0, 0, 208, 210, 7, 3, 0,
|
||||
0, 209, 200, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 210, 56, 1, 0, 0, 0, 211,
|
||||
212, 7, 21, 0, 0, 212, 58, 1, 0, 0, 0, 213, 215, 3, 57, 28, 0, 214, 213,
|
||||
1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 217, 1, 0, 0, 0, 216, 218, 3, 73,
|
||||
36, 0, 217, 216, 1, 0, 0, 0, 218, 219, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0,
|
||||
219, 220, 1, 0, 0, 0, 220, 228, 1, 0, 0, 0, 221, 225, 5, 46, 0, 0, 222,
|
||||
224, 3, 73, 36, 0, 223, 222, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223,
|
||||
1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 229, 1, 0, 0, 0, 227, 225, 1, 0,
|
||||
0, 0, 228, 221, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 239, 1, 0, 0, 0,
|
||||
230, 232, 7, 3, 0, 0, 231, 233, 3, 57, 28, 0, 232, 231, 1, 0, 0, 0, 232,
|
||||
233, 1, 0, 0, 0, 233, 235, 1, 0, 0, 0, 234, 236, 3, 73, 36, 0, 235, 234,
|
||||
1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 237, 238, 1, 0,
|
||||
0, 0, 238, 240, 1, 0, 0, 0, 239, 230, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0,
|
||||
240, 262, 1, 0, 0, 0, 241, 243, 3, 57, 28, 0, 242, 241, 1, 0, 0, 0, 242,
|
||||
243, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 5, 46, 0, 0, 245, 247,
|
||||
3, 73, 36, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1,
|
||||
0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 259, 1, 0, 0, 0, 250, 252, 7, 3, 0,
|
||||
0, 251, 253, 3, 57, 28, 0, 252, 251, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0,
|
||||
253, 255, 1, 0, 0, 0, 254, 256, 3, 73, 36, 0, 255, 254, 1, 0, 0, 0, 256,
|
||||
257, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 260,
|
||||
1, 0, 0, 0, 259, 250, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 262, 1, 0,
|
||||
0, 0, 261, 214, 1, 0, 0, 0, 261, 242, 1, 0, 0, 0, 262, 60, 1, 0, 0, 0,
|
||||
263, 269, 5, 34, 0, 0, 264, 268, 8, 22, 0, 0, 265, 266, 5, 92, 0, 0, 266,
|
||||
268, 9, 0, 0, 0, 267, 264, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 271,
|
||||
1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 272, 1, 0,
|
||||
0, 0, 271, 269, 1, 0, 0, 0, 272, 284, 5, 34, 0, 0, 273, 279, 5, 39, 0,
|
||||
0, 274, 278, 8, 23, 0, 0, 275, 276, 5, 92, 0, 0, 276, 278, 9, 0, 0, 0,
|
||||
277, 274, 1, 0, 0, 0, 277, 275, 1, 0, 0, 0, 278, 281, 1, 0, 0, 0, 279,
|
||||
277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 282, 1, 0, 0, 0, 281, 279,
|
||||
1, 0, 0, 0, 282, 284, 5, 39, 0, 0, 283, 263, 1, 0, 0, 0, 283, 273, 1, 0,
|
||||
0, 0, 284, 62, 1, 0, 0, 0, 285, 289, 7, 24, 0, 0, 286, 288, 7, 25, 0, 0,
|
||||
287, 286, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289,
|
||||
290, 1, 0, 0, 0, 290, 64, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 293, 5,
|
||||
91, 0, 0, 293, 294, 5, 93, 0, 0, 294, 66, 1, 0, 0, 0, 295, 296, 5, 91,
|
||||
0, 0, 296, 297, 5, 42, 0, 0, 297, 298, 5, 93, 0, 0, 298, 68, 1, 0, 0, 0,
|
||||
299, 312, 3, 63, 31, 0, 300, 301, 5, 46, 0, 0, 301, 311, 3, 63, 31, 0,
|
||||
302, 311, 3, 65, 32, 0, 303, 311, 3, 67, 33, 0, 304, 306, 5, 46, 0, 0,
|
||||
305, 307, 3, 73, 36, 0, 306, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308,
|
||||
306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 311, 1, 0, 0, 0, 310, 300,
|
||||
1, 0, 0, 0, 310, 302, 1, 0, 0, 0, 310, 303, 1, 0, 0, 0, 310, 304, 1, 0,
|
||||
0, 0, 311, 314, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0,
|
||||
313, 70, 1, 0, 0, 0, 314, 312, 1, 0, 0, 0, 315, 317, 7, 26, 0, 0, 316,
|
||||
315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 316, 1, 0, 0, 0, 318, 319,
|
||||
1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 6, 35, 0, 0, 321, 72, 1, 0,
|
||||
0, 0, 322, 323, 7, 27, 0, 0, 323, 74, 1, 0, 0, 0, 324, 326, 8, 28, 0, 0,
|
||||
325, 324, 1, 0, 0, 0, 326, 327, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 327,
|
||||
328, 1, 0, 0, 0, 328, 76, 1, 0, 0, 0, 29, 0, 90, 133, 150, 209, 214, 219,
|
||||
225, 228, 232, 237, 239, 242, 248, 252, 257, 259, 261, 267, 269, 277, 279,
|
||||
283, 289, 308, 310, 312, 318, 327, 1, 6, 0, 0,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
|
||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||
@@ -281,10 +284,11 @@ const (
|
||||
FilterQueryLexerHAS = 24
|
||||
FilterQueryLexerHASANY = 25
|
||||
FilterQueryLexerHASALL = 26
|
||||
FilterQueryLexerBOOL = 27
|
||||
FilterQueryLexerNUMBER = 28
|
||||
FilterQueryLexerQUOTED_TEXT = 29
|
||||
FilterQueryLexerKEY = 30
|
||||
FilterQueryLexerWS = 31
|
||||
FilterQueryLexerFREETEXT = 32
|
||||
FilterQueryLexerSEARCH = 27
|
||||
FilterQueryLexerBOOL = 28
|
||||
FilterQueryLexerNUMBER = 29
|
||||
FilterQueryLexerQUOTED_TEXT = 30
|
||||
FilterQueryLexerKEY = 31
|
||||
FilterQueryLexerWS = 32
|
||||
FilterQueryLexerFREETEXT = 33
|
||||
)
|
||||
|
||||
@@ -40,7 +40,7 @@ func filterqueryParserInit() {
|
||||
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "ILIKE", "BETWEEN", "EXISTS",
|
||||
"REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR", "HASTOKEN", "HAS", "HASANY",
|
||||
"HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
|
||||
"HASALL", "SEARCH", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"query", "expression", "orExpression", "andExpression", "unaryExpression",
|
||||
@@ -50,7 +50,7 @@ func filterqueryParserInit() {
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 1, 32, 219, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
|
||||
4, 1, 33, 219, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
|
||||
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
|
||||
10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
|
||||
2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43,
|
||||
@@ -71,8 +71,8 @@ func filterqueryParserInit() {
|
||||
8, 12, 10, 12, 12, 12, 204, 9, 12, 1, 13, 1, 13, 1, 13, 3, 13, 209, 8,
|
||||
13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 0, 0,
|
||||
17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 5,
|
||||
1, 0, 7, 8, 1, 0, 13, 14, 2, 0, 29, 29, 32, 32, 1, 0, 23, 26, 1, 0, 27,
|
||||
30, 235, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47,
|
||||
1, 0, 7, 8, 1, 0, 13, 14, 2, 0, 30, 30, 33, 33, 1, 0, 23, 27, 1, 0, 28,
|
||||
31, 235, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47,
|
||||
1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 149, 1, 0, 0, 0,
|
||||
14, 163, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190,
|
||||
1, 0, 0, 0, 22, 192, 1, 0, 0, 0, 24, 197, 1, 0, 0, 0, 26, 208, 1, 0, 0,
|
||||
@@ -142,7 +142,7 @@ func filterqueryParserInit() {
|
||||
14, 0, 208, 205, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 207, 1, 0, 0, 0,
|
||||
209, 27, 1, 0, 0, 0, 210, 211, 5, 3, 0, 0, 211, 212, 3, 18, 9, 0, 212,
|
||||
213, 5, 4, 0, 0, 213, 29, 1, 0, 0, 0, 214, 215, 7, 4, 0, 0, 215, 31, 1,
|
||||
0, 0, 0, 216, 217, 5, 30, 0, 0, 217, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57,
|
||||
0, 0, 0, 216, 217, 5, 31, 0, 0, 217, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57,
|
||||
70, 149, 163, 180, 187, 202, 208,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
|
||||
@@ -208,12 +208,13 @@ const (
|
||||
FilterQueryParserHAS = 24
|
||||
FilterQueryParserHASANY = 25
|
||||
FilterQueryParserHASALL = 26
|
||||
FilterQueryParserBOOL = 27
|
||||
FilterQueryParserNUMBER = 28
|
||||
FilterQueryParserQUOTED_TEXT = 29
|
||||
FilterQueryParserKEY = 30
|
||||
FilterQueryParserWS = 31
|
||||
FilterQueryParserFREETEXT = 32
|
||||
FilterQueryParserSEARCH = 27
|
||||
FilterQueryParserBOOL = 28
|
||||
FilterQueryParserNUMBER = 29
|
||||
FilterQueryParserQUOTED_TEXT = 30
|
||||
FilterQueryParserKEY = 31
|
||||
FilterQueryParserWS = 32
|
||||
FilterQueryParserFREETEXT = 33
|
||||
)
|
||||
|
||||
// FilterQueryParser rules.
|
||||
@@ -803,7 +804,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
|
||||
}
|
||||
_la = p.GetTokenStream().LA(1)
|
||||
|
||||
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&6437208066) != 0 {
|
||||
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&12879659010) != 0 {
|
||||
p.SetState(51)
|
||||
p.GetErrorHandler().Sync(p)
|
||||
if p.HasError() {
|
||||
@@ -825,7 +826,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
|
||||
p.UnaryExpression()
|
||||
}
|
||||
|
||||
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHASTOKEN, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserBOOL, FilterQueryParserNUMBER, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
|
||||
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHASTOKEN, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserSEARCH, FilterQueryParserBOOL, FilterQueryParserNUMBER, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
|
||||
{
|
||||
p.SetState(50)
|
||||
p.UnaryExpression()
|
||||
@@ -2748,6 +2749,7 @@ type IFunctionCallContext interface {
|
||||
HAS() antlr.TerminalNode
|
||||
HASANY() antlr.TerminalNode
|
||||
HASALL() antlr.TerminalNode
|
||||
SEARCH() antlr.TerminalNode
|
||||
|
||||
// IsFunctionCallContext differentiates from other interfaces.
|
||||
IsFunctionCallContext()
|
||||
@@ -2825,6 +2827,10 @@ func (s *FunctionCallContext) HASALL() antlr.TerminalNode {
|
||||
return s.GetToken(FilterQueryParserHASALL, 0)
|
||||
}
|
||||
|
||||
func (s *FunctionCallContext) SEARCH() antlr.TerminalNode {
|
||||
return s.GetToken(FilterQueryParserSEARCH, 0)
|
||||
}
|
||||
|
||||
func (s *FunctionCallContext) GetRuleContext() antlr.RuleContext {
|
||||
return s
|
||||
}
|
||||
@@ -2865,7 +2871,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
|
||||
p.SetState(192)
|
||||
_la = p.GetTokenStream().LA(1)
|
||||
|
||||
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&125829120) != 0) {
|
||||
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&260046848) != 0) {
|
||||
p.GetErrorHandler().RecoverInline(p)
|
||||
} else {
|
||||
p.GetErrorHandler().ReportMatch(p)
|
||||
@@ -3505,7 +3511,7 @@ func (p *FilterQueryParser) Value() (localctx IValueContext) {
|
||||
p.SetState(214)
|
||||
_la = p.GetTokenStream().LA(1)
|
||||
|
||||
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2013265920) != 0) {
|
||||
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4026531840) != 0) {
|
||||
p.GetErrorHandler().RecoverInline(p)
|
||||
} else {
|
||||
p.GetErrorHandler().ReportMatch(p)
|
||||
|
||||
@@ -1343,7 +1343,7 @@ func getLocalTableName(tableName string) string {
|
||||
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params *retentiontypes.TTLParams) (*retentiontypes.SetTTLResponseItem, *model.ApiError) {
|
||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalLogs.StringValue(),
|
||||
instrumentationtypes.CodeNamespace: "clickhouse-reader",
|
||||
@@ -1378,7 +1378,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
|
||||
if apiErr != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
|
||||
}
|
||||
if statusItem.Status == constants.StatusPending {
|
||||
if statusItem.Status == retentiontypes.TTLSettingStatusPending {
|
||||
return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")}
|
||||
}
|
||||
}
|
||||
@@ -1437,7 +1437,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
|
||||
TransactionID: uuid,
|
||||
TableName: tableName,
|
||||
TTL: int(params.DelDuration),
|
||||
Status: constants.StatusPending,
|
||||
Status: retentiontypes.TTLSettingStatusPending,
|
||||
ColdStorageTTL: coldStorageDuration,
|
||||
OrgID: orgID,
|
||||
}
|
||||
@@ -1463,7 +1463,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusFailed).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusFailed).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -1483,7 +1483,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusFailed).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusFailed).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -1498,7 +1498,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusSuccess).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusSuccess).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -1508,10 +1508,10 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
|
||||
}
|
||||
|
||||
}(ttlPayload)
|
||||
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
return &retentiontypes.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, params *retentiontypes.TTLParams) (*retentiontypes.SetTTLResponseItem, *model.ApiError) {
|
||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalTraces.StringValue(),
|
||||
instrumentationtypes.CodeNamespace: "clickhouse-reader",
|
||||
@@ -1541,7 +1541,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
if apiErr != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
|
||||
}
|
||||
if statusItem.Status == constants.StatusPending {
|
||||
if statusItem.Status == retentiontypes.TTLSettingStatusPending {
|
||||
return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")}
|
||||
}
|
||||
}
|
||||
@@ -1575,7 +1575,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
TransactionID: uuid,
|
||||
TableName: tableName,
|
||||
TTL: int(params.DelDuration),
|
||||
Status: constants.StatusPending,
|
||||
Status: retentiontypes.TTLSettingStatusPending,
|
||||
ColdStorageTTL: coldStorageDuration,
|
||||
OrgID: orgID,
|
||||
}
|
||||
@@ -1613,7 +1613,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusFailed).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusFailed).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -1634,7 +1634,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusFailed).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusFailed).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -1649,7 +1649,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusSuccess).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusSuccess).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -1658,7 +1658,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
}
|
||||
}(distributedTableName)
|
||||
}
|
||||
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
return &retentiontypes.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) hasCustomRetentionColumn(ctx context.Context) (bool, error) {
|
||||
@@ -1687,7 +1687,7 @@ func (r *ClickHouseReader) hasCustomRetentionColumn(ctx context.Context) (bool,
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *model.CustomRetentionTTLParams) (*model.CustomRetentionTTLResponse, error) {
|
||||
func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *retentiontypes.CustomRetentionTTLParams) (*retentiontypes.CustomRetentionTTLResponse, error) {
|
||||
|
||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalLogs.StringValue(),
|
||||
@@ -1702,7 +1702,7 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
if !hasCustomRetention {
|
||||
r.logger.Info("Custom retention not supported, falling back to standard TTL method", "orgID", orgID)
|
||||
|
||||
ttlParams := &model.TTLParams{
|
||||
ttlParams := &retentiontypes.TTLParams{
|
||||
Type: params.Type,
|
||||
DelDuration: int64(params.DefaultTTLDays * 24 * 3600),
|
||||
}
|
||||
@@ -1723,7 +1723,7 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
return nil, errorsV2.Wrapf(apiErr.Err, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to set standard TTL")
|
||||
}
|
||||
|
||||
return &model.CustomRetentionTTLResponse{
|
||||
return &retentiontypes.CustomRetentionTTLResponse{
|
||||
Message: fmt.Sprintf("Custom retention not supported, applied standard TTL of %d days. %s", params.DefaultTTLDays, ttlResult.Message),
|
||||
}, nil
|
||||
}
|
||||
@@ -1734,7 +1734,7 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
uuidWithHyphen := valuer.GenerateUUID()
|
||||
uuid := strings.Replace(uuidWithHyphen.String(), "-", "", -1)
|
||||
|
||||
if params.Type != constants.LogsTTL {
|
||||
if params.Type != retentiontypes.LogsTTL {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "custom retention TTL only supported for logs")
|
||||
}
|
||||
|
||||
@@ -1765,7 +1765,7 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
if apiErr != nil {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error in processing custom_retention_ttl_status check sql query")
|
||||
}
|
||||
if statusItem.Status == constants.StatusPending {
|
||||
if statusItem.Status == retentiontypes.TTLSettingStatusPending {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "custom retention TTL is already running")
|
||||
}
|
||||
}
|
||||
@@ -1851,7 +1851,7 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
TableName: tableName,
|
||||
TTL: params.DefaultTTLDays,
|
||||
Condition: string(ttlConditionsJSON),
|
||||
Status: constants.StatusPending,
|
||||
Status: retentiontypes.TTLSettingStatusPending,
|
||||
ColdStorageTTL: coldStorageDuration,
|
||||
OrgID: orgID,
|
||||
}
|
||||
@@ -1867,7 +1867,7 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
err := r.setColdStorage(ctx, tableName, params.ColdStorageVolume)
|
||||
if err != nil {
|
||||
r.logger.Error("error in setting cold storage", errorsV2.Attr(err))
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, constants.StatusFailed)
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, retentiontypes.TTLSettingStatusFailed)
|
||||
return nil, errorsV2.Wrapf(err.Err, errorsV2.TypeInternal, errorsV2.CodeInternal, "error setting cold storage for table %s", tableName)
|
||||
}
|
||||
}
|
||||
@@ -1876,21 +1876,21 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
|
||||
r.logger.Debug("Executing custom retention TTL request: ", "request", query, "step", i+1)
|
||||
if err := r.db.Exec(ctx, query); err != nil {
|
||||
r.logger.Error("error while setting custom retention ttl", errorsV2.Attr(err))
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, constants.StatusFailed)
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, retentiontypes.TTLSettingStatusFailed)
|
||||
return nil, errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "error setting custom retention TTL for table %s, query: %s", tableName, query)
|
||||
}
|
||||
}
|
||||
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, constants.StatusSuccess)
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, retentiontypes.TTLSettingStatusSuccess)
|
||||
}
|
||||
|
||||
return &model.CustomRetentionTTLResponse{
|
||||
return &retentiontypes.CustomRetentionTTLResponse{
|
||||
Message: "custom retention TTL has been successfully set up",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// New method to build multiIf expressions with support for multiple AND conditions
|
||||
func (r *ClickHouseReader) buildMultiIfExpression(ttlConditions []model.CustomRetentionRule, defaultTTLDays int, isResourceTable bool) string {
|
||||
func (r *ClickHouseReader) buildMultiIfExpression(ttlConditions []retentiontypes.CustomRetentionRule, defaultTTLDays int, isResourceTable bool) string {
|
||||
var conditions []string
|
||||
|
||||
for i, rule := range ttlConditions {
|
||||
@@ -1962,7 +1962,7 @@ func (r *ClickHouseReader) buildMultiIfExpression(ttlConditions []model.CustomRe
|
||||
return result
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID string) (*model.GetCustomRetentionTTLResponse, error) {
|
||||
func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID string) (*retentiontypes.GetCustomRetentionTTLResponse, error) {
|
||||
// Check if V2 (custom retention) is supported
|
||||
hasCustomRetention, err := r.hasCustomRetentionColumn(ctx)
|
||||
if err != nil {
|
||||
@@ -1971,7 +1971,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
|
||||
hasCustomRetention = false
|
||||
}
|
||||
|
||||
response := &model.GetCustomRetentionTTLResponse{}
|
||||
response := &retentiontypes.GetCustomRetentionTTLResponse{}
|
||||
|
||||
if hasCustomRetention {
|
||||
// V2 - Custom retention is supported
|
||||
@@ -1994,19 +1994,19 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
// No V2 configuration found, return defaults
|
||||
response.DefaultTTLDays = 15
|
||||
response.TTLConditions = []model.CustomRetentionRule{}
|
||||
response.Status = constants.StatusSuccess
|
||||
response.DefaultTTLDays = retentiontypes.DefaultLogsRetentionDays
|
||||
response.TTLConditions = []retentiontypes.CustomRetentionRule{}
|
||||
response.Status = retentiontypes.TTLSettingStatusSuccess
|
||||
response.ColdStorageTTLDays = -1
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Parse TTL conditions from Condition
|
||||
var ttlConditions []model.CustomRetentionRule
|
||||
var ttlConditions []retentiontypes.CustomRetentionRule
|
||||
if customTTL.Condition != "" {
|
||||
if err := json.Unmarshal([]byte(customTTL.Condition), &ttlConditions); err != nil {
|
||||
r.logger.Error("Error parsing TTL conditions", errorsV2.Attr(err))
|
||||
ttlConditions = []model.CustomRetentionRule{}
|
||||
ttlConditions = []retentiontypes.CustomRetentionRule{}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2020,8 +2020,8 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
|
||||
response.Version = "v1"
|
||||
|
||||
// Get V1 TTL configuration
|
||||
ttlParams := &model.GetTTLParams{
|
||||
Type: constants.LogsTTL,
|
||||
ttlParams := &retentiontypes.GetTTLParams{
|
||||
Type: retentiontypes.LogsTTL,
|
||||
}
|
||||
|
||||
ttlResult, apiErr := r.GetTTL(ctx, orgID, ttlParams)
|
||||
@@ -2041,7 +2041,7 @@ func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID stri
|
||||
}
|
||||
|
||||
// For V1, we don't have TTL conditions
|
||||
response.TTLConditions = []model.CustomRetentionRule{}
|
||||
response.TTLConditions = []retentiontypes.CustomRetentionRule{}
|
||||
}
|
||||
|
||||
return response, nil
|
||||
@@ -2081,7 +2081,7 @@ func (r *ClickHouseReader) updateCustomRetentionTTLStatus(ctx context.Context, o
|
||||
}
|
||||
|
||||
// Enhanced validation function with duplicate detection and efficient key validation
|
||||
func (r *ClickHouseReader) validateTTLConditions(ctx context.Context, ttlConditions []model.CustomRetentionRule) error {
|
||||
func (r *ClickHouseReader) validateTTLConditions(ctx context.Context, ttlConditions []retentiontypes.CustomRetentionRule) error {
|
||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||
instrumentationtypes.CodeNamespace: "clickhouse-reader",
|
||||
instrumentationtypes.CodeFunctionName: "validateTTLConditions",
|
||||
@@ -2185,16 +2185,16 @@ func (r *ClickHouseReader) validateTTLConditions(ctx context.Context, ttlConditi
|
||||
// SetTTL sets the TTL for traces or metrics or logs tables.
|
||||
// This is an async API which creates goroutines to set TTL.
|
||||
// Status of TTL update is tracked with ttl_status table in sqlite db.
|
||||
func (r *ClickHouseReader) SetTTL(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) SetTTL(ctx context.Context, orgID string, params *retentiontypes.TTLParams) (*retentiontypes.SetTTLResponseItem, *model.ApiError) {
|
||||
// Keep only latest 100 transactions/requests
|
||||
r.deleteTtlTransactions(ctx, orgID, 100)
|
||||
|
||||
switch params.Type {
|
||||
case constants.TraceTTL:
|
||||
case retentiontypes.TraceTTL:
|
||||
return r.setTTLTraces(ctx, orgID, params)
|
||||
case constants.MetricsTTL:
|
||||
case retentiontypes.MetricsTTL:
|
||||
return r.setTTLMetrics(ctx, orgID, params)
|
||||
case constants.LogsTTL:
|
||||
case retentiontypes.LogsTTL:
|
||||
return r.setTTLLogs(ctx, orgID, params)
|
||||
default:
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while setting ttl. ttl type should be <metrics|traces>, got %v", params.Type)}
|
||||
@@ -2202,7 +2202,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context, orgID string, params *mod
|
||||
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, params *retentiontypes.TTLParams) (*retentiontypes.SetTTLResponseItem, *model.ApiError) {
|
||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalMetrics.StringValue(),
|
||||
instrumentationtypes.CodeNamespace: "clickhouse-reader",
|
||||
@@ -2231,7 +2231,7 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
|
||||
if apiErr != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
|
||||
}
|
||||
if statusItem.Status == constants.StatusPending {
|
||||
if statusItem.Status == retentiontypes.TTLSettingStatusPending {
|
||||
return nil, &model.ApiError{Typ: model.ErrorConflict, Err: fmt.Errorf("TTL is already running")}
|
||||
}
|
||||
}
|
||||
@@ -2247,7 +2247,7 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
|
||||
TransactionID: uuid,
|
||||
TableName: tableName,
|
||||
TTL: int(params.DelDuration),
|
||||
Status: constants.StatusPending,
|
||||
Status: retentiontypes.TTLSettingStatusPending,
|
||||
ColdStorageTTL: coldStorageDuration,
|
||||
OrgID: orgID,
|
||||
}
|
||||
@@ -2285,7 +2285,7 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusFailed).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusFailed).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -2306,7 +2306,7 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusFailed).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusFailed).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -2321,7 +2321,7 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
|
||||
NewUpdate().
|
||||
Model(new(retentiontypes.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", constants.StatusSuccess).
|
||||
Set("status = ?", retentiontypes.TTLSettingStatusSuccess).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
@@ -2332,7 +2332,7 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
|
||||
for _, tableName := range tableNames {
|
||||
go metricTTL(tableName)
|
||||
}
|
||||
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
return &retentiontypes.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) deleteTtlTransactions(ctx context.Context, orgID string, numberOfTransactionsStore int) {
|
||||
@@ -2389,7 +2389,7 @@ func (r *ClickHouseReader) checkTTLStatusItem(ctx context.Context, orgID string,
|
||||
// getTTLQueryStatus fetches ttl_status table status from DB
|
||||
func (r *ClickHouseReader) getTTLQueryStatus(ctx context.Context, orgID string, tableNameArray []string) (string, *model.ApiError) {
|
||||
failFlag := false
|
||||
status := constants.StatusSuccess
|
||||
status := retentiontypes.TTLSettingStatusSuccess
|
||||
for _, tableName := range tableNameArray {
|
||||
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
|
||||
emptyStatusStruct := new(retentiontypes.TTLSetting)
|
||||
@@ -2399,16 +2399,16 @@ func (r *ClickHouseReader) getTTLQueryStatus(ctx context.Context, orgID string,
|
||||
if apiErr != nil {
|
||||
return "", &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
|
||||
}
|
||||
if statusItem.Status == constants.StatusPending && statusItem.UpdatedAt.Unix()-time.Now().Unix() < 3600 {
|
||||
status = constants.StatusPending
|
||||
if statusItem.Status == retentiontypes.TTLSettingStatusPending && statusItem.UpdatedAt.Unix()-time.Now().Unix() < 3600 {
|
||||
status = retentiontypes.TTLSettingStatusPending
|
||||
return status, nil
|
||||
}
|
||||
if statusItem.Status == constants.StatusFailed {
|
||||
if statusItem.Status == retentiontypes.TTLSettingStatusFailed {
|
||||
failFlag = true
|
||||
}
|
||||
}
|
||||
if failFlag {
|
||||
status = constants.StatusFailed
|
||||
status = retentiontypes.TTLSettingStatusFailed
|
||||
}
|
||||
|
||||
return status, nil
|
||||
@@ -2461,7 +2461,7 @@ func getLocalTableNameArray(tableNames []string) []string {
|
||||
}
|
||||
|
||||
// GetTTL returns current ttl, expected ttl and past setTTL status for metrics/traces.
|
||||
func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *retentiontypes.GetTTLParams) (*retentiontypes.GetTTLResponseItem, *model.ApiError) {
|
||||
|
||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||
instrumentationtypes.CodeNamespace: "clickhouse-reader",
|
||||
@@ -2496,8 +2496,8 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
return delTTL, moveTTL
|
||||
}
|
||||
|
||||
getMetricsTTL := func() (*model.DBResponseTTL, *model.ApiError) {
|
||||
var dbResp []model.DBResponseTTL
|
||||
getMetricsTTL := func() (*retentiontypes.DBResponseTTL, *model.ApiError) {
|
||||
var dbResp []retentiontypes.DBResponseTTL
|
||||
|
||||
query := fmt.Sprintf("SELECT engine_full FROM system.tables WHERE name='%v'", signozSampleLocalTableName)
|
||||
|
||||
@@ -2514,8 +2514,8 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
}
|
||||
}
|
||||
|
||||
getTracesTTL := func() (*model.DBResponseTTL, *model.ApiError) {
|
||||
var dbResp []model.DBResponseTTL
|
||||
getTracesTTL := func() (*retentiontypes.DBResponseTTL, *model.ApiError) {
|
||||
var dbResp []retentiontypes.DBResponseTTL
|
||||
|
||||
query := fmt.Sprintf("SELECT engine_full FROM system.tables WHERE name='%v' AND database='%v'", r.traceLocalTableName, signozTraceDBName)
|
||||
|
||||
@@ -2532,8 +2532,8 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
}
|
||||
}
|
||||
|
||||
getLogsTTL := func() (*model.DBResponseTTL, *model.ApiError) {
|
||||
var dbResp []model.DBResponseTTL
|
||||
getLogsTTL := func() (*retentiontypes.DBResponseTTL, *model.ApiError) {
|
||||
var dbResp []retentiontypes.DBResponseTTL
|
||||
|
||||
query := fmt.Sprintf("SELECT engine_full FROM system.tables WHERE name='%v' AND database='%v'", r.logsLocalTableName, r.logsDB)
|
||||
|
||||
@@ -2551,7 +2551,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
}
|
||||
|
||||
switch ttlParams.Type {
|
||||
case constants.TraceTTL:
|
||||
case retentiontypes.TraceTTL:
|
||||
tableNameArray := []string{
|
||||
r.TraceDB + "." + r.traceTableName,
|
||||
r.TraceDB + "." + r.traceResourceTableV3,
|
||||
@@ -2579,9 +2579,9 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
}
|
||||
|
||||
delTTL, moveTTL := parseTTL(dbResp.EngineFull)
|
||||
return &model.GetTTLResponseItem{TracesTime: delTTL, TracesMoveTime: moveTTL, ExpectedTracesTime: ttlQuery.TTL, ExpectedTracesMoveTime: ttlQuery.ColdStorageTTL, Status: status}, nil
|
||||
return &retentiontypes.GetTTLResponseItem{TracesTime: delTTL, TracesMoveTime: moveTTL, ExpectedTracesTime: ttlQuery.TTL, ExpectedTracesMoveTime: ttlQuery.ColdStorageTTL, Status: status}, nil
|
||||
|
||||
case constants.MetricsTTL:
|
||||
case retentiontypes.MetricsTTL:
|
||||
tableNameArray := []string{signozMetricDBName + "." + signozSampleTableName}
|
||||
tableNameArray = getLocalTableNameArray(tableNameArray)
|
||||
status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
|
||||
@@ -2602,9 +2602,9 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
}
|
||||
|
||||
delTTL, moveTTL := parseTTL(dbResp.EngineFull)
|
||||
return &model.GetTTLResponseItem{MetricsTime: delTTL, MetricsMoveTime: moveTTL, ExpectedMetricsTime: ttlQuery.TTL, ExpectedMetricsMoveTime: ttlQuery.ColdStorageTTL, Status: status}, nil
|
||||
return &retentiontypes.GetTTLResponseItem{MetricsTime: delTTL, MetricsMoveTime: moveTTL, ExpectedMetricsTime: ttlQuery.TTL, ExpectedMetricsMoveTime: ttlQuery.ColdStorageTTL, Status: status}, nil
|
||||
|
||||
case constants.LogsTTL:
|
||||
case retentiontypes.LogsTTL:
|
||||
tableNameArray := []string{r.logsDB + "." + r.logsTableName}
|
||||
tableNameArray = getLocalTableNameArray(tableNameArray)
|
||||
status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
|
||||
@@ -2625,7 +2625,7 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
|
||||
}
|
||||
|
||||
delTTL, moveTTL := parseTTL(dbResp.EngineFull)
|
||||
return &model.GetTTLResponseItem{LogsTime: delTTL, LogsMoveTime: moveTTL, ExpectedLogsTime: ttlQuery.TTL, ExpectedLogsMoveTime: ttlQuery.ColdStorageTTL, Status: status}, nil
|
||||
return &retentiontypes.GetTTLResponseItem{LogsTime: delTTL, LogsMoveTime: moveTTL, ExpectedLogsTime: ttlQuery.TTL, ExpectedLogsMoveTime: ttlQuery.ColdStorageTTL, Status: status}, nil
|
||||
|
||||
default:
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error while getting ttl. ttl type should be metrics|traces, got %v",
|
||||
|
||||
@@ -34,6 +34,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
@@ -1677,7 +1678,7 @@ func (aH *APIHandler) setCustomRetentionTTL(w http.ResponseWriter, r *http.Reque
|
||||
return
|
||||
}
|
||||
|
||||
var params model.CustomRetentionTTLParams
|
||||
var params retentiontypes.CustomRetentionTTLParams
|
||||
if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil {
|
||||
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "Invalid data"))
|
||||
return
|
||||
|
||||
@@ -40,6 +40,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
chVariables "github.com/SigNoz/signoz/pkg/variables/clickhouse"
|
||||
)
|
||||
|
||||
@@ -419,7 +420,7 @@ func parseTime(param string, r *http.Request) (*time.Time, error) {
|
||||
|
||||
}
|
||||
|
||||
func parseTTLParams(r *http.Request) (*model.TTLParams, error) {
|
||||
func parseTTLParams(r *http.Request) (*retentiontypes.TTLParams, error) {
|
||||
|
||||
// make sure either of the query params are present
|
||||
typeTTL := r.URL.Query().Get("type")
|
||||
@@ -432,7 +433,7 @@ func parseTTLParams(r *http.Request) (*model.TTLParams, error) {
|
||||
}
|
||||
|
||||
// Validate the type parameter
|
||||
if typeTTL != baseconstants.TraceTTL && typeTTL != baseconstants.MetricsTTL && typeTTL != baseconstants.LogsTTL {
|
||||
if typeTTL != retentiontypes.TraceTTL && typeTTL != retentiontypes.MetricsTTL && typeTTL != retentiontypes.LogsTTL {
|
||||
return nil, fmt.Errorf("type param should be metrics|traces|logs, got %v", typeTTL)
|
||||
}
|
||||
|
||||
@@ -455,7 +456,7 @@ func parseTTLParams(r *http.Request) (*model.TTLParams, error) {
|
||||
}
|
||||
}
|
||||
|
||||
return &model.TTLParams{
|
||||
return &retentiontypes.TTLParams{
|
||||
Type: typeTTL,
|
||||
DelDuration: int64(durationParsed.Seconds()),
|
||||
ColdStorageVolume: coldStorage,
|
||||
@@ -463,7 +464,7 @@ func parseTTLParams(r *http.Request) (*model.TTLParams, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
func parseGetTTL(r *http.Request) (*model.GetTTLParams, error) {
|
||||
func parseGetTTL(r *http.Request) (*retentiontypes.GetTTLParams, error) {
|
||||
|
||||
typeTTL := r.URL.Query().Get("type")
|
||||
|
||||
@@ -471,12 +472,12 @@ func parseGetTTL(r *http.Request) (*model.GetTTLParams, error) {
|
||||
return nil, fmt.Errorf("type param cannot be empty from the query")
|
||||
} else {
|
||||
// Validate the type parameter
|
||||
if typeTTL != baseconstants.TraceTTL && typeTTL != baseconstants.MetricsTTL && typeTTL != baseconstants.LogsTTL {
|
||||
if typeTTL != retentiontypes.TraceTTL && typeTTL != retentiontypes.MetricsTTL && typeTTL != retentiontypes.LogsTTL {
|
||||
return nil, fmt.Errorf("type param should be metrics|traces|logs, got %v", typeTTL)
|
||||
}
|
||||
}
|
||||
|
||||
return &model.GetTTLParams{Type: typeTTL}, nil
|
||||
return &retentiontypes.GetTTLParams{Type: typeTTL}, nil
|
||||
}
|
||||
|
||||
func parseAggregateAttributeRequest(r *http.Request) (*v3.AggregateAttributeRequest, error) {
|
||||
|
||||
@@ -19,10 +19,6 @@ const (
|
||||
|
||||
const MaxAllowedPointsInTimeSeries = 300
|
||||
|
||||
const TraceTTL = "traces"
|
||||
const MetricsTTL = "metrics"
|
||||
const LogsTTL = "logs"
|
||||
|
||||
const SpanSearchScopeRoot = "isroot"
|
||||
const SpanSearchScopeEntryPoint = "isentrypoint"
|
||||
const OrderBySpanCount = "span_count"
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/querycache"
|
||||
"github.com/SigNoz/signoz/pkg/types/retentiontypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/util/stats"
|
||||
@@ -23,8 +24,8 @@ type Reader interface {
|
||||
GetServicesList(ctx context.Context) (*[]string, error)
|
||||
GetDependencyGraph(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)
|
||||
|
||||
GetTTL(ctx context.Context, orgID string, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
|
||||
GetCustomRetentionTTL(ctx context.Context, orgID string) (*model.GetCustomRetentionTTLResponse, error)
|
||||
GetTTL(ctx context.Context, orgID string, ttlParams *retentiontypes.GetTTLParams) (*retentiontypes.GetTTLResponseItem, *model.ApiError)
|
||||
GetCustomRetentionTTL(ctx context.Context, orgID string) (*retentiontypes.GetCustomRetentionTTLResponse, error)
|
||||
|
||||
// GetDisks returns a list of disks configured in the underlying DB. It is supported by
|
||||
// clickhouse only.
|
||||
@@ -46,8 +47,8 @@ type Reader interface {
|
||||
GetFlamegraphSpansForTrace(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, error)
|
||||
|
||||
// Setter Interfaces
|
||||
SetTTL(ctx context.Context, orgID string, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)
|
||||
SetTTLV2(ctx context.Context, orgID string, params *model.CustomRetentionTTLParams) (*model.CustomRetentionTTLResponse, error)
|
||||
SetTTL(ctx context.Context, orgID string, ttlParams *retentiontypes.TTLParams) (*retentiontypes.SetTTLResponseItem, *model.ApiError)
|
||||
SetTTLV2(ctx context.Context, orgID string, params *retentiontypes.CustomRetentionTTLParams) (*retentiontypes.CustomRetentionTTLResponse, error)
|
||||
|
||||
FetchTemporality(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]map[v3.Temporality]bool, error)
|
||||
GetMetricAggregateAttributes(ctx context.Context, orgID valuer.UUID, req *v3.AggregateAttributeRequest, skipSignozMetrics bool) (*v3.AggregateAttributeResponse, error)
|
||||
|
||||
@@ -404,56 +404,6 @@ type TagKey struct {
|
||||
Type TagDataType `json:"type"`
|
||||
}
|
||||
|
||||
type TTLParams struct {
|
||||
Type string // It can be one of {traces, metrics}.
|
||||
ColdStorageVolume string // Name of the cold storage volume.
|
||||
ToColdStorageDuration int64 // Seconds after which data will be moved to cold storage.
|
||||
DelDuration int64 // Seconds after which data will be deleted.
|
||||
}
|
||||
|
||||
type CustomRetentionTTLParams struct {
|
||||
Type string `json:"type"`
|
||||
DefaultTTLDays int `json:"defaultTTLDays"`
|
||||
TTLConditions []CustomRetentionRule `json:"ttlConditions"`
|
||||
ColdStorageVolume string `json:"coldStorageVolume,omitempty"`
|
||||
ToColdStorageDurationDays int64 `json:"coldStorageDurationDays,omitempty"`
|
||||
}
|
||||
|
||||
type CustomRetentionRule struct {
|
||||
Filters []FilterCondition `json:"conditions"`
|
||||
TTLDays int `json:"ttlDays"`
|
||||
}
|
||||
|
||||
type FilterCondition struct {
|
||||
Key string `json:"key"`
|
||||
Values []string `json:"values"`
|
||||
}
|
||||
|
||||
type GetCustomRetentionTTLResponse struct {
|
||||
Version string `json:"version"`
|
||||
Status string `json:"status"`
|
||||
|
||||
// V1 fields
|
||||
// LogsTime int `json:"logs_ttl_duration_hrs,omitempty"`
|
||||
// LogsMoveTime int `json:"logs_move_ttl_duration_hrs,omitempty"`
|
||||
ExpectedLogsTime int `json:"expected_logs_ttl_duration_hrs,omitempty"`
|
||||
ExpectedLogsMoveTime int `json:"expected_logs_move_ttl_duration_hrs,omitempty"`
|
||||
|
||||
// V2 fields
|
||||
DefaultTTLDays int `json:"default_ttl_days,omitempty"`
|
||||
TTLConditions []CustomRetentionRule `json:"ttl_conditions,omitempty"`
|
||||
ColdStorageVolume string `json:"cold_storage_volume,omitempty"`
|
||||
ColdStorageTTLDays int `json:"cold_storage_ttl_days,omitempty"`
|
||||
}
|
||||
|
||||
type CustomRetentionTTLResponse struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type GetTTLParams struct {
|
||||
Type string
|
||||
}
|
||||
|
||||
type ListErrorsParams struct {
|
||||
StartStr string `json:"start"`
|
||||
EndStr string `json:"end"`
|
||||
|
||||
@@ -150,16 +150,6 @@ type RuleResponseItem struct {
|
||||
Data string `json:"data" db:"data"`
|
||||
}
|
||||
|
||||
type TTLStatusItem struct {
|
||||
Id int `json:"id" db:"id"`
|
||||
UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
|
||||
CreatedAt time.Time `json:"created_at" db:"created_at"`
|
||||
TableName string `json:"table_name" db:"table_name"`
|
||||
TTL int `json:"ttl" db:"ttl"`
|
||||
Status string `json:"status" db:"status"`
|
||||
ColdStorageTtl int `json:"cold_storage_ttl" db:"cold_storage_ttl"`
|
||||
}
|
||||
|
||||
type ChannelItem struct {
|
||||
Id int `json:"id" db:"id"`
|
||||
CreatedAt time.Time `json:"created_at" db:"created_at"`
|
||||
@@ -462,35 +452,11 @@ type SpanAggregatesDBResponseItem struct {
|
||||
GroupBy string `ch:"groupBy"`
|
||||
}
|
||||
|
||||
type SetTTLResponseItem struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type DiskItem struct {
|
||||
Name string `json:"name,omitempty" ch:"name"`
|
||||
Type string `json:"type,omitempty" ch:"type"`
|
||||
}
|
||||
|
||||
type DBResponseTTL struct {
|
||||
EngineFull string `ch:"engine_full"`
|
||||
}
|
||||
|
||||
type GetTTLResponseItem struct {
|
||||
MetricsTime int `json:"metrics_ttl_duration_hrs,omitempty"`
|
||||
MetricsMoveTime int `json:"metrics_move_ttl_duration_hrs,omitempty"`
|
||||
TracesTime int `json:"traces_ttl_duration_hrs,omitempty"`
|
||||
TracesMoveTime int `json:"traces_move_ttl_duration_hrs,omitempty"`
|
||||
LogsTime int `json:"logs_ttl_duration_hrs,omitempty"`
|
||||
LogsMoveTime int `json:"logs_move_ttl_duration_hrs,omitempty"`
|
||||
ExpectedMetricsTime int `json:"expected_metrics_ttl_duration_hrs,omitempty"`
|
||||
ExpectedMetricsMoveTime int `json:"expected_metrics_move_ttl_duration_hrs,omitempty"`
|
||||
ExpectedTracesTime int `json:"expected_traces_ttl_duration_hrs,omitempty"`
|
||||
ExpectedTracesMoveTime int `json:"expected_traces_move_ttl_duration_hrs,omitempty"`
|
||||
ExpectedLogsTime int `json:"expected_logs_ttl_duration_hrs,omitempty"`
|
||||
ExpectedLogsMoveTime int `json:"expected_logs_move_ttl_duration_hrs,omitempty"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
type DBResponseServiceName struct {
|
||||
ServiceName string `ch:"serviceName"`
|
||||
Count uint64 `ch:"count"`
|
||||
|
||||
@@ -8,6 +8,17 @@ const (
|
||||
// BodyFullTextSearchDefaultWarning is emitted when a full-text search or "body" searches are hit
|
||||
// with New JSON Body enhancements.
|
||||
BodyFullTextSearchDefaultWarning = "Full text searches default to `body.message:string`. Use `body.<key>` to search a different field inside body"
|
||||
|
||||
// FTSInternalKey is the sentinel Name on TelemetryFieldKey instances that represent
|
||||
// wildcard map searches (all attribute/resource keys+values). The unconventional value
|
||||
// prevents collision with any real field name a user could type.
|
||||
FTSInternalKey = "_X_INTERNAL_FTS_KEY"
|
||||
|
||||
// SearchFunctionName is the grammar function name for full-text search.
|
||||
SearchFunctionName = "search"
|
||||
|
||||
// FTSMaxWindowNs is the maximum allowed time range for a search() query (6 hours).
|
||||
FTSMaxWindowNs = uint64(6 * 60 * 60 * 1_000_000_000)
|
||||
)
|
||||
|
||||
var (
|
||||
|
||||
@@ -46,6 +46,7 @@ type filterExpressionVisitor struct {
|
||||
keysWithWarnings map[string]bool
|
||||
startNs uint64
|
||||
endNs uint64
|
||||
ftsFieldKeys []*telemetrytypes.TelemetryFieldKey
|
||||
}
|
||||
|
||||
type FilterExprVisitorOpts struct {
|
||||
@@ -65,6 +66,9 @@ type FilterExprVisitorOpts struct {
|
||||
Variables map[string]qbtypes.VariableItem
|
||||
StartNs uint64
|
||||
EndNs uint64
|
||||
// FTSFieldKeys enables search() for this query context. nil disables search()
|
||||
// (traces, metrics, and non-log callers leave this nil).
|
||||
FTSFieldKeys []*telemetrytypes.TelemetryFieldKey
|
||||
}
|
||||
|
||||
// newFilterExpressionVisitor creates a new filterExpressionVisitor.
|
||||
@@ -87,6 +91,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
|
||||
keysWithWarnings: make(map[string]bool),
|
||||
startNs: opts.StartNs,
|
||||
endNs: opts.EndNs,
|
||||
ftsFieldKeys: opts.FTSFieldKeys,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -334,14 +339,9 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
|
||||
return SkipConditionLiteral
|
||||
}
|
||||
|
||||
if v.fullTextColumn == nil {
|
||||
v.errors = append(v.errors, "full text search is not supported")
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
child := ctx.GetChild(0)
|
||||
var searchText string
|
||||
if keyCtx, ok := child.(*grammar.KeyContext); ok {
|
||||
// create a full text search condition on the body field
|
||||
searchText = keyCtx.GetText()
|
||||
} else if valCtx, ok := child.(*grammar.ValueContext); ok {
|
||||
if valCtx.QUOTED_TEXT() != nil {
|
||||
@@ -357,6 +357,15 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
}
|
||||
|
||||
if len(v.ftsFieldKeys) > 0 {
|
||||
return v.runSearchFunction(searchText)
|
||||
}
|
||||
|
||||
if v.fullTextColumn == nil {
|
||||
v.errors = append(v.errors, "full text search is not supported")
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.startNs, v.endNs, v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(searchText), v.builder)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
|
||||
@@ -365,7 +374,6 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
|
||||
if v.bodyJSONEnabled && v.fullTextColumn.Name == "body" {
|
||||
v.warnings = append(v.warnings, BodyFullTextSearchDefaultWarning)
|
||||
}
|
||||
|
||||
return cond
|
||||
}
|
||||
|
||||
@@ -711,6 +719,10 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
|
||||
text = ctx.FREETEXT().GetText()
|
||||
}
|
||||
|
||||
if len(v.ftsFieldKeys) > 0 {
|
||||
return v.runSearchFunction(text)
|
||||
}
|
||||
|
||||
if v.fullTextColumn == nil {
|
||||
v.errors = append(v.errors, "full text search is not supported")
|
||||
return ErrorConditionLiteral
|
||||
@@ -728,12 +740,19 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
|
||||
return cond
|
||||
}
|
||||
|
||||
// VisitFunctionCall handles function calls like has(), hasAny(), etc.
|
||||
// VisitFunctionCall handles function calls like has(), hasAny(), search(), etc.
|
||||
func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any {
|
||||
if v.skipFunctionCalls {
|
||||
return SkipConditionLiteral
|
||||
}
|
||||
|
||||
// search() must be handled before visiting params: unquoted tokens like
|
||||
// search(error) are parsed as a key context, and visiting them through VisitKey
|
||||
// would append "key not found" errors before we can treat the text as a search string.
|
||||
if ctx.SEARCH() != nil {
|
||||
return v.visitSearchFunction(ctx)
|
||||
}
|
||||
|
||||
// Get function name based on which token is present
|
||||
var functionName string
|
||||
if ctx.HAS() != nil {
|
||||
@@ -842,6 +861,63 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
|
||||
return v.builder.Or(conds...)
|
||||
}
|
||||
|
||||
// runSearchFunction fans a regex match for text across all ftsFieldKeys using OR.
|
||||
// Used by both explicit search() calls and implicit bare-expression FTS for logs.
|
||||
// Enforces the FTSMaxWindowNs guard so all callers share the same time-window limit.
|
||||
func (v *filterExpressionVisitor) runSearchFunction(text string) any {
|
||||
if v.endNs > 0 && v.startNs > 0 && (v.endNs-v.startNs) > FTSMaxWindowNs {
|
||||
v.errors = append(v.errors, "full text search is restricted to a maximum of 6-hour time window")
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
|
||||
formattedText := FormatFullTextSearch(text)
|
||||
var ftsConds []string
|
||||
for _, key := range v.ftsFieldKeys {
|
||||
cond, err := v.conditionBuilder.ConditionFor(v.context, v.startNs, v.endNs, key, qbtypes.FilterOperatorRegexp, formattedText, v.builder)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
ftsConds = append(ftsConds, cond)
|
||||
}
|
||||
if len(ftsConds) == 0 {
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
return v.builder.Or(ftsConds...)
|
||||
}
|
||||
|
||||
// visitSearchFunction handles the search() function call.
|
||||
// search('value') or search(value) fans out a regex match across all FTS column keys.
|
||||
func (v *filterExpressionVisitor) visitSearchFunction(ctx *grammar.FunctionCallContext) any {
|
||||
// ftsFieldKeys == nil means search() is not enabled for this signal/query type.
|
||||
// Only log statement builders set FTSFieldKeys; traces/metrics leave it nil.
|
||||
if len(v.ftsFieldKeys) == 0 {
|
||||
v.errors = append(v.errors, "search() is only supported for log queries")
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
|
||||
// Extract the search text directly from the parse tree — bypass VisitKey so that
|
||||
// unquoted tokens like search(error) don't trigger "key not found" errors.
|
||||
paramCtxs := ctx.FunctionParamList().AllFunctionParam()
|
||||
if len(paramCtxs) < 1 {
|
||||
v.errors = append(v.errors, "search() requires a value parameter, e.g. search('error') or search(error)")
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
paramCtx := paramCtxs[0]
|
||||
var searchText string
|
||||
if paramCtx.Value() != nil {
|
||||
raw := v.Visit(paramCtx.Value())
|
||||
searchText = fmt.Sprintf("%v", raw)
|
||||
} else if paramCtx.Key() != nil {
|
||||
// Unquoted word — use the raw token text, bypassing the key lookup.
|
||||
searchText = paramCtx.Key().GetText()
|
||||
} else {
|
||||
v.errors = append(v.errors, "search() parameter must be a string value")
|
||||
return ErrorConditionLiteral
|
||||
}
|
||||
|
||||
return v.runSearchFunction(searchText)
|
||||
}
|
||||
|
||||
// VisitFunctionParamList handles the parameter list for function calls.
|
||||
func (v *filterExpressionVisitor) VisitFunctionParamList(ctx *grammar.FunctionParamListContext) any {
|
||||
params := ctx.AllFunctionParam()
|
||||
|
||||
@@ -23,6 +23,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/identn"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/inframonitoring"
|
||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||
@@ -135,6 +136,9 @@ type Config struct {
|
||||
// Auditor config
|
||||
Auditor auditor.Config `mapstructure:"auditor"`
|
||||
|
||||
// MeterReporter config
|
||||
MeterReporter meterreporter.Config `mapstructure:"meterreporter"`
|
||||
|
||||
// CloudIntegration config
|
||||
CloudIntegration cloudintegration.Config `mapstructure:"cloudintegration"`
|
||||
|
||||
@@ -175,6 +179,7 @@ func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.R
|
||||
identn.NewConfigFactory(),
|
||||
serviceaccount.NewConfigFactory(),
|
||||
auditor.NewConfigFactory(),
|
||||
meterreporter.NewConfigFactory(),
|
||||
cloudintegration.NewConfigFactory(),
|
||||
tracedetail.NewConfigFactory(),
|
||||
authz.NewConfigFactory(),
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention/implretention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
@@ -52,7 +53,8 @@ func TestNewHandlers(t *testing.T) {
|
||||
userRoleStore := impluser.NewUserRoleStore(sqlstore, providerSettings)
|
||||
|
||||
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings), userRoleStore, flagger)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, nil, nil, flagger)
|
||||
retentionGetter := implretention.NewGetter(implretention.NewStore(sqlstore))
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, nil, nil, retentionGetter, flagger)
|
||||
|
||||
querierHandler := querier.NewHandler(providerSettings, nil, nil)
|
||||
registryHandler := factory.NewHandler(nil)
|
||||
|
||||
@@ -29,6 +29,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory/implrulestatehistory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/savedview"
|
||||
@@ -63,6 +64,7 @@ type Modules struct {
|
||||
Preference preference.Module
|
||||
UserSetter user.Setter
|
||||
UserGetter user.Getter
|
||||
RetentionGetter retention.Getter
|
||||
SavedView savedview.Module
|
||||
Apdex apdex.Module
|
||||
Dashboard dashboard.Module
|
||||
@@ -103,6 +105,7 @@ func NewModules(
|
||||
userRoleStore authtypes.UserRoleStore,
|
||||
serviceAccount serviceaccount.Module,
|
||||
cloudIntegrationModule cloudintegration.Module,
|
||||
retentionGetter retention.Getter,
|
||||
fl flagger.Flagger,
|
||||
) Modules {
|
||||
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
|
||||
@@ -119,6 +122,7 @@ func NewModules(
|
||||
Dashboard: dashboard,
|
||||
UserSetter: userSetter,
|
||||
UserGetter: userGetter,
|
||||
RetentionGetter: retentionGetter,
|
||||
QuickFilter: quickfilter,
|
||||
TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
|
||||
RawDataExport: implrawdataexport.NewModule(querier),
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration/implcloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention/implretention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
@@ -56,7 +57,8 @@ func TestNewModules(t *testing.T) {
|
||||
|
||||
serviceAccount := implserviceaccount.NewModule(implserviceaccount.NewStore(sqlstore), nil, nil, nil, providerSettings, serviceaccount.Config{})
|
||||
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, serviceAccount, implcloudintegration.NewModule(), flagger)
|
||||
retentionGetter := implretention.NewGetter(implretention.NewStore(sqlstore))
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, serviceAccount, implcloudintegration.NewModule(), retentionGetter, flagger)
|
||||
|
||||
reflectVal := reflect.ValueOf(modules)
|
||||
for i := 0; i < reflectVal.NumField(); i++ {
|
||||
|
||||
@@ -28,6 +28,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/identn/apikeyidentn"
|
||||
"github.com/SigNoz/signoz/pkg/identn/impersonationidentn"
|
||||
"github.com/SigNoz/signoz/pkg/identn/tokenizeridentn"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter/noopmeterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
@@ -319,6 +321,12 @@ func NewAuditorProviderFactories() factory.NamedMap[factory.ProviderFactory[audi
|
||||
)
|
||||
}
|
||||
|
||||
func NewMeterReporterProviderFactories() factory.NamedMap[factory.ProviderFactory[meterreporter.Reporter, meterreporter.Config]] {
|
||||
return factory.MustNewNamedMap(
|
||||
noopmeterreporter.NewFactory(),
|
||||
)
|
||||
}
|
||||
|
||||
func NewFlaggerProviderFactories(registry featuretypes.Registry) factory.NamedMap[factory.ProviderFactory[flagger.FlaggerProvider, flagger.Config]] {
|
||||
return factory.MustNewNamedMap(
|
||||
configflagger.NewFactory(registry),
|
||||
|
||||
@@ -22,10 +22,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/identn"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/meterreporter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention/implretention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
|
||||
@@ -84,6 +87,7 @@ type SigNoz struct {
|
||||
Flagger flagger.Flagger
|
||||
Gateway gateway.Gateway
|
||||
Auditor auditor.Auditor
|
||||
MeterReporter meterreporter.Reporter
|
||||
}
|
||||
|
||||
func New(
|
||||
@@ -104,6 +108,7 @@ func New(
|
||||
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
|
||||
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
|
||||
auditorProviderFactories func(licensing.Licensing) factory.NamedMap[factory.ProviderFactory[auditor.Auditor, auditor.Config]],
|
||||
meterReporterProviderFactories func(context.Context, factory.ProviderSettings, flagger.Flagger, licensing.Licensing, telemetrystore.TelemetryStore, retention.Getter, organization.Getter, zeus.Zeus) (factory.NamedMap[factory.ProviderFactory[meterreporter.Reporter, meterreporter.Config]], string),
|
||||
querierHandlerCallback func(factory.ProviderSettings, querier.Querier, analytics.Analytics) querier.Handler,
|
||||
cloudIntegrationCallback func(sqlstore.SQLStore, global.Global, zeus.Zeus, gateway.Gateway, licensing.Licensing, serviceaccount.Module, cloudintegration.Config) (cloudintegration.Module, error),
|
||||
rulerProviderFactories func(cache.Cache, alertmanager.Alertmanager, sqlstore.SQLStore, telemetrystore.TelemetryStore, telemetrytypes.MetadataStore, prometheus.Prometheus, organization.Getter, rulestatehistory.Module, querier.Querier, queryparser.QueryParser) factory.NamedMap[factory.ProviderFactory[ruler.Ruler, ruler.Config]],
|
||||
@@ -228,6 +233,8 @@ func New(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
retentionGetter := implretention.NewGetter(implretention.NewStore(sqlstore))
|
||||
|
||||
// Initialize prometheus from the available prometheus provider factories
|
||||
prometheus, err := factory.NewProviderFromNamedMap(
|
||||
ctx,
|
||||
@@ -386,6 +393,13 @@ func New(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Initialize meter reporter from the variant-specific provider factories
|
||||
meterReporterFactories, meterReporterProvider := meterReporterProviderFactories(ctx, providerSettings, flagger, licensing, telemetrystore, retentionGetter, orgGetter, zeus)
|
||||
meterReporter, err := factory.NewProviderFromNamedMap(ctx, providerSettings, config.MeterReporter, meterReporterFactories, meterReporterProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Initialize authns
|
||||
store := sqlauthnstore.NewStore(sqlstore)
|
||||
authNs, err := authNsCallback(ctx, providerSettings, store, licensing)
|
||||
@@ -441,7 +455,7 @@ func New(
|
||||
}
|
||||
|
||||
// Initialize all modules
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter, userRoleStore, serviceAccount, cloudIntegrationModule, flagger)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter, userRoleStore, serviceAccount, cloudIntegrationModule, retentionGetter, flagger)
|
||||
|
||||
// Initialize ruler from the variant-specific provider factories
|
||||
rulerInstance, err := factory.NewProviderFromNamedMap(ctx, providerSettings, config.Ruler, rulerProviderFactories(cache, alertmanager, sqlstore, telemetrystore, telemetryMetadataStore, prometheus, orgGetter, modules.RuleStateHistory, querier, queryParser), "signoz")
|
||||
@@ -501,6 +515,7 @@ func New(
|
||||
factory.NewNamedService(factory.MustNewName("authz"), authz),
|
||||
factory.NewNamedService(factory.MustNewName("user"), userService, factory.MustNewName("authz")),
|
||||
factory.NewNamedService(factory.MustNewName("auditor"), auditor),
|
||||
factory.NewNamedService(factory.MustNewName("meterreporter"), meterReporter, factory.MustNewName("licensing")),
|
||||
factory.NewNamedService(factory.MustNewName("ruler"), rulerInstance),
|
||||
)
|
||||
if err != nil {
|
||||
@@ -550,5 +565,6 @@ func New(
|
||||
Flagger: flagger,
|
||||
Gateway: gateway,
|
||||
Auditor: auditor,
|
||||
MeterReporter: meterReporter,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -25,6 +25,17 @@ func NewConditionBuilder(fm qbtypes.FieldMapper, fl flagger.Flagger) *conditionB
|
||||
return &conditionBuilder{fm: fm, fl: fl}
|
||||
}
|
||||
|
||||
// ftsMapExprs returns mapKeys and mapValues expressions for FTS matching on a map column.
|
||||
// Non-string value types are wrapped with arrayMap(x -> toString(x), ...).
|
||||
func ftsMapExprs(col *schema.Column) (keysExpr, valsExpr string) {
|
||||
keysExpr = fmt.Sprintf("mapKeys(%s)", col.Name)
|
||||
valsExpr = fmt.Sprintf("mapValues(%s)", col.Name)
|
||||
if mc, ok := col.Type.(schema.MapColumnType); ok && mc.ValueType.GetType() != schema.ColumnTypeEnumString {
|
||||
valsExpr = fmt.Sprintf("arrayMap(x -> toString(x), mapValues(%s))", col.Name)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (c *conditionBuilder) conditionFor(
|
||||
ctx context.Context,
|
||||
startNs, endNs uint64,
|
||||
@@ -118,10 +129,24 @@ func (c *conditionBuilder) conditionFor(
|
||||
return sb.NotILike(fieldExpression, fmt.Sprintf("%%%s%%", value)), nil
|
||||
|
||||
case qbtypes.FilterOperatorRegexp:
|
||||
if key.Name == querybuilder.FTSInternalKey {
|
||||
rawVal := fmt.Sprintf("%v", value)
|
||||
keysExpr, valsExpr := ftsMapExprs(columns[0])
|
||||
keysCond := fmt.Sprintf(`arrayExists(x -> match(x, %s), %s)`, sb.Var(rawVal), keysExpr)
|
||||
valsCond := fmt.Sprintf(`arrayExists(x -> match(x, %s), %s)`, sb.Var(rawVal), valsExpr)
|
||||
return sb.Or(keysCond, valsCond), nil
|
||||
}
|
||||
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
|
||||
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
|
||||
return fmt.Sprintf(`match(%s, %s)`, sqlbuilder.Escape(fieldExpression), sb.Var(value)), nil
|
||||
case qbtypes.FilterOperatorNotRegexp:
|
||||
if key.Name == querybuilder.FTSInternalKey {
|
||||
rawVal := fmt.Sprintf("%v", value)
|
||||
keysExpr, valsExpr := ftsMapExprs(columns[0])
|
||||
keysCond := fmt.Sprintf(`arrayExists(x -> match(x, %s), %s)`, sb.Var(rawVal), keysExpr)
|
||||
valsCond := fmt.Sprintf(`arrayExists(x -> match(x, %s), %s)`, sb.Var(rawVal), valsExpr)
|
||||
return "NOT " + sb.Or(keysCond, valsCond), nil
|
||||
}
|
||||
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
|
||||
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
|
||||
return fmt.Sprintf(`NOT match(%s, %s)`, sqlbuilder.Escape(fieldExpression), sb.Var(value)), nil
|
||||
@@ -282,6 +307,12 @@ func (c *conditionBuilder) ConditionFor(
|
||||
return "", err
|
||||
}
|
||||
|
||||
// FTS wildcard conditions are self-contained (arrayExists over full map);
|
||||
// no additional EXISTS wrapper is needed.
|
||||
if key.Name == querybuilder.FTSInternalKey {
|
||||
return condition, nil
|
||||
}
|
||||
|
||||
// Skip adding exists filter for intrinsic fields i.e. Table level log context fields
|
||||
buildExistCondition := operator.AddDefaultExistsFilter()
|
||||
switch key.FieldContext {
|
||||
|
||||
@@ -514,6 +514,67 @@ func TestConditionFor(t *testing.T) {
|
||||
expectedSQL: "",
|
||||
expectedError: qbtypes.ErrColumnNotFound,
|
||||
},
|
||||
// FTS wildcard (FTSInternalKey) cases
|
||||
{
|
||||
name: "FTS wildcard - attribute string REGEXP",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "_X_INTERNAL_FTS_KEY",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
operator: qbtypes.FilterOperatorRegexp,
|
||||
value: "error",
|
||||
expectedSQL: "arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))",
|
||||
expectedArgs: []any{"error", "error"},
|
||||
},
|
||||
{
|
||||
name: "FTS wildcard - attribute number REGEXP",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "_X_INTERNAL_FTS_KEY",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
operator: qbtypes.FilterOperatorRegexp,
|
||||
value: "42",
|
||||
expectedSQL: "arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))",
|
||||
expectedArgs: []any{"42", "42"},
|
||||
},
|
||||
{
|
||||
name: "FTS wildcard - attribute bool REGEXP",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "_X_INTERNAL_FTS_KEY",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||
},
|
||||
operator: qbtypes.FilterOperatorRegexp,
|
||||
value: "true",
|
||||
expectedSQL: "arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))",
|
||||
expectedArgs: []any{"true", "true"},
|
||||
},
|
||||
{
|
||||
name: "FTS wildcard - resource string REGEXP",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "_X_INTERNAL_FTS_KEY",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
operator: qbtypes.FilterOperatorRegexp,
|
||||
value: "my-service",
|
||||
expectedSQL: "arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string))",
|
||||
expectedArgs: []any{"my-service", "my-service"},
|
||||
},
|
||||
{
|
||||
name: "FTS wildcard - NOT REGEXP",
|
||||
key: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "_X_INTERNAL_FTS_KEY",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
operator: qbtypes.FilterOperatorNotRegexp,
|
||||
value: "healthcheck",
|
||||
expectedSQL: "NOT (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string)))",
|
||||
expectedArgs: []any{"healthcheck", "healthcheck"},
|
||||
},
|
||||
}
|
||||
fl := flaggertest.New(t)
|
||||
fm := NewFieldMapper(fl)
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/constants"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -125,6 +126,21 @@ var (
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
},
|
||||
}
|
||||
|
||||
// DefaultFTSFieldKeys is the ordered set of TelemetryFieldKey instances that
|
||||
// search() fans out across. Intrinsic log columns use the normal conditionFor
|
||||
// path; entries with Name==FTSInternalKey are short-circuited in conditionFor
|
||||
// to emit arrayExists conditions over mapKeys/mapValues without arrayConcat.
|
||||
DefaultFTSFieldKeys = []*telemetrytypes.TelemetryFieldKey{
|
||||
{Name: LogsV2BodyColumn, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextLog, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: LogsV2SeverityTextColumn, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextLog, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: LogsV2TraceIDColumn, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextLog, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: LogsV2SpanIDColumn, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextLog, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: querybuilder.FTSInternalKey, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
{Name: querybuilder.FTSInternalKey, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeNumber},
|
||||
{Name: querybuilder.FTSInternalKey, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeBool},
|
||||
{Name: querybuilder.FTSInternalKey, Signal: telemetrytypes.SignalLogs, FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeString},
|
||||
}
|
||||
)
|
||||
|
||||
func bodyAliasExpression(bodyJSONEnabled bool) string {
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
"github.com/SigNoz/signoz-otel-collector/utils"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
@@ -79,6 +80,9 @@ func NewFieldMapper(fl flagger.Flagger) qbtypes.FieldMapper {
|
||||
func (m *fieldMapper) getColumn(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
|
||||
switch key.FieldContext {
|
||||
case telemetrytypes.FieldContextResource:
|
||||
if key.Name == querybuilder.FTSInternalKey {
|
||||
return []*schema.Column{logsV2Columns["resources_string"]}, nil
|
||||
}
|
||||
columns := []*schema.Column{logsV2Columns["resources_string"], logsV2Columns["resource"]}
|
||||
return columns, nil
|
||||
case telemetrytypes.FieldContextScope:
|
||||
|
||||
137
pkg/telemetrylogs/filter_expr_logs_fts_test.go
Normal file
137
pkg/telemetrylogs/filter_expr_logs_fts_test.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/flagger/flaggertest"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestSearchFunctionFTS(t *testing.T) {
|
||||
fl := flaggertest.New(t)
|
||||
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
|
||||
ctx := context.Background()
|
||||
fm := NewFieldMapper(fl)
|
||||
cb := NewConditionBuilder(fm, fl)
|
||||
keys := buildCompleteFieldKeyMap(releaseTime)
|
||||
for _, field := range IntrinsicFields {
|
||||
f := field
|
||||
keys[field.Name] = append(keys[field.Name], &f)
|
||||
}
|
||||
|
||||
startNs := uint64(releaseTime.Add(-1 * time.Hour).UnixNano())
|
||||
endNs := uint64(releaseTime.Add(1 * time.Hour).UnixNano())
|
||||
|
||||
makeOpts := func(ftsKeys []*telemetrytypes.TelemetryFieldKey) querybuilder.FilterExprVisitorOpts {
|
||||
return querybuilder.FilterExprVisitorOpts{
|
||||
Context: ctx,
|
||||
Logger: instrumentationtest.New().Logger(),
|
||||
FieldMapper: fm,
|
||||
ConditionBuilder: cb,
|
||||
FieldKeys: keys,
|
||||
JsonKeyToKey: GetBodyJSONKey,
|
||||
StartNs: startNs,
|
||||
EndNs: endNs,
|
||||
FTSFieldKeys: ftsKeys,
|
||||
}
|
||||
}
|
||||
|
||||
t.Run("search quoted string fans out to all columns", func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause("search('error')", makeOpts(DefaultFTSFieldKeys))
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
sql, _ := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
// Must touch all 8 targets: body, severity_text, trace_id, span_id, plus 4 map pairs
|
||||
assert.Contains(t, sql, "match(LOWER(body), LOWER(?))")
|
||||
assert.Contains(t, sql, "match(severity_text, ?)")
|
||||
assert.Contains(t, sql, "match(trace_id, ?)")
|
||||
assert.Contains(t, sql, "match(span_id, ?)")
|
||||
assert.Contains(t, sql, "arrayExists(x -> match(x, ?), mapKeys(attributes_string))")
|
||||
assert.Contains(t, sql, "arrayExists(x -> match(x, ?), mapValues(attributes_string))")
|
||||
assert.Contains(t, sql, "arrayExists(x -> match(x, ?), mapKeys(attributes_number))")
|
||||
assert.Contains(t, sql, "arrayExists(x -> match(x, ?), mapKeys(attributes_bool))")
|
||||
assert.Contains(t, sql, "arrayExists(x -> match(x, ?), mapKeys(resources_string))")
|
||||
})
|
||||
|
||||
t.Run("search unquoted token produces same result as quoted", func(t *testing.T) {
|
||||
quoted, err := querybuilder.PrepareWhereClause("search('error')", makeOpts(DefaultFTSFieldKeys))
|
||||
require.NoError(t, err)
|
||||
unquoted, err := querybuilder.PrepareWhereClause("search(error)", makeOpts(DefaultFTSFieldKeys))
|
||||
require.NoError(t, err)
|
||||
|
||||
sqlQ, argsQ := quoted.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
sqlU, argsU := unquoted.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
assert.Equal(t, sqlQ, sqlU)
|
||||
assert.Equal(t, argsQ, argsU)
|
||||
})
|
||||
|
||||
t.Run("NOT search wraps entire condition", func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause("NOT search('healthcheck')", makeOpts(DefaultFTSFieldKeys))
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
sql, _ := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
assert.Contains(t, sql, "NOT (")
|
||||
})
|
||||
|
||||
t.Run("search combined with other filter", func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause("search('error') AND severity_text = 'ERROR'", makeOpts(DefaultFTSFieldKeys))
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
sql, _ := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
assert.Contains(t, sql, "match(LOWER(body), LOWER(?))")
|
||||
assert.Contains(t, sql, "severity_text = ?")
|
||||
})
|
||||
|
||||
t.Run("search invalid regex is escaped as literal", func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause("search('[ERROR-1234]')", makeOpts(DefaultFTSFieldKeys))
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
_, args := clause.WhereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
// FormatFullTextSearch escapes invalid regex — all args should be the escaped form
|
||||
for _, arg := range args {
|
||||
if s, ok := arg.(string); ok {
|
||||
assert.Equal(t, `\[ERROR-1234\]`, s)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("search with FTSColumnKeys nil returns error", func(t *testing.T) {
|
||||
_, err := querybuilder.PrepareWhereClause("search('error')", makeOpts(nil))
|
||||
require.Error(t, err)
|
||||
_, _, _, _, _, additionals := errors.Unwrapb(err)
|
||||
found := false
|
||||
for _, a := range additionals {
|
||||
if strings.Contains(a, "search() is only supported for log queries") {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
assert.True(t, found, "expected 'only supported for log queries' error, got: %v", additionals)
|
||||
})
|
||||
|
||||
t.Run("search with window exceeding 6 hours returns error", func(t *testing.T) {
|
||||
opts := makeOpts(DefaultFTSFieldKeys)
|
||||
opts.StartNs = uint64(releaseTime.UnixNano())
|
||||
opts.EndNs = uint64(releaseTime.Add(7 * time.Hour).UnixNano())
|
||||
_, err := querybuilder.PrepareWhereClause("search('error')", opts)
|
||||
require.Error(t, err)
|
||||
_, _, _, _, _, additionals := errors.Unwrapb(err)
|
||||
found := false
|
||||
for _, a := range additionals {
|
||||
if strings.Contains(a, "6-hour") {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
assert.True(t, found, "expected 6-hour window error, got: %v", additionals)
|
||||
})
|
||||
}
|
||||
@@ -100,7 +100,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
category: "Single word",
|
||||
query: "<script>alert('xss')</script>",
|
||||
shouldPass: false,
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got '<'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got '<'",
|
||||
},
|
||||
|
||||
// Single word searches with spaces
|
||||
@@ -166,7 +166,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
category: "Special characters",
|
||||
query: "[tracing]",
|
||||
shouldPass: false,
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got '['",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got '['",
|
||||
},
|
||||
{
|
||||
category: "Special characters",
|
||||
@@ -196,7 +196,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
category: "Special characters",
|
||||
query: "ERROR: cannot execute update() in a read-only context",
|
||||
shouldPass: false,
|
||||
expectedErrorContains: "expecting one of {(, AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got ')'",
|
||||
expectedErrorContains: "expecting one of {(, AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got ')'",
|
||||
},
|
||||
{
|
||||
category: "Special characters",
|
||||
@@ -618,7 +618,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'and'",
|
||||
expectedErrorContains: "expecting one of {(, ), FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'and'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -626,7 +626,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'or'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'or'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -634,7 +634,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), FREETEXT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got EOF",
|
||||
expectedErrorContains: "expecting one of {(, ), FREETEXT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got EOF",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -642,7 +642,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'like'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'like'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -650,7 +650,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'between'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'between'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -658,7 +658,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'in'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'in'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -666,7 +666,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'exists'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'exists'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -674,7 +674,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'regexp'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'regexp'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -682,7 +682,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: []any{},
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'contains'",
|
||||
expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'contains'",
|
||||
},
|
||||
{
|
||||
category: "Keyword conflict",
|
||||
@@ -2018,9 +2018,9 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
expectedErrorContains: "",
|
||||
},
|
||||
|
||||
{category: "Only keywords", query: "AND", shouldPass: false, expectedErrorContains: "expecting one of {(, ), FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'AND'"},
|
||||
{category: "Only keywords", query: "OR", shouldPass: false, expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'OR'"},
|
||||
{category: "Only keywords", query: "NOT", shouldPass: false, expectedErrorContains: "expecting one of {(, ), FREETEXT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got EOF"},
|
||||
{category: "Only keywords", query: "AND", shouldPass: false, expectedErrorContains: "expecting one of {(, ), FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'AND'"},
|
||||
{category: "Only keywords", query: "OR", shouldPass: false, expectedErrorContains: "expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'OR'"},
|
||||
{category: "Only keywords", query: "NOT", shouldPass: false, expectedErrorContains: "expecting one of {(, ), FREETEXT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got EOF"},
|
||||
|
||||
{category: "Only functions", query: "has", shouldPass: false, expectedErrorContains: "expecting one of {(, )} but got EOF"},
|
||||
{category: "Only functions", query: "hasAny", shouldPass: false, expectedErrorContains: "expecting one of {(, )} but got EOF"},
|
||||
@@ -2162,7 +2162,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: nil,
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'and'",
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'and'",
|
||||
},
|
||||
{
|
||||
category: "Operator keywords as keys",
|
||||
@@ -2170,7 +2170,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: nil,
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'or'",
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'or'",
|
||||
},
|
||||
{
|
||||
category: "Operator keywords as keys",
|
||||
@@ -2178,7 +2178,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: nil,
|
||||
expectedErrorContains: "line 1:3 expecting one of {(, ), FREETEXT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got '='",
|
||||
expectedErrorContains: "line 1:3 expecting one of {(, ), FREETEXT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got '='",
|
||||
},
|
||||
{
|
||||
category: "Operator keywords as keys",
|
||||
@@ -2186,7 +2186,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: nil,
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'between'",
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'between'",
|
||||
},
|
||||
{
|
||||
category: "Operator keywords as keys",
|
||||
@@ -2194,7 +2194,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
shouldPass: false,
|
||||
expectedQuery: "",
|
||||
expectedArgs: nil,
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), AND, FREETEXT, NOT, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'in'",
|
||||
expectedErrorContains: "line 1:0 expecting one of {(, ), AND, FREETEXT, NOT, SEARCH, boolean, has(), hasAll(), hasAny(), hasToken(), number, quoted text} but got 'in'",
|
||||
},
|
||||
|
||||
// Using function keywords as keys
|
||||
|
||||
@@ -663,11 +663,11 @@ func (b *logQueryStatementBuilder) addFilterCondition(
|
||||
FieldKeys: keys,
|
||||
BodyJSONEnabled: bodyJSONEnabled,
|
||||
SkipResourceFilter: true,
|
||||
FullTextColumn: b.fullTextColumn,
|
||||
JsonKeyToKey: b.jsonKeyToKey,
|
||||
Variables: variables,
|
||||
StartNs: start,
|
||||
EndNs: end,
|
||||
FTSFieldKeys: DefaultFTSFieldKeys,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -2,6 +2,7 @@ package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@@ -364,7 +365,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
name: "List with full text search",
|
||||
name: "List with full text search exceeds 6h window",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
@@ -373,11 +374,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
},
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"hello", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
expectedErr: fmt.Errorf("parsing the search expression"),
|
||||
},
|
||||
{
|
||||
name: "list query with mat col order by",
|
||||
@@ -385,7 +382,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'cartservice' hello",
|
||||
Expression: "service.name = 'cartservice'",
|
||||
},
|
||||
Limit: 10,
|
||||
Order: []qbtypes.OrderBy{
|
||||
@@ -402,8 +399,8 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY `attribute_string_materialized$$key$$name` AS `materialized.key.name` desc LIMIT ?",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "hello", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY `attribute_string_materialized$$key$$name` AS `materialized.key.name` desc LIMIT ?",
|
||||
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
@@ -1037,7 +1034,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
func TestStmtBuilderFTS(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
requestType qbtypes.RequestType
|
||||
@@ -1045,6 +1042,9 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
enableUseJSONBody bool
|
||||
expected qbtypes.Statement
|
||||
expectedErr error
|
||||
// optional per-case time window (ms); zero → use default 1747947419000/1747983448000
|
||||
startMs uint64
|
||||
endMs uint64
|
||||
}{
|
||||
{
|
||||
name: "fts",
|
||||
@@ -1055,10 +1055,11 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: true,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{querybuilder.BodyFullTextSearchDefaultWarning},
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE (match(LOWER(body_v2.message), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string)))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
@@ -1071,15 +1072,16 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: true,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{querybuilder.BodyFullTextSearchDefaultWarning},
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE (match(LOWER(body_v2.message), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string)))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "fts_disabled",
|
||||
name: "fts_json_disabled",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
@@ -1087,12 +1089,91 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: false,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE (match(LOWER(body), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string)))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
// search() function: uses a 2-hour window to stay under the 6-hour limit
|
||||
{
|
||||
name: "search_fans_out_to_all_columns",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "search('error')"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: false,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE (match(LOWER(body), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string)))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "search_unquoted_token",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "search(error)"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: false,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE (match(LOWER(body), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string)))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "search_not_wraps_condition",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "NOT search('healthcheck')"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: false,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE NOT ((match(LOWER(body), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string))))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "healthcheck", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "search_combined_with_filter",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "search('error') AND severity_text = 'ERROR'"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: false,
|
||||
startMs: 1705309200000,
|
||||
endMs: 1705316400000,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE ((match(LOWER(body), LOWER(?)) OR match(severity_text, ?) OR match(trace_id, ?) OR match(span_id, ?) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_string)) OR arrayExists(x -> match(x, ?), mapValues(attributes_string))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_number)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_number)))) OR (arrayExists(x -> match(x, ?), mapKeys(attributes_bool)) OR arrayExists(x -> match(x, ?), arrayMap(x -> toString(x), mapValues(attributes_bool)))) OR (arrayExists(x -> match(x, ?), mapKeys(resources_string)) OR arrayExists(x -> match(x, ?), mapValues(resources_string)))) AND severity_text = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{"error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "error", "ERROR", "1705309200000000000", uint64(1705307400), "1705316400000000000", uint64(1705316400), 10},
|
||||
},
|
||||
},
|
||||
{
|
||||
// default window is ~10h which exceeds the 6-hour search() limit
|
||||
name: "search_window_exceeds_6h",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "search('error')"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableUseJSONBody: false,
|
||||
expectedErr: fmt.Errorf("6-hour"),
|
||||
},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
@@ -1118,19 +1199,24 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
fl,
|
||||
)
|
||||
|
||||
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
|
||||
startMs := uint64(1747947419000)
|
||||
if c.startMs != 0 {
|
||||
startMs = c.startMs
|
||||
}
|
||||
endMs := uint64(1747983448000)
|
||||
if c.endMs != 0 {
|
||||
endMs = c.endMs
|
||||
}
|
||||
q, err := statementBuilder.Build(context.Background(), startMs, endMs, c.requestType, c.query, nil)
|
||||
if c.expectedErr != nil {
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), c.expectedErr.Error())
|
||||
} else {
|
||||
if err != nil {
|
||||
_, _, _, _, _, add := errors.Unwrapb(err)
|
||||
t.Logf("error additionals: %v", add)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, c.expected.Query, q.Query)
|
||||
require.Equal(t, c.expected.Args, q.Args)
|
||||
require.Equal(t, c.expected.Warnings, q.Warnings)
|
||||
if c.expected.Query != "" {
|
||||
require.Equal(t, c.expected.Query, q.Query)
|
||||
require.Equal(t, c.expected.Args, q.Args)
|
||||
require.Equal(t, c.expected.Warnings, q.Warnings)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
12
pkg/types/retentiontypes/store.go
Normal file
12
pkg/types/retentiontypes/store.go
Normal file
@@ -0,0 +1,12 @@
|
||||
package retentiontypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Store interface {
|
||||
// ListTTLSettingsByTableNameAndBeforeCreatedAt returns successful TTL settings before the given timestamp.
|
||||
ListTTLSettingsByTableNameAndBeforeCreatedAt(ctx context.Context, orgID valuer.UUID, tableName string, beforeMs int64) ([]*TTLSetting, error)
|
||||
}
|
||||
@@ -1,10 +1,141 @@
|
||||
package retentiontypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
const secondsPerDay = 24 * 60 * 60
|
||||
|
||||
const (
|
||||
DefaultLogsRetentionDays = 15
|
||||
DefaultMetricsRetentionDays = 30
|
||||
DefaultTracesRetentionDays = 15
|
||||
)
|
||||
|
||||
const (
|
||||
TraceTTL = "traces"
|
||||
MetricsTTL = "metrics"
|
||||
LogsTTL = "logs"
|
||||
)
|
||||
|
||||
const (
|
||||
TTLSettingStatusPending = "pending"
|
||||
TTLSettingStatusFailed = "failed"
|
||||
TTLSettingStatusSuccess = "success"
|
||||
)
|
||||
|
||||
// RetentionPolicySegment is a half-open time range using one retention policy.
|
||||
type RetentionPolicySegment struct {
|
||||
StartMs int64
|
||||
EndMs int64
|
||||
Rules []CustomRetentionRule
|
||||
DefaultDays int
|
||||
}
|
||||
|
||||
// NewRetentionPolicySegment creates a retention policy segment for a half-open time range.
|
||||
func NewRetentionPolicySegment(startMs int64, endMs int64, rules []CustomRetentionRule, defaultDays int) *RetentionPolicySegment {
|
||||
return &RetentionPolicySegment{
|
||||
StartMs: startMs,
|
||||
EndMs: endMs,
|
||||
Rules: rules,
|
||||
DefaultDays: defaultDays,
|
||||
}
|
||||
}
|
||||
|
||||
// BuildRetentionPolicySegmentsFromRows converts successful TTL settings into retention policy segments.
|
||||
func BuildRetentionPolicySegmentsFromRows(rows []*TTLSetting, fallbackDefaultDays int, startMs, endMs int64) ([]*RetentionPolicySegment, error) {
|
||||
if startMs >= endMs {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var activeAtStart *TTLSetting
|
||||
inWindow := make([]*TTLSetting, 0, len(rows))
|
||||
for _, row := range rows {
|
||||
rowMs := row.CreatedAt.UnixMilli()
|
||||
if rowMs <= startMs {
|
||||
activeAtStart = row
|
||||
continue
|
||||
}
|
||||
if rowMs >= endMs {
|
||||
continue
|
||||
}
|
||||
inWindow = append(inWindow, row)
|
||||
}
|
||||
|
||||
activeRules, activeDefault, err := parseTTLSetting(activeAtStart, fallbackDefaultDays)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
segments := make([]*RetentionPolicySegment, 0, len(inWindow)+1)
|
||||
cursor := startMs
|
||||
for _, row := range inWindow {
|
||||
rowMs := row.CreatedAt.UnixMilli()
|
||||
if rowMs <= cursor {
|
||||
activeRules, activeDefault, err = parseTTLSetting(row, fallbackDefaultDays)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
continue
|
||||
}
|
||||
segments = append(segments, NewRetentionPolicySegment(cursor, rowMs, activeRules, activeDefault))
|
||||
cursor = rowMs
|
||||
activeRules, activeDefault, err = parseTTLSetting(row, fallbackDefaultDays)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if cursor < endMs {
|
||||
segments = append(segments, NewRetentionPolicySegment(cursor, endMs, activeRules, activeDefault))
|
||||
}
|
||||
|
||||
return segments, nil
|
||||
}
|
||||
|
||||
func parseTTLSetting(row *TTLSetting, fallbackDefaultDays int) ([]CustomRetentionRule, int, error) {
|
||||
if row == nil {
|
||||
return nil, fallbackDefaultDays, nil
|
||||
}
|
||||
|
||||
defaultDays := row.TTL
|
||||
if row.Condition == "" {
|
||||
defaultDays = (row.TTL + secondsPerDay - 1) / secondsPerDay
|
||||
}
|
||||
if defaultDays <= 0 {
|
||||
defaultDays = fallbackDefaultDays
|
||||
}
|
||||
|
||||
if row.Condition == "" {
|
||||
return nil, defaultDays, nil
|
||||
}
|
||||
|
||||
var rules []CustomRetentionRule
|
||||
if err := json.Unmarshal([]byte(row.Condition), &rules); err != nil {
|
||||
return nil, 0, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "parse ttl_setting condition for row %q", row.ID.StringValue())
|
||||
}
|
||||
|
||||
return rules, defaultDays, nil
|
||||
}
|
||||
|
||||
// CustomRetentionRule is one custom retention rule as stored in ttl_setting.condition.
// Rules are evaluated in declaration order; the first matching rule wins.
type CustomRetentionRule struct {
	// Filters are the label conditions attached to this rule
	// (serialized as "conditions" in the stored JSON).
	Filters []FilterCondition `json:"conditions"`
	// TTLDays is the retention period, in days, applied by this rule.
	TTLDays int `json:"ttlDays"`
}
|
||||
|
||||
// FilterCondition is one label-key, allowed-values condition inside a retention rule.
type FilterCondition struct {
	// Key is the label key the condition applies to.
	Key string `json:"key"`
	// Values is the set of accepted values for Key.
	Values []string `json:"values"`
}
|
||||
|
||||
type TTLSetting struct {
|
||||
bun.BaseModel `bun:"table:ttl_setting"`
|
||||
types.Identifiable
|
||||
@@ -17,3 +148,73 @@ type TTLSetting struct {
|
||||
OrgID string `json:"-" bun:"org_id,notnull"`
|
||||
Condition string `bun:"condition,type:text"`
|
||||
}
|
||||
|
||||
// TTLParams carries the legacy (non-custom-retention) TTL request parameters.
type TTLParams struct {
	// Type selects the signal the TTL applies to (e.g. traces/metrics/logs —
	// TODO confirm accepted values with callers).
	Type string
	// ColdStorageVolume is the target volume for cold-storage moves.
	ColdStorageVolume string
	// ToColdStorageDuration is the age after which data moves to cold storage.
	ToColdStorageDuration int64
	// DelDuration is the age after which data is deleted.
	DelDuration int64
}
|
||||
|
||||
// CustomRetentionTTLParams is the request payload for setting custom-retention
// TTLs: a default retention plus per-condition rule overrides.
type CustomRetentionTTLParams struct {
	Type           string                `json:"type"`
	DefaultTTLDays int                   `json:"defaultTTLDays"`
	TTLConditions  []CustomRetentionRule `json:"ttlConditions"`
	// Cold-storage fields are optional and omitted when unset.
	ColdStorageVolume         string `json:"coldStorageVolume,omitempty"`
	ToColdStorageDurationDays int64  `json:"coldStorageDurationDays,omitempty"`
}
|
||||
|
||||
// GetTTLParams selects which signal type's TTL configuration to fetch.
type GetTTLParams struct {
	Type string
}
|
||||
|
||||
// GetCustomRetentionTTLResponse is the API response for reading the
// custom-retention TTL configuration of one signal.
type GetCustomRetentionTTLResponse struct {
	Version string `json:"version"`
	Status  string `json:"status"`

	// In-flight expectations for logs TTL updates, in hours.
	ExpectedLogsTime     int `json:"expected_logs_ttl_duration_hrs,omitempty"`
	ExpectedLogsMoveTime int `json:"expected_logs_move_ttl_duration_hrs,omitempty"`

	// Current custom-retention configuration.
	DefaultTTLDays     int                   `json:"default_ttl_days,omitempty"`
	TTLConditions      []CustomRetentionRule `json:"ttl_conditions,omitempty"`
	ColdStorageVolume  string                `json:"cold_storage_volume,omitempty"`
	ColdStorageTTLDays int                   `json:"cold_storage_ttl_days,omitempty"`
}
|
||||
|
||||
// CustomRetentionTTLResponse is the acknowledgement returned after a
// custom-retention TTL update.
type CustomRetentionTTLResponse struct {
	Message string `json:"message"`
}
|
||||
|
||||
// TTLStatusItem is one row of the TTL status table tracking the progress of a
// per-table TTL change.
type TTLStatusItem struct {
	Id        int       `json:"id" db:"id"`
	UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
	CreatedAt time.Time `json:"created_at" db:"created_at"`
	// TableName is the ClickHouse table the TTL change targets —
	// TODO confirm against the writer of this table.
	TableName string `json:"table_name" db:"table_name"`
	TTL       int    `json:"ttl" db:"ttl"`
	Status    string `json:"status" db:"status"`
	// ColdStorageTtl is the cold-storage TTL recorded for the same change.
	ColdStorageTtl int `json:"cold_storage_ttl" db:"cold_storage_ttl"`
}
|
||||
|
||||
// SetTTLResponseItem is the acknowledgement returned after a TTL update.
type SetTTLResponseItem struct {
	Message string `json:"message"`
}
|
||||
|
||||
// DBResponseTTL maps a ClickHouse system-table row exposing a table's full
// engine definition (which embeds its TTL clause).
type DBResponseTTL struct {
	EngineFull string `ch:"engine_full"`
}
|
||||
|
||||
// GetTTLResponseItem is the API response for reading the current (and
// expected, i.e. in-flight) TTL configuration per signal, all in hours.
type GetTTLResponseItem struct {
	// Currently applied TTLs.
	MetricsTime     int `json:"metrics_ttl_duration_hrs,omitempty"`
	MetricsMoveTime int `json:"metrics_move_ttl_duration_hrs,omitempty"`
	TracesTime      int `json:"traces_ttl_duration_hrs,omitempty"`
	TracesMoveTime  int `json:"traces_move_ttl_duration_hrs,omitempty"`
	LogsTime        int `json:"logs_ttl_duration_hrs,omitempty"`
	LogsMoveTime    int `json:"logs_move_ttl_duration_hrs,omitempty"`
	// Expected TTLs for updates that are still being applied.
	ExpectedMetricsTime     int `json:"expected_metrics_ttl_duration_hrs,omitempty"`
	ExpectedMetricsMoveTime int `json:"expected_metrics_move_ttl_duration_hrs,omitempty"`
	ExpectedTracesTime      int `json:"expected_traces_ttl_duration_hrs,omitempty"`
	ExpectedTracesMoveTime  int `json:"expected_traces_move_ttl_duration_hrs,omitempty"`
	ExpectedLogsTime        int `json:"expected_logs_ttl_duration_hrs,omitempty"`
	ExpectedLogsMoveTime    int `json:"expected_logs_move_ttl_duration_hrs,omitempty"`
	Status                  string `json:"status"`
}
|
||||
|
||||
83
pkg/types/retentiontypes/ttl_test.go
Normal file
83
pkg/types/retentiontypes/ttl_test.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package retentiontypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestBuildRetentionPolicySegmentsFromRows covers the three segmentation
// cases: a baseline row created before the window, rows created inside the
// window, and no rows at all.
func TestBuildRetentionPolicySegmentsFromRows(t *testing.T) {
	start := time.Date(2026, 5, 4, 0, 0, 0, 0, time.UTC)
	end := start.AddDate(0, 0, 1)

	ruleA := CustomRetentionRule{
		Filters: []FilterCondition{{Key: "service.name", Values: []string{"api"}}},
		TTLDays: 7,
	}
	ruleB := CustomRetentionRule{
		Filters: []FilterCondition{{Key: "env", Values: []string{"prod"}}},
		TTLDays: 15,
	}

	t.Run("row before window is active at start", func(t *testing.T) {
		// A row created before startMs supplies the policy for the whole window.
		segments, err := BuildRetentionPolicySegmentsFromRows(
			[]*TTLSetting{
				ttlSetting(t, start.Add(-time.Hour), 45, []CustomRetentionRule{ruleA}),
			},
			30,
			start.UnixMilli(),
			end.UnixMilli(),
		)
		require.NoError(t, err)
		require.Equal(t, []*RetentionPolicySegment{
			NewRetentionPolicySegment(start.UnixMilli(), end.UnixMilli(), []CustomRetentionRule{ruleA}, 45),
		}, segments)
	})

	t.Run("row inside window splits segments", func(t *testing.T) {
		firstChange := start.Add(6 * time.Hour)
		secondChange := start.Add(18 * time.Hour)

		// Each in-window change closes the previous segment and opens a new
		// one; the first segment runs on the fallback default (no baseline row).
		segments, err := BuildRetentionPolicySegmentsFromRows(
			[]*TTLSetting{
				ttlSetting(t, firstChange, 21, []CustomRetentionRule{ruleA}),
				ttlSetting(t, secondChange, 14, []CustomRetentionRule{ruleB}),
			},
			30,
			start.UnixMilli(),
			end.UnixMilli(),
		)
		require.NoError(t, err)
		require.Equal(t, []*RetentionPolicySegment{
			NewRetentionPolicySegment(start.UnixMilli(), firstChange.UnixMilli(), nil, 30),
			NewRetentionPolicySegment(firstChange.UnixMilli(), secondChange.UnixMilli(), []CustomRetentionRule{ruleA}, 21),
			NewRetentionPolicySegment(secondChange.UnixMilli(), end.UnixMilli(), []CustomRetentionRule{ruleB}, 14),
		}, segments)
	})

	t.Run("no rows uses fallback", func(t *testing.T) {
		segments, err := BuildRetentionPolicySegmentsFromRows(nil, 30, start.UnixMilli(), end.UnixMilli())
		require.NoError(t, err)
		require.Equal(t, []*RetentionPolicySegment{
			NewRetentionPolicySegment(start.UnixMilli(), end.UnixMilli(), nil, 30),
		}, segments)
	})
}
|
||||
|
||||
// ttlSetting builds a TTLSetting fixture with the given creation time, TTL in
// days, and custom rules JSON-encoded into the condition column.
func ttlSetting(t *testing.T, createdAt time.Time, ttlDays int, rules []CustomRetentionRule) *TTLSetting {
	t.Helper()

	condition, err := json.Marshal(rules)
	require.NoError(t, err)

	return &TTLSetting{
		TimeAuditable: types.TimeAuditable{
			CreatedAt: createdAt,
		},
		TTL:       ttlDays,
		Condition: string(condition),
	}
}
|
||||
20
pkg/types/zeustypes/attributes.go
Normal file
20
pkg/types/zeustypes/attributes.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package zeustypes
|
||||
|
||||
import "go.opentelemetry.io/otel/attribute"
|
||||
|
||||
// Well-known attribute keys used as meter dimensions.
var (
	// OrganizationID identifies the organization.
	OrganizationID = attribute.Key("signoz.organization.id")

	// RetentionDuration identifies the retention bucket a meter belongs to.
	RetentionDuration = attribute.Key("signoz.retention.duration")
)
|
||||
|
||||
func NewDimensions(kvs ...attribute.KeyValue) map[string]string {
|
||||
dimensions := map[string]string{}
|
||||
for _, kv := range kvs {
|
||||
dimensions[string(kv.Key)] = kv.Value.AsString()
|
||||
}
|
||||
|
||||
return dimensions
|
||||
}
|
||||
29
pkg/types/zeustypes/deployment.go
Normal file
29
pkg/types/zeustypes/deployment.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package zeustypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// GettableDeployment is the parsed deployment info returned by Zeus,
// including the cluster and region the deployment runs in.
// NOTE(review): appears to be a partial mapping of the actual response —
// confirm against the Zeus API before relying on omitted fields.
type GettableDeployment struct {
	ID      string `json:"id"`
	Name    string `json:"name"`
	Cluster struct {
		ID     string `json:"id"`
		Name   string `json:"name"`
		Region struct {
			ID   string `json:"id"`
			Name string `json:"name"`
			DNS  string `json:"dns"`
		} `json:"region"`
	} `json:"cluster"`
}
|
||||
|
||||
func NewGettableDeployment(data []byte) (*GettableDeployment, error) {
|
||||
deployment := new(GettableDeployment)
|
||||
err := json.Unmarshal(data, deployment)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return deployment, nil
|
||||
}
|
||||
46
pkg/types/zeustypes/host.go
Normal file
46
pkg/types/zeustypes/host.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package zeustypes
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
// Host is one addressable endpoint of a deployment.
type Host struct {
	Name      string `json:"name" required:"true"`
	IsDefault bool   `json:"is_default" required:"true"`
	// URL is the derived https endpoint for this host.
	URL string `json:"url" required:"true"`
}
|
||||
|
||||
// GettableHost is the host view of a deployment returned to API consumers.
type GettableHost struct {
	Name  string `json:"name" required:"true"`
	State string `json:"state" required:"true"`
	Tier  string `json:"tier" required:"true"`
	Hosts []Host `json:"hosts" required:"true"`
}
|
||||
|
||||
// PostableHost is the request payload for creating/updating a host.
type PostableHost struct {
	Name string `json:"name" required:"true"`
}
|
||||
|
||||
func NewGettableHost(data []byte) *GettableHost {
|
||||
parsed := gjson.ParseBytes(data)
|
||||
dns := parsed.Get("cluster.region.dns").String()
|
||||
|
||||
hostResults := parsed.Get("hosts").Array()
|
||||
hosts := make([]Host, len(hostResults))
|
||||
|
||||
for i, h := range hostResults {
|
||||
name := h.Get("name").String()
|
||||
hosts[i].Name = name
|
||||
hosts[i].IsDefault = h.Get("is_default").Bool()
|
||||
hosts[i].URL = (&url.URL{Scheme: "https", Host: name + "." + dns}).String()
|
||||
}
|
||||
|
||||
return &GettableHost{
|
||||
Name: parsed.Get("name").String(),
|
||||
State: parsed.Get("state").String(),
|
||||
Tier: parsed.Get("tier").String(),
|
||||
Hosts: hosts,
|
||||
}
|
||||
}
|
||||
54
pkg/types/zeustypes/meter.go
Normal file
54
pkg/types/zeustypes/meter.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package zeustypes
|
||||
|
||||
import "time"
|
||||
|
||||
// MeterCheckpoint records, per meter name, the start of the latest day Zeus
// has already stored for that meter.
type MeterCheckpoint struct {
	Name      string
	StartDate time.Time
}
|
||||
|
||||
// Meter is one aggregated usage reading for a single reporting window.
type Meter struct {
	// MeterName is the fully-qualified meter identifier.
	MeterName MeterName `json:"name"`

	// Value is the aggregated integer scalar for this meter over the reporting window.
	Value int64 `json:"value"`

	// Unit is the metric unit for this meter.
	Unit MeterUnit `json:"unit"`

	// Aggregation names the aggregation applied to produce Value.
	Aggregation MeterAggregation `json:"aggregation"`

	// StartUnixMilli is the inclusive window start in epoch milliseconds.
	StartUnixMilli int64 `json:"start_unix_milli"`

	// EndUnixMilli is the exclusive window end in epoch milliseconds.
	EndUnixMilli int64 `json:"end_unix_milli"`

	// IsCompleted is false for the current day's partial value.
	IsCompleted bool `json:"is_completed"`

	// Dimensions is the per-meter label set.
	Dimensions map[string]string `json:"dimensions"`
}
|
||||
|
||||
func NewMeter(
|
||||
name MeterName,
|
||||
value int64,
|
||||
unit MeterUnit,
|
||||
aggregation MeterAggregation,
|
||||
window MeterWindow,
|
||||
dimensions map[string]string,
|
||||
) Meter {
|
||||
return Meter{
|
||||
MeterName: name,
|
||||
Value: value,
|
||||
Unit: unit,
|
||||
Aggregation: aggregation,
|
||||
StartUnixMilli: window.StartUnixMilli,
|
||||
EndUnixMilli: window.EndUnixMilli,
|
||||
IsCompleted: window.IsCompleted,
|
||||
Dimensions: dimensions,
|
||||
}
|
||||
}
|
||||
12
pkg/types/zeustypes/meter_aggregation.go
Normal file
12
pkg/types/zeustypes/meter_aggregation.go
Normal file
@@ -0,0 +1,12 @@
|
||||
package zeustypes
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
// MeterAggregation is the aggregation applied to produce a meter value.
type MeterAggregation struct {
	valuer.String
}
|
||||
|
||||
// Supported meter aggregations.
var (
	MeterAggregationSum = MeterAggregation{valuer.NewString("sum")}
	MeterAggregationMax = MeterAggregation{valuer.NewString("max")}
)
|
||||
66
pkg/types/zeustypes/meter_name.go
Normal file
66
pkg/types/zeustypes/meter_name.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package zeustypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"regexp"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
)
|
||||
|
||||
// Well-known billing meter names.
var (
	MeterSpanSize        = MustNewMeterName("signoz.meter.span.size")
	MeterLogSize         = MustNewMeterName("signoz.meter.log.size")
	MeterDatapointCount  = MustNewMeterName("signoz.meter.metric.datapoint.count")
	MeterPlatformActive  = MustNewMeterName("signoz.meter.platform.active")
)
|
||||
|
||||
// meterNameRegex accepts lowercase dotted identifiers (letters, digits,
// underscores, dots) of at least two characters, e.g. "signoz.meter.log.size".
var meterNameRegex = regexp.MustCompile(`^[a-z][a-z0-9_.]+$`)
|
||||
|
||||
// MeterName is a validated dotted Zeus meter name.
// The zero value is invalid and reports IsZero() == true.
type MeterName struct {
	s string
}
|
||||
|
||||
func NewMeterName(s string) (MeterName, error) {
|
||||
if !meterNameRegex.MatchString(s) {
|
||||
return MeterName{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid meter name: %s", s)
|
||||
}
|
||||
|
||||
return MeterName{s: s}, nil
|
||||
}
|
||||
|
||||
// MustNewMeterName is like NewMeterName but panics on an invalid name.
// Intended for package-level declarations of well-known meter names.
func MustNewMeterName(s string) MeterName {
	name, err := NewMeterName(s)
	if err != nil {
		panic(err)
	}

	return name
}
|
||||
|
||||
// String returns the underlying dotted meter name.
func (n MeterName) String() string {
	return n.s
}
|
||||
|
||||
// IsZero reports whether n is the zero (unvalidated, empty) MeterName.
func (n MeterName) IsZero() bool {
	return n.s == ""
}
|
||||
|
||||
// MarshalJSON encodes the meter name as a plain JSON string.
func (n MeterName) MarshalJSON() ([]byte, error) {
	return json.Marshal(n.s)
}
|
||||
|
||||
func (n *MeterName) UnmarshalJSON(data []byte) error {
|
||||
var s string
|
||||
if err := json.Unmarshal(data, &s); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
name, err := NewMeterName(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*n = name
|
||||
return nil
|
||||
}
|
||||
30
pkg/types/zeustypes/meter_test.go
Normal file
30
pkg/types/zeustypes/meter_test.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package zeustypes
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestNewMeterWindow checks a valid one-day window round-trips its fields,
// and that a non-positive start or a non-advancing end is rejected.
func TestNewMeterWindow(t *testing.T) {
	start := time.Date(2026, 5, 4, 0, 0, 0, 0, time.UTC)

	window, err := NewMeterWindow(start.UnixMilli(), start.AddDate(0, 0, 1).UnixMilli(), true)
	require.NoError(t, err)
	require.Equal(t, start.UnixMilli(), window.StartUnixMilli)
	require.Equal(t, start.AddDate(0, 0, 1).UnixMilli(), window.EndUnixMilli)
	require.True(t, window.IsCompleted)

	// Start must be positive.
	_, err = NewMeterWindow(0, start.UnixMilli(), true)
	require.Error(t, err)

	// End must be strictly after start.
	_, err = NewMeterWindow(start.UnixMilli(), start.UnixMilli(), false)
	require.Error(t, err)
}
|
||||
|
||||
// TestMustNewMeterWindowPanicsForInvalidWindow verifies the Must variant
// panics instead of returning the validation error.
func TestMustNewMeterWindowPanicsForInvalidWindow(t *testing.T) {
	require.Panics(t, func() {
		MustNewMeterWindow(0, 0, true)
	})
}
|
||||
12
pkg/types/zeustypes/meter_unit.go
Normal file
12
pkg/types/zeustypes/meter_unit.go
Normal file
@@ -0,0 +1,12 @@
|
||||
package zeustypes
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
// MeterUnit is the metric unit of a meter value.
type MeterUnit struct {
	valuer.String
}
|
||||
|
||||
// Supported meter units.
var (
	MeterUnitCount = MeterUnit{valuer.NewString("count")}
	MeterUnitBytes = MeterUnit{valuer.NewString("bytes")}
)
|
||||
43
pkg/types/zeustypes/meter_window.go
Normal file
43
pkg/types/zeustypes/meter_window.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package zeustypes
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
)
|
||||
|
||||
// MeterWindow is a validated half-open reporting window
// [StartUnixMilli, EndUnixMilli) in epoch milliseconds.
type MeterWindow struct {
	StartUnixMilli int64
	EndUnixMilli   int64
	// IsCompleted is false while the window's day is still in progress.
	IsCompleted bool
}
|
||||
|
||||
func NewMeterWindow(startUnixMilli, endUnixMilli int64, isCompleted bool) (MeterWindow, error) {
|
||||
if startUnixMilli <= 0 {
|
||||
return MeterWindow{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "meter window start must be positive: %d", startUnixMilli)
|
||||
}
|
||||
|
||||
if endUnixMilli <= startUnixMilli {
|
||||
return MeterWindow{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "meter window end must be after start: [%d, %d)", startUnixMilli, endUnixMilli)
|
||||
}
|
||||
|
||||
return MeterWindow{
|
||||
StartUnixMilli: startUnixMilli,
|
||||
EndUnixMilli: endUnixMilli,
|
||||
IsCompleted: isCompleted,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// MustNewMeterWindow is like NewMeterWindow but panics on an invalid window.
// Intended for initialization and tests.
func MustNewMeterWindow(startUnixMilli, endUnixMilli int64, isCompleted bool) MeterWindow {
	window, err := NewMeterWindow(startUnixMilli, endUnixMilli, isCompleted)
	if err != nil {
		panic(err)
	}

	return window
}
|
||||
|
||||
func (w MeterWindow) Day() time.Time {
|
||||
t := time.UnixMilli(w.StartUnixMilli).UTC()
|
||||
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, time.UTC)
|
||||
}
|
||||
13
pkg/types/zeustypes/profile.go
Normal file
13
pkg/types/zeustypes/profile.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package zeustypes
|
||||
|
||||
// PostableProfile is the onboarding-questionnaire payload sent to Zeus for an
// organization.
type PostableProfile struct {
	UsesOtel                     bool     `json:"uses_otel" required:"true"`
	HasExistingObservabilityTool bool     `json:"has_existing_observability_tool" required:"true"`
	ExistingObservabilityTool    string   `json:"existing_observability_tool" required:"true"`
	ReasonsForInterestInSigNoz   []string `json:"reasons_for_interest_in_signoz" required:"true"`
	LogsScalePerDayInGB          int64    `json:"logs_scale_per_day_in_gb" required:"true"`
	NumberOfServices             int64    `json:"number_of_services" required:"true"`
	NumberOfHosts                int64    `json:"number_of_hosts" required:"true"`
	WhereDidYouDiscoverSigNoz    string   `json:"where_did_you_discover_signoz" required:"true"`
	TimelineForMigratingToSigNoz string   `json:"timeline_for_migrating_to_signoz" required:"true"`
}
|
||||
@@ -1,87 +0,0 @@
|
||||
package zeustypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/url"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
type PostableHost struct {
|
||||
Name string `json:"name" required:"true"`
|
||||
}
|
||||
|
||||
type PostableProfile struct {
|
||||
UsesOtel bool `json:"uses_otel" required:"true"`
|
||||
HasExistingObservabilityTool bool `json:"has_existing_observability_tool" required:"true"`
|
||||
ExistingObservabilityTool string `json:"existing_observability_tool" required:"true"`
|
||||
ReasonsForInterestInSigNoz []string `json:"reasons_for_interest_in_signoz" required:"true"`
|
||||
LogsScalePerDayInGB int64 `json:"logs_scale_per_day_in_gb" required:"true"`
|
||||
NumberOfServices int64 `json:"number_of_services" required:"true"`
|
||||
NumberOfHosts int64 `json:"number_of_hosts" required:"true"`
|
||||
WhereDidYouDiscoverSigNoz string `json:"where_did_you_discover_signoz" required:"true"`
|
||||
TimelineForMigratingToSigNoz string `json:"timeline_for_migrating_to_signoz" required:"true"`
|
||||
}
|
||||
|
||||
type GettableHost struct {
|
||||
Name string `json:"name" required:"true"`
|
||||
State string `json:"state" required:"true"`
|
||||
Tier string `json:"tier" required:"true"`
|
||||
Hosts []Host `json:"hosts" required:"true"`
|
||||
}
|
||||
|
||||
type Host struct {
|
||||
Name string `json:"name" required:"true"`
|
||||
IsDefault bool `json:"is_default" required:"true"`
|
||||
URL string `json:"url" required:"true"`
|
||||
}
|
||||
|
||||
func NewGettableHost(data []byte) *GettableHost {
|
||||
parsed := gjson.ParseBytes(data)
|
||||
dns := parsed.Get("cluster.region.dns").String()
|
||||
|
||||
hostResults := parsed.Get("hosts").Array()
|
||||
hosts := make([]Host, len(hostResults))
|
||||
|
||||
for i, h := range hostResults {
|
||||
name := h.Get("name").String()
|
||||
hosts[i].Name = name
|
||||
hosts[i].IsDefault = h.Get("is_default").Bool()
|
||||
hosts[i].URL = (&url.URL{Scheme: "https", Host: name + "." + dns}).String()
|
||||
}
|
||||
|
||||
return &GettableHost{
|
||||
Name: parsed.Get("name").String(),
|
||||
State: parsed.Get("state").String(),
|
||||
Tier: parsed.Get("tier").String(),
|
||||
Hosts: hosts,
|
||||
}
|
||||
}
|
||||
|
||||
// GettableDeployment represents the parsed deployment info from zeus.GetDeployment.
|
||||
// NOTE: this is not a full response structure, add more fields from actual response as per requirement.
|
||||
type GettableDeployment struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Cluster struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Region struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
}
|
||||
|
||||
// NewGettableDeployment parses raw GetDeployment bytes into a GettableDeployment.
|
||||
func NewGettableDeployment(data []byte) (*GettableDeployment, error) {
|
||||
deployment := new(GettableDeployment)
|
||||
err := json.Unmarshal(data, deployment)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to unmarshal deployment response")
|
||||
}
|
||||
|
||||
return deployment, nil
|
||||
}
|
||||
@@ -49,6 +49,14 @@ func (provider *provider) PutMetersV2(_ context.Context, _ string, _ []byte) err
|
||||
return errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "putting meters v2 is not supported")
|
||||
}
|
||||
|
||||
// PutMetersV3 is not supported by this provider and always returns an
// unsupported error.
func (provider *provider) PutMetersV3(_ context.Context, _ string, _ string, _ []byte) error {
	return errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "putting meters v3 is not supported")
}
|
||||
|
||||
// ListMeterCheckpoints is not supported by this provider and always returns an
// unsupported error.
func (provider *provider) ListMeterCheckpoints(_ context.Context, _ string) ([]zeustypes.MeterCheckpoint, error) {
	return nil, errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "list meter checkpoints is not supported")
}
|
||||
|
||||
// PutProfile is not supported by this provider and always returns an
// unsupported error.
func (provider *provider) PutProfile(_ context.Context, _ string, _ *zeustypes.PostableProfile) error {
	return errors.New(errors.TypeUnsupported, zeus.ErrCodeUnsupported, "putting profile is not supported")
}
|
||||
|
||||
@@ -35,6 +35,16 @@ type Zeus interface {
|
||||
// Puts the meters for the given license key using Zeus.
|
||||
PutMetersV2(context.Context, string, []byte) error
|
||||
|
||||
// PutMetersV3 ships one day's raw JSON array of meter readings to the
|
||||
// v2/meters endpoint. idempotencyKey is propagated as X-Idempotency-Key so
|
||||
// Zeus can UPSERT on retries.
|
||||
PutMetersV3(ctx context.Context, licenseKey string, idempotencyKey string, body []byte) error
|
||||
|
||||
// ListMeterCheckpoints returns the latest sealed (is_completed=true) UTC day
|
||||
// Zeus has stored for each billing meter name. Missing meter names are
|
||||
// treated by the cron as bootstrap cases.
|
||||
ListMeterCheckpoints(ctx context.Context, licenseKey string) ([]zeustypes.MeterCheckpoint, error)
|
||||
|
||||
// Put profile for the given license key.
|
||||
PutProfile(context.Context, string, *zeustypes.PostableProfile) error
|
||||
|
||||
|
||||
Reference in New Issue
Block a user